Microsoft.DataFactory factories/pipelines

Bicep resource definition

The factories/pipelines resource type can be deployed with operations that target: Resource groups (see resource group deployment commands).

For a list of changed properties in each API version, see change log.

Resource format

To create a Microsoft.DataFactory/factories/pipelines resource, add the following Bicep to your template.

resource symbolicname 'Microsoft.DataFactory/factories/pipelines@2018-06-01' = {
  parent: resourceSymbolicName
  name: 'string'
  properties: {
    activities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
    annotations: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    concurrency: int
    description: 'string'
    folder: {
      name: 'string'
    }
    parameters: {
      {customized property}: {
        defaultValue: any(Azure.Bicep.Types.Concrete.AnyType)
        type: 'string'
      }
    }
    policy: {
      elapsedTimeMetric: {
        duration: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    runDimensions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    variables: {
      {customized property}: {
        defaultValue: any(Azure.Bicep.Types.Concrete.AnyType)
        type: 'string'
      }
    }
  }
}

StoreReadSettings objects

Set the type property to specify the type of object.

For AmazonS3CompatibleReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AmazonS3CompatibleReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AmazonS3ReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AmazonS3ReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AzureBlobFSReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureBlobFSReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AzureBlobStorageReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureBlobStorageReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AzureDataLakeStoreReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  listAfter: any(Azure.Bicep.Types.Concrete.AnyType)
  listBefore: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataLakeStoreReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AzureFileStorageReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureFileStorageReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For FileServerReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileFilter: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'FileServerReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For FtpReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  disableChunking: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'FtpReadSettings'
  useBinaryTransfer: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For GoogleCloudStorageReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'GoogleCloudStorageReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For HdfsReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  distcpSettings: {
    distcpOptions: any(Azure.Bicep.Types.Concrete.AnyType)
    resourceManagerEndpoint: any(Azure.Bicep.Types.Concrete.AnyType)
    tempScriptPath: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HdfsReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For HttpReadSettings, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  additionalHeaders: any(Azure.Bicep.Types.Concrete.AnyType)
  requestBody: any(Azure.Bicep.Types.Concrete.AnyType)
  requestMethod: any(Azure.Bicep.Types.Concrete.AnyType)
  requestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HttpReadSettings'
}

For LakeHouseReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'LakeHouseReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For OracleCloudStorageReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  prefix: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OracleCloudStorageReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

For SftpReadSettings, use:

{
  deleteFilesAfterCompletion: any(Azure.Bicep.Types.Concrete.AnyType)
  disableChunking: any(Azure.Bicep.Types.Concrete.AnyType)
  enablePartitionDiscovery: any(Azure.Bicep.Types.Concrete.AnyType)
  fileListPath: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeEnd: any(Azure.Bicep.Types.Concrete.AnyType)
  modifiedDatetimeStart: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionRootPath: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SftpReadSettings'
  wildcardFileName: any(Azure.Bicep.Types.Concrete.AnyType)
  wildcardFolderPath: any(Azure.Bicep.Types.Concrete.AnyType)
}

Activity objects

Set the type property to specify the type of object.

For AppendVariable, use:

{
  type: 'AppendVariable'
  typeProperties: {
    value: any(Azure.Bicep.Types.Concrete.AnyType)
    variableName: 'string'
  }
}

For AzureDataExplorerCommand, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'AzureDataExplorerCommand'
  typeProperties: {
    command: any(Azure.Bicep.Types.Concrete.AnyType)
    commandTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For AzureFunctionActivity, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'AzureFunctionActivity'
  typeProperties: {
    body: any(Azure.Bicep.Types.Concrete.AnyType)
    functionName: any(Azure.Bicep.Types.Concrete.AnyType)
    headers: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    method: 'string'
  }
}

For AzureMLBatchExecution, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'AzureMLBatchExecution'
  typeProperties: {
    globalParameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    webServiceInputs: {
      {customized property}: {
        filePath: any(Azure.Bicep.Types.Concrete.AnyType)
        linkedServiceName: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
      }
    }
    webServiceOutputs: {
      {customized property}: {
        filePath: any(Azure.Bicep.Types.Concrete.AnyType)
        linkedServiceName: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
      }
    }
  }
}

For AzureMLExecutePipeline, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'AzureMLExecutePipeline'
  typeProperties: {
    continueOnStepFailure: any(Azure.Bicep.Types.Concrete.AnyType)
    dataPathAssignments: any(Azure.Bicep.Types.Concrete.AnyType)
    experimentName: any(Azure.Bicep.Types.Concrete.AnyType)
    mlParentRunId: any(Azure.Bicep.Types.Concrete.AnyType)
    mlPipelineEndpointId: any(Azure.Bicep.Types.Concrete.AnyType)
    mlPipelineId: any(Azure.Bicep.Types.Concrete.AnyType)
    mlPipelineParameters: any(Azure.Bicep.Types.Concrete.AnyType)
    version: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For AzureMLUpdateResource, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'AzureMLUpdateResource'
  typeProperties: {
    trainedModelFilePath: any(Azure.Bicep.Types.Concrete.AnyType)
    trainedModelLinkedServiceName: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    trainedModelName: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For ExecutePipeline, use:

{
  policy: {
    secureInput: bool
  }
  type: 'ExecutePipeline'
  typeProperties: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    pipeline: {
      name: 'string'
      referenceName: 'string'
      type: 'string'
    }
    waitOnCompletion: bool
  }
}

For Fail, use:

{
  type: 'Fail'
  typeProperties: {
    errorCode: any(Azure.Bicep.Types.Concrete.AnyType)
    message: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Filter, use:

{
  type: 'Filter'
  typeProperties: {
    condition: {
      type: 'string'
      value: 'string'
    }
    items: {
      type: 'string'
      value: 'string'
    }
  }
}

For ForEach, use:

{
  type: 'ForEach'
  typeProperties: {
    activities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
    batchCount: int
    isSequential: bool
    items: {
      type: 'string'
      value: 'string'
    }
  }
}

For IfCondition, use:

{
  type: 'IfCondition'
  typeProperties: {
    expression: {
      type: 'string'
      value: 'string'
    }
    ifFalseActivities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
    ifTrueActivities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
  }
}

For SetVariable, use:

{
  policy: {
    secureInput: bool
    secureOutput: bool
  }
  type: 'SetVariable'
  typeProperties: {
    setSystemVariable: bool
    value: any(Azure.Bicep.Types.Concrete.AnyType)
    variableName: 'string'
  }
}

For Switch, use:

{
  type: 'Switch'
  typeProperties: {
    cases: [
      {
        activities: [
          {
            dependsOn: [
              {
                activity: 'string'
                dependencyConditions: [
                  'string'
                ]
              }
            ]
            description: 'string'
            name: 'string'
            onInactiveMarkAs: 'string'
            state: 'string'
            userProperties: [
              {
                name: 'string'
                value: any(Azure.Bicep.Types.Concrete.AnyType)
              }
            ]
            type: 'string'
            // For remaining properties, see Activity objects
          }
        ]
        value: 'string'
      }
    ]
    defaultActivities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
    on: {
      type: 'string'
      value: 'string'
    }
  }
}

For Until, use:

{
  type: 'Until'
  typeProperties: {
    activities: [
      {
        dependsOn: [
          {
            activity: 'string'
            dependencyConditions: [
              'string'
            ]
          }
        ]
        description: 'string'
        name: 'string'
        onInactiveMarkAs: 'string'
        state: 'string'
        userProperties: [
          {
            name: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        type: 'string'
        // For remaining properties, see Activity objects
      }
    ]
    expression: {
      type: 'string'
      value: 'string'
    }
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Validation, use:

{
  type: 'Validation'
  typeProperties: {
    childItems: any(Azure.Bicep.Types.Concrete.AnyType)
    dataset: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    minimumSize: any(Azure.Bicep.Types.Concrete.AnyType)
    sleep: any(Azure.Bicep.Types.Concrete.AnyType)
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Wait, use:

{
  type: 'Wait'
  typeProperties: {
    waitTimeInSeconds: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For WebHook, use:

{
  policy: {
    secureInput: bool
    secureOutput: bool
  }
  type: 'WebHook'
  typeProperties: {
    authentication: {
      credential: {
        referenceName: 'string'
        type: 'string'
      }
      password: {
        type: 'string'
        // For remaining properties, see SecretBase objects
      }
      pfx: {
        type: 'string'
        // For remaining properties, see SecretBase objects
      }
      resource: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      username: any(Azure.Bicep.Types.Concrete.AnyType)
      userTenant: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    body: any(Azure.Bicep.Types.Concrete.AnyType)
    headers: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    method: 'string'
    reportStatusOnCallBack: any(Azure.Bicep.Types.Concrete.AnyType)
    timeout: 'string'
    url: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Copy, use:

{
  inputs: [
    {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
  ]
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  outputs: [
    {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
  ]
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'Copy'
  typeProperties: {
    dataIntegrationUnits: any(Azure.Bicep.Types.Concrete.AnyType)
    enableSkipIncompatibleRow: any(Azure.Bicep.Types.Concrete.AnyType)
    enableStaging: any(Azure.Bicep.Types.Concrete.AnyType)
    logSettings: {
      copyActivityLogSettings: {
        enableReliableLogging: any(Azure.Bicep.Types.Concrete.AnyType)
        logLevel: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      enableCopyActivityLog: any(Azure.Bicep.Types.Concrete.AnyType)
      logLocationSettings: {
        linkedServiceName: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        path: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    logStorageSettings: {
      enableReliableLogging: any(Azure.Bicep.Types.Concrete.AnyType)
      linkedServiceName: {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
      logLevel: any(Azure.Bicep.Types.Concrete.AnyType)
      path: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    parallelCopies: any(Azure.Bicep.Types.Concrete.AnyType)
    preserve: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    preserveRules: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    redirectIncompatibleRowSettings: {
      linkedServiceName: any(Azure.Bicep.Types.Concrete.AnyType)
      path: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    sink: {
      disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
      maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
      sinkRetryCount: any(Azure.Bicep.Types.Concrete.AnyType)
      sinkRetryWait: any(Azure.Bicep.Types.Concrete.AnyType)
      writeBatchSize: any(Azure.Bicep.Types.Concrete.AnyType)
      writeBatchTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      // For remaining properties, see CopySink objects
    }
    skipErrorFile: {
      dataInconsistency: any(Azure.Bicep.Types.Concrete.AnyType)
      fileMissing: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    source: {
      disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
      maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
      sourceRetryCount: any(Azure.Bicep.Types.Concrete.AnyType)
      sourceRetryWait: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      // For remaining properties, see CopySource objects
    }
    stagingSettings: {
      enableCompression: any(Azure.Bicep.Types.Concrete.AnyType)
      linkedServiceName: {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
      path: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    translator: any(Azure.Bicep.Types.Concrete.AnyType)
    validateDataConsistency: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Custom, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'Custom'
  typeProperties: {
    autoUserSpecification: any(Azure.Bicep.Types.Concrete.AnyType)
    command: any(Azure.Bicep.Types.Concrete.AnyType)
    extendedProperties: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    folderPath: any(Azure.Bicep.Types.Concrete.AnyType)
    referenceObjects: {
      datasets: [
        {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
      ]
      linkedServices: [
        {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
      ]
    }
    resourceLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    retentionTimeInDays: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For DataLakeAnalyticsU-SQL, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'DataLakeAnalyticsU-SQL'
  typeProperties: {
    compilationMode: any(Azure.Bicep.Types.Concrete.AnyType)
    degreeOfParallelism: any(Azure.Bicep.Types.Concrete.AnyType)
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    priority: any(Azure.Bicep.Types.Concrete.AnyType)
    runtimeVersion: any(Azure.Bicep.Types.Concrete.AnyType)
    scriptLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    scriptPath: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For DatabricksNotebook, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'DatabricksNotebook'
  typeProperties: {
    baseParameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    libraries: [
      {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    notebookPath: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For DatabricksSparkJar, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'DatabricksSparkJar'
  typeProperties: {
    libraries: [
      {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    mainClassName: any(Azure.Bicep.Types.Concrete.AnyType)
    parameters: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
  }
}

For DatabricksSparkPython, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'DatabricksSparkPython'
  typeProperties: {
    libraries: [
      {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    parameters: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    pythonFile: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For Delete, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'Delete'
  typeProperties: {
    dataset: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    enableLogging: any(Azure.Bicep.Types.Concrete.AnyType)
    logStorageSettings: {
      enableReliableLogging: any(Azure.Bicep.Types.Concrete.AnyType)
      linkedServiceName: {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
      logLevel: any(Azure.Bicep.Types.Concrete.AnyType)
      path: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    maxConcurrentConnections: int
    recursive: any(Azure.Bicep.Types.Concrete.AnyType)
    storeSettings: {
      disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
      maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For ExecuteDataFlow, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'ExecuteDataFlow'
  typeProperties: {
    compute: {
      computeType: any(Azure.Bicep.Types.Concrete.AnyType)
      coreCount: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    continuationSettings: {
      continuationTtlInMinutes: any(Azure.Bicep.Types.Concrete.AnyType)
      customizedCheckpointKey: any(Azure.Bicep.Types.Concrete.AnyType)
      idleCondition: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    continueOnError: any(Azure.Bicep.Types.Concrete.AnyType)
    dataFlow: {
      datasetParameters: any(Azure.Bicep.Types.Concrete.AnyType)
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    integrationRuntime: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    runConcurrently: any(Azure.Bicep.Types.Concrete.AnyType)
    sourceStagingConcurrency: any(Azure.Bicep.Types.Concrete.AnyType)
    staging: {
      folderPath: any(Azure.Bicep.Types.Concrete.AnyType)
      linkedService: {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    }
    traceLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For ExecuteSSISPackage, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'ExecuteSSISPackage'
  typeProperties: {
    connectVia: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    environmentPath: any(Azure.Bicep.Types.Concrete.AnyType)
    executionCredential: {
      domain: any(Azure.Bicep.Types.Concrete.AnyType)
      password: {
        type: 'string'
        value: 'string'
      }
      userName: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    loggingLevel: any(Azure.Bicep.Types.Concrete.AnyType)
    logLocation: {
      logPath: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      typeProperties: {
        accessCredential: {
          domain: any(Azure.Bicep.Types.Concrete.AnyType)
          password: {
            type: 'string'
            // For remaining properties, see SecretBase objects
          }
          userName: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        logRefreshInterval: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    packageConnectionManagers: {
      {customized property}: {
        {customized property}: {
          value: any(Azure.Bicep.Types.Concrete.AnyType)
        }
      }
    }
    packageLocation: {
      packagePath: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      typeProperties: {
        accessCredential: {
          domain: any(Azure.Bicep.Types.Concrete.AnyType)
          password: {
            type: 'string'
            // For remaining properties, see SecretBase objects
          }
          userName: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        childPackages: [
          {
            packageContent: any(Azure.Bicep.Types.Concrete.AnyType)
            packageLastModifiedDate: 'string'
            packageName: 'string'
            packagePath: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        configurationAccessCredential: {
          domain: any(Azure.Bicep.Types.Concrete.AnyType)
          password: {
            type: 'string'
            // For remaining properties, see SecretBase objects
          }
          userName: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        configurationPath: any(Azure.Bicep.Types.Concrete.AnyType)
        packageContent: any(Azure.Bicep.Types.Concrete.AnyType)
        packageLastModifiedDate: 'string'
        packageName: 'string'
        packagePassword: {
          type: 'string'
          // For remaining properties, see SecretBase objects
        }
      }
    }
    packageParameters: {
      {customized property}: {
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    projectConnectionManagers: {
      {customized property}: {
        {customized property}: {
          value: any(Azure.Bicep.Types.Concrete.AnyType)
        }
      }
    }
    projectParameters: {
      {customized property}: {
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    propertyOverrides: {
      {customized property}: {
        isSensitive: bool
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    runtime: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For ExecuteWranglingDataflow, use:

{
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'ExecuteWranglingDataflow'
  typeProperties: {
    compute: {
      computeType: any(Azure.Bicep.Types.Concrete.AnyType)
      coreCount: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    continuationSettings: {
      continuationTtlInMinutes: any(Azure.Bicep.Types.Concrete.AnyType)
      customizedCheckpointKey: any(Azure.Bicep.Types.Concrete.AnyType)
      idleCondition: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    continueOnError: any(Azure.Bicep.Types.Concrete.AnyType)
    dataFlow: {
      datasetParameters: any(Azure.Bicep.Types.Concrete.AnyType)
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    integrationRuntime: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    queries: [
      {
        dataflowSinks: [
          {
            dataset: {
              parameters: {
                {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
              }
              referenceName: 'string'
              type: 'string'
            }
            description: 'string'
            flowlet: {
              datasetParameters: any(Azure.Bicep.Types.Concrete.AnyType)
              parameters: {
                {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
              }
              referenceName: 'string'
              type: 'string'
            }
            linkedService: {
              parameters: {
                {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
              }
              referenceName: 'string'
              type: 'string'
            }
            name: 'string'
            rejectedDataLinkedService: {
              parameters: {
                {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
              }
              referenceName: 'string'
              type: 'string'
            }
            schemaLinkedService: {
              parameters: {
                {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
              }
              referenceName: 'string'
              type: 'string'
            }
            script: 'string'
          }
        ]
        queryName: 'string'
      }
    ]
    runConcurrently: any(Azure.Bicep.Types.Concrete.AnyType)
    sinks: {
      {customized property}: {
        dataset: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        description: 'string'
        flowlet: {
          datasetParameters: any(Azure.Bicep.Types.Concrete.AnyType)
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        linkedService: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        name: 'string'
        rejectedDataLinkedService: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        schemaLinkedService: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        script: 'string'
      }
    }
    sourceStagingConcurrency: any(Azure.Bicep.Types.Concrete.AnyType)
    staging: {
      folderPath: any(Azure.Bicep.Types.Concrete.AnyType)
      linkedService: {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    }
    traceLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For GetMetadata, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'GetMetadata'
  typeProperties: {
    dataset: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    fieldList: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    formatSettings: {
      type: 'string'
      // For remaining properties, see FormatReadSettings objects
    }
    storeSettings: {
      disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
      maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For HDInsightHive, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'HDInsightHive'
  typeProperties: {
    arguments: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    defines: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    getDebugInfo: 'string'
    queryTimeout: int
    scriptLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    scriptPath: any(Azure.Bicep.Types.Concrete.AnyType)
    storageLinkedServices: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
    variables: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
  }
}

For HDInsightMapReduce, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'HDInsightMapReduce'
  typeProperties: {
    arguments: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    className: any(Azure.Bicep.Types.Concrete.AnyType)
    defines: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    getDebugInfo: 'string'
    jarFilePath: any(Azure.Bicep.Types.Concrete.AnyType)
    jarLibs: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    jarLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    storageLinkedServices: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
  }
}

For HDInsightPig, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'HDInsightPig'
  typeProperties: {
    arguments: any(Azure.Bicep.Types.Concrete.AnyType)
    defines: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    getDebugInfo: 'string'
    scriptLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    scriptPath: any(Azure.Bicep.Types.Concrete.AnyType)
    storageLinkedServices: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
  }
}

For HDInsightSpark, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'HDInsightSpark'
  typeProperties: {
    arguments: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    className: 'string'
    entryFilePath: any(Azure.Bicep.Types.Concrete.AnyType)
    getDebugInfo: 'string'
    proxyUser: any(Azure.Bicep.Types.Concrete.AnyType)
    rootPath: any(Azure.Bicep.Types.Concrete.AnyType)
    sparkConfig: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    sparkJobLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
  }
}

For HDInsightStreaming, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'HDInsightStreaming'
  typeProperties: {
    arguments: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    combiner: any(Azure.Bicep.Types.Concrete.AnyType)
    commandEnvironment: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    defines: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    fileLinkedService: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    filePaths: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    getDebugInfo: 'string'
    input: any(Azure.Bicep.Types.Concrete.AnyType)
    mapper: any(Azure.Bicep.Types.Concrete.AnyType)
    output: any(Azure.Bicep.Types.Concrete.AnyType)
    reducer: any(Azure.Bicep.Types.Concrete.AnyType)
    storageLinkedServices: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
  }
}

For Lookup, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'Lookup'
  typeProperties: {
    dataset: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    firstRowOnly: any(Azure.Bicep.Types.Concrete.AnyType)
    source: {
      disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
      maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
      sourceRetryCount: any(Azure.Bicep.Types.Concrete.AnyType)
      sourceRetryWait: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      // For remaining properties, see CopySource objects
    }
  }
}

For Script, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'Script'
  typeProperties: {
    logSettings: {
      logDestination: 'string'
      logLocationSettings: {
        linkedServiceName: {
          parameters: {
            {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
          }
          referenceName: 'string'
          type: 'string'
        }
        path: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    scriptBlockExecutionTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
    scripts: [
      {
        parameters: [
          {
            direction: 'string'
            name: any(Azure.Bicep.Types.Concrete.AnyType)
            size: int
            type: 'string'
            value: any(Azure.Bicep.Types.Concrete.AnyType)
          }
        ]
        text: any(Azure.Bicep.Types.Concrete.AnyType)
        type: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
  }
}

For SparkJob, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'SparkJob'
  typeProperties: {
    args: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    className: any(Azure.Bicep.Types.Concrete.AnyType)
    conf: any(Azure.Bicep.Types.Concrete.AnyType)
    configurationType: 'string'
    driverSize: any(Azure.Bicep.Types.Concrete.AnyType)
    executorSize: any(Azure.Bicep.Types.Concrete.AnyType)
    file: any(Azure.Bicep.Types.Concrete.AnyType)
    files: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    filesV2: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    numExecutors: any(Azure.Bicep.Types.Concrete.AnyType)
    pythonCodeReference: [
      any(Azure.Bicep.Types.Concrete.AnyType)
    ]
    scanFolder: any(Azure.Bicep.Types.Concrete.AnyType)
    sparkConfig: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    sparkJob: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
    targetBigDataPool: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
    targetSparkConfiguration: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
  }
}

For SqlServerStoredProcedure, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'SqlServerStoredProcedure'
  typeProperties: {
    storedProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
    storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

For SynapseNotebook, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'SynapseNotebook'
  typeProperties: {
    conf: any(Azure.Bicep.Types.Concrete.AnyType)
    configurationType: 'string'
    driverSize: any(Azure.Bicep.Types.Concrete.AnyType)
    executorSize: any(Azure.Bicep.Types.Concrete.AnyType)
    notebook: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
    numExecutors: any(Azure.Bicep.Types.Concrete.AnyType)
    parameters: {
      {customized property}: {
        type: 'string'
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    }
    sparkConfig: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    sparkPool: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
    targetSparkConfiguration: {
      referenceName: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
    }
  }
}

For WebActivity, use:

{
  linkedServiceName: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  policy: {
    retry: any(Azure.Bicep.Types.Concrete.AnyType)
    retryIntervalInSeconds: int
    secureInput: bool
    secureOutput: bool
    timeout: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  type: 'WebActivity'
  typeProperties: {
    authentication: {
      credential: {
        referenceName: 'string'
        type: 'string'
      }
      password: {
        type: 'string'
        // For remaining properties, see SecretBase objects
      }
      pfx: {
        type: 'string'
        // For remaining properties, see SecretBase objects
      }
      resource: any(Azure.Bicep.Types.Concrete.AnyType)
      type: 'string'
      username: any(Azure.Bicep.Types.Concrete.AnyType)
      userTenant: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    body: any(Azure.Bicep.Types.Concrete.AnyType)
    connectVia: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
    datasets: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
    disableCertValidation: bool
    headers: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
    linkedServices: [
      {
        parameters: {
          {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
        }
        referenceName: 'string'
        type: 'string'
      }
    ]
    method: 'string'
    turnOffAsync: bool
    url: any(Azure.Bicep.Types.Concrete.AnyType)
  }
}

CompressionReadSettings objects

Set the type property to specify the type of object.

For TarGZipReadSettings, use:

{
  preserveCompressionFileNameAsFolder: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'TarGZipReadSettings'
}

For TarReadSettings, use:

{
  preserveCompressionFileNameAsFolder: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'TarReadSettings'
}

For ZipDeflateReadSettings, use:

{
  preserveZipFileNameAsFolder: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ZipDeflateReadSettings'
}

StoreWriteSettings objects

Set the type property to specify the type of object.

For AzureBlobFSWriteSettings, use:

{
  blockSizeInMB: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureBlobFSWriteSettings'
}

For AzureBlobStorageWriteSettings, use:

{
  blockSizeInMB: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureBlobStorageWriteSettings'
}

For AzureDataLakeStoreWriteSettings, use:

{
  expiryDateTime: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataLakeStoreWriteSettings'
}

For AzureFileStorageWriteSettings, use:

{
  type: 'AzureFileStorageWriteSettings'
}

For FileServerWriteSettings, use:

{
  type: 'FileServerWriteSettings'
}

For LakeHouseWriteSettings, use:

{
  type: 'LakeHouseWriteSettings'
}

For SftpWriteSettings, use:

{
  operationTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SftpWriteSettings'
  useTempFileRename: any(Azure.Bicep.Types.Concrete.AnyType)
}

CopySink objects

Set the type property to specify the type of object.

For AvroSink, use:

{
  formatSettings: {
    fileNamePrefix: any(Azure.Bicep.Types.Concrete.AnyType)
    maxRowsPerFile: any(Azure.Bicep.Types.Concrete.AnyType)
    recordName: 'string'
    recordNamespace: 'string'
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'AvroSink'
}

For AzureBlobFSSink, use:

{
  copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  metadata: [
    {
      name: any(Azure.Bicep.Types.Concrete.AnyType)
      value: any(Azure.Bicep.Types.Concrete.AnyType)
    }
  ]
  type: 'AzureBlobFSSink'
}

For AzureDataExplorerSink, use:

{
  flushImmediately: any(Azure.Bicep.Types.Concrete.AnyType)
  ingestionMappingAsJson: any(Azure.Bicep.Types.Concrete.AnyType)
  ingestionMappingName: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataExplorerSink'
}

For AzureDataLakeStoreSink, use:

{
  copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  enableAdlsSingleFileParallel: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataLakeStoreSink'
}

For AzureDatabricksDeltaLakeSink, use:

{
  importSettings: {
    dateFormat: any(Azure.Bicep.Types.Concrete.AnyType)
    timestampFormat: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDatabricksDeltaLakeSink'
}

For AzureMySqlSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureMySqlSink'
}

For AzurePostgreSqlSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzurePostgreSqlSink'
}

For AzureQueueSink, use:

{
  type: 'AzureQueueSink'
}

For AzureSearchIndexSink, use:

{
  type: 'AzureSearchIndexSink'
  writeBehavior: 'string'
}

For AzureSqlSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterTableType: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterUseTableLock: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureTableTypeParameterName: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureSqlSink'
  upsertSettings: {
    interimSchemaName: any(Azure.Bicep.Types.Concrete.AnyType)
    keys: any(Azure.Bicep.Types.Concrete.AnyType)
    useTempDB: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For AzureTableSink, use:

{
  azureTableDefaultPartitionKeyValue: any(Azure.Bicep.Types.Concrete.AnyType)
  azureTableInsertType: any(Azure.Bicep.Types.Concrete.AnyType)
  azureTablePartitionKeyName: any(Azure.Bicep.Types.Concrete.AnyType)
  azureTableRowKeyName: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureTableSink'
}

For BinarySink, use:

{
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'BinarySink'
}

For BlobSink, use:

{
  blobWriterAddHeader: any(Azure.Bicep.Types.Concrete.AnyType)
  blobWriterDateTimeFormat: any(Azure.Bicep.Types.Concrete.AnyType)
  blobWriterOverwriteFiles: any(Azure.Bicep.Types.Concrete.AnyType)
  copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  metadata: [
    {
      name: any(Azure.Bicep.Types.Concrete.AnyType)
      value: any(Azure.Bicep.Types.Concrete.AnyType)
    }
  ]
  type: 'BlobSink'
}

For CommonDataServiceForAppsSink, use:

{
  alternateKeyName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CommonDataServiceForAppsSink'
  writeBehavior: 'string'
}

For CosmosDbMongoDbApiSink, use:

{
  type: 'CosmosDbMongoDbApiSink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For CosmosDbSqlApiSink, use:

{
  type: 'CosmosDbSqlApiSink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For DelimitedTextSink, use:

{
  formatSettings: {
    fileExtension: any(Azure.Bicep.Types.Concrete.AnyType)
    fileNamePrefix: any(Azure.Bicep.Types.Concrete.AnyType)
    maxRowsPerFile: any(Azure.Bicep.Types.Concrete.AnyType)
    quoteAllText: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'DelimitedTextSink'
}

For DocumentDbCollectionSink, use:

{
  nestingSeparator: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DocumentDbCollectionSink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For DynamicsCrmSink, use:

{
  alternateKeyName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DynamicsCrmSink'
  writeBehavior: 'string'
}

For DynamicsSink, use:

{
  alternateKeyName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DynamicsSink'
  writeBehavior: 'string'
}

For FileSystemSink, use:

{
  copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'FileSystemSink'
}

For IcebergSink, use:

{
  formatSettings: {
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'IcebergSink'
}

For InformixSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'InformixSink'
}

For JsonSink, use:

{
  formatSettings: {
    filePattern: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'JsonSink'
}

For LakeHouseTableSink, use:

{
  partitionNameList: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  tableActionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'LakeHouseTableSink'
}

For MicrosoftAccessSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MicrosoftAccessSink'
}

For MongoDbAtlasSink, use:

{
  type: 'MongoDbAtlasSink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For MongoDbV2Sink, use:

{
  type: 'MongoDbV2Sink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For OdbcSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OdbcSink'
}

For OracleSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OracleSink'
}

For OrcSink, use:

{
  formatSettings: {
    fileNamePrefix: any(Azure.Bicep.Types.Concrete.AnyType)
    maxRowsPerFile: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'OrcSink'
}

For ParquetSink, use:

{
  formatSettings: {
    fileNamePrefix: any(Azure.Bicep.Types.Concrete.AnyType)
    maxRowsPerFile: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  storeSettings: {
    copyBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    metadata: [
      {
        name: any(Azure.Bicep.Types.Concrete.AnyType)
        value: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
    type: 'string'
    // For remaining properties, see StoreWriteSettings objects
  }
  type: 'ParquetSink'
}

For RestSink, use:

{
  additionalHeaders: any(Azure.Bicep.Types.Concrete.AnyType)
  httpCompressionType: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  requestInterval: any(Azure.Bicep.Types.Concrete.AnyType)
  requestMethod: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'RestSink'
}

For SalesforceServiceCloudSink, use:

{
  externalIdFieldName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceServiceCloudSink'
  writeBehavior: 'string'
}

For SalesforceServiceCloudV2Sink, use:

{
  externalIdFieldName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceServiceCloudV2Sink'
  writeBehavior: 'string'
}

For SalesforceSink, use:

{
  externalIdFieldName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceSink'
  writeBehavior: 'string'
}

For SalesforceV2Sink, use:

{
  externalIdFieldName: any(Azure.Bicep.Types.Concrete.AnyType)
  ignoreNullValues: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceV2Sink'
  writeBehavior: 'string'
}

For SapCloudForCustomerSink, use:

{
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapCloudForCustomerSink'
  writeBehavior: 'string'
}

For SnowflakeSink, use:

{
  importSettings: {
    additionalCopyOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    additionalFormatOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    storageIntegration: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SnowflakeSink'
}

For SnowflakeV2Sink, use:

{
  importSettings: {
    additionalCopyOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    additionalFormatOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    storageIntegration: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SnowflakeV2Sink'
}

For SqlDWSink, use:

{
  allowCopyCommand: any(Azure.Bicep.Types.Concrete.AnyType)
  allowPolyBase: any(Azure.Bicep.Types.Concrete.AnyType)
  copyCommandSettings: {
    additionalOptions: {
      {customized property}: 'string'
    }
    defaultValues: [
      {
        columnName: any(Azure.Bicep.Types.Concrete.AnyType)
        defaultValue: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
  }
  polyBaseSettings: {
    rejectSampleValue: any(Azure.Bicep.Types.Concrete.AnyType)
    rejectType: 'string'
    rejectValue: any(Azure.Bicep.Types.Concrete.AnyType)
    useTypeDefault: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterUseTableLock: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlDWSink'
  upsertSettings: {
    interimSchemaName: any(Azure.Bicep.Types.Concrete.AnyType)
    keys: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For SqlMISink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterTableType: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterUseTableLock: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureTableTypeParameterName: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlMISink'
  upsertSettings: {
    interimSchemaName: any(Azure.Bicep.Types.Concrete.AnyType)
    keys: any(Azure.Bicep.Types.Concrete.AnyType)
    useTempDB: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For SqlServerSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterTableType: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterUseTableLock: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureTableTypeParameterName: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlServerSink'
  upsertSettings: {
    interimSchemaName: any(Azure.Bicep.Types.Concrete.AnyType)
    keys: any(Azure.Bicep.Types.Concrete.AnyType)
    useTempDB: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For SqlSink, use:

{
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterTableType: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlWriterUseTableLock: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureTableTypeParameterName: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlSink'
  upsertSettings: {
    interimSchemaName: any(Azure.Bicep.Types.Concrete.AnyType)
    keys: any(Azure.Bicep.Types.Concrete.AnyType)
    useTempDB: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

For WarehouseSink, use:

{
  allowCopyCommand: any(Azure.Bicep.Types.Concrete.AnyType)
  copyCommandSettings: {
    additionalOptions: {
      {customized property}: 'string'
    }
    defaultValues: [
      {
        columnName: any(Azure.Bicep.Types.Concrete.AnyType)
        defaultValue: any(Azure.Bicep.Types.Concrete.AnyType)
      }
    ]
  }
  preCopyScript: any(Azure.Bicep.Types.Concrete.AnyType)
  tableOption: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'WarehouseSink'
  writeBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
}

FormatReadSettings objects

Set the type property to specify which kind of FormatReadSettings object you are defining.

For BinaryReadSettings, use:

{
  compressionProperties: {
    type: 'string'
    // For remaining properties, see CompressionReadSettings objects
  }
  type: 'BinaryReadSettings'
}

For DelimitedTextReadSettings, use:

{
  compressionProperties: {
    type: 'string'
    // For remaining properties, see CompressionReadSettings objects
  }
  skipLineCount: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DelimitedTextReadSettings'
}

For JsonReadSettings, use:

{
  compressionProperties: {
    type: 'string'
    // For remaining properties, see CompressionReadSettings objects
  }
  type: 'JsonReadSettings'
}

For ParquetReadSettings, use:

{
  compressionProperties: {
    type: 'string'
    // For remaining properties, see CompressionReadSettings objects
  }
  type: 'ParquetReadSettings'
}

For XmlReadSettings, use:

{
  compressionProperties: {
    type: 'string'
    // For remaining properties, see CompressionReadSettings objects
  }
  detectDataType: any(Azure.Bicep.Types.Concrete.AnyType)
  namespacePrefixes: any(Azure.Bicep.Types.Concrete.AnyType)
  namespaces: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'XmlReadSettings'
  validationMode: any(Azure.Bicep.Types.Concrete.AnyType)
}

CopySource objects

Set the type property to specify which kind of CopySource object you are defining.

For AmazonMWSSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AmazonMWSSource'
}

For AmazonRdsForOracleSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  oracleReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionNames: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AmazonRdsForOracleSource'
}

For AmazonRdsForSqlServerSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  produceAdditionalTypes: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AmazonRdsForSqlServerSource'
}

For AmazonRedshiftSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  redshiftUnloadSettings: {
    bucketName: any(Azure.Bicep.Types.Concrete.AnyType)
    s3LinkedServiceName: {
      parameters: {
        {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
      }
      referenceName: 'string'
      type: 'string'
    }
  }
  type: 'AmazonRedshiftSource'
}

For AvroSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'AvroSource'
}

For AzureBlobFSSource, use:

{
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  skipHeaderLineCount: any(Azure.Bicep.Types.Concrete.AnyType)
  treatEmptyAsNull: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureBlobFSSource'
}

For AzureDataExplorerSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  noTruncation: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataExplorerSource'
}

For AzureDataLakeStoreSource, use:

{
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDataLakeStoreSource'
}

For AzureDatabricksDeltaLakeSource, use:

{
  exportSettings: {
    dateFormat: any(Azure.Bicep.Types.Concrete.AnyType)
    timestampFormat: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureDatabricksDeltaLakeSource'
}

For AzureMariaDBSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureMariaDBSource'
}

For AzureMySqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureMySqlSource'
}

For AzurePostgreSqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzurePostgreSqlSource'
}

For AzureSqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  produceAdditionalTypes: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureSqlSource'
}

For AzureTableSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  azureTableSourceIgnoreTableNotFound: any(Azure.Bicep.Types.Concrete.AnyType)
  azureTableSourceQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'AzureTableSource'
}

For BinarySource, use:

{
  formatSettings: {
    compressionProperties: {
      type: 'string'
      // For remaining properties, see CompressionReadSettings objects
    }
    type: 'string'
  }
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'BinarySource'
}

For BlobSource, use:

{
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  skipHeaderLineCount: any(Azure.Bicep.Types.Concrete.AnyType)
  treatEmptyAsNull: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'BlobSource'
}

For CassandraSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  consistencyLevel: 'string'
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CassandraSource'
}

For CommonDataServiceForAppsSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CommonDataServiceForAppsSource'
}

For ConcurSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ConcurSource'
}

For CosmosDbMongoDbApiSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  batchSize: any(Azure.Bicep.Types.Concrete.AnyType)
  cursorMethods: {
    limit: any(Azure.Bicep.Types.Concrete.AnyType)
    project: any(Azure.Bicep.Types.Concrete.AnyType)
    skip: any(Azure.Bicep.Types.Concrete.AnyType)
    sort: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  filter: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CosmosDbMongoDbApiSource'
}

For CosmosDbSqlApiSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  detectDatetime: any(Azure.Bicep.Types.Concrete.AnyType)
  pageSize: any(Azure.Bicep.Types.Concrete.AnyType)
  preferredRegions: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CosmosDbSqlApiSource'
}

For CouchbaseSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'CouchbaseSource'
}

For Db2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'Db2Source'
}

For DelimitedTextSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  formatSettings: {
    compressionProperties: {
      type: 'string'
      // For remaining properties, see CompressionReadSettings objects
    }
    skipLineCount: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'DelimitedTextSource'
}

For DocumentDbCollectionSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  nestingSeparator: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DocumentDbCollectionSource'
}

For DrillSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DrillSource'
}

For DynamicsAXSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DynamicsAXSource'
}

For DynamicsCrmSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DynamicsCrmSource'
}

For DynamicsSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'DynamicsSource'
}

For EloquaSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'EloquaSource'
}

For ExcelSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'ExcelSource'
}

For FileSystemSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'FileSystemSource'
}

For GoogleAdWordsSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'GoogleAdWordsSource'
}

For GoogleBigQuerySource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'GoogleBigQuerySource'
}

For GoogleBigQueryV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'GoogleBigQueryV2Source'
}

For GreenplumSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'GreenplumSource'
}

For HBaseSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HBaseSource'
}

For HdfsSource, use:

{
  distcpSettings: {
    distcpOptions: any(Azure.Bicep.Types.Concrete.AnyType)
    resourceManagerEndpoint: any(Azure.Bicep.Types.Concrete.AnyType)
    tempScriptPath: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  recursive: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HdfsSource'
}

For HiveSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HiveSource'
}

For HttpSource, use:

{
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HttpSource'
}

For HubspotSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'HubspotSource'
}

For ImpalaSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ImpalaSource'
}

For InformixSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'InformixSource'
}

For JiraSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'JiraSource'
}

For JsonSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  formatSettings: {
    compressionProperties: {
      type: 'string'
      // For remaining properties, see CompressionReadSettings objects
    }
    type: 'string'
  }
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'JsonSource'
}

For LakeHouseTableSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  timestampAsOf: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'LakeHouseTableSource'
  versionAsOf: any(Azure.Bicep.Types.Concrete.AnyType)
}

For MagentoSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MagentoSource'
}

For MariaDBSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MariaDBSource'
}

For MarketoSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MarketoSource'
}

For MicrosoftAccessSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MicrosoftAccessSource'
}

For MongoDbAtlasSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  batchSize: any(Azure.Bicep.Types.Concrete.AnyType)
  cursorMethods: {
    limit: any(Azure.Bicep.Types.Concrete.AnyType)
    project: any(Azure.Bicep.Types.Concrete.AnyType)
    skip: any(Azure.Bicep.Types.Concrete.AnyType)
    sort: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  filter: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MongoDbAtlasSource'
}

For MongoDbSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MongoDbSource'
}

For MongoDbV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  batchSize: any(Azure.Bicep.Types.Concrete.AnyType)
  cursorMethods: {
    limit: any(Azure.Bicep.Types.Concrete.AnyType)
    project: any(Azure.Bicep.Types.Concrete.AnyType)
    skip: any(Azure.Bicep.Types.Concrete.AnyType)
    sort: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  filter: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MongoDbV2Source'
}

For MySqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'MySqlSource'
}

For NetezzaSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'NetezzaSource'
}

For ODataSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ODataSource'
}

For OdbcSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OdbcSource'
}

For Office365Source, use:

{
  allowedGroups: any(Azure.Bicep.Types.Concrete.AnyType)
  dateFilterColumn: any(Azure.Bicep.Types.Concrete.AnyType)
  endTime: any(Azure.Bicep.Types.Concrete.AnyType)
  outputColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  startTime: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'Office365Source'
  userScopeFilterUri: any(Azure.Bicep.Types.Concrete.AnyType)
}

For OracleServiceCloudSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OracleServiceCloudSource'
}

For OracleSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  oracleReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionNames: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'OracleSource'
}

For OrcSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'OrcSource'
}

For ParquetSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  formatSettings: {
    compressionProperties: {
      type: 'string'
      // For remaining properties, see CompressionReadSettings objects
    }
    type: 'string'
  }
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'ParquetSource'
}

For PaypalSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'PaypalSource'
}

For PhoenixSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'PhoenixSource'
}

For PostgreSqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'PostgreSqlSource'
}

For PostgreSqlV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'PostgreSqlV2Source'
}

For PrestoSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'PrestoSource'
}

For QuickBooksSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'QuickBooksSource'
}

For RelationalSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'RelationalSource'
}

For ResponsysSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ResponsysSource'
}

For RestSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  additionalHeaders: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  paginationRules: any(Azure.Bicep.Types.Concrete.AnyType)
  requestBody: any(Azure.Bicep.Types.Concrete.AnyType)
  requestInterval: any(Azure.Bicep.Types.Concrete.AnyType)
  requestMethod: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'RestSource'
}

For SalesforceMarketingCloudSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceMarketingCloudSource'
}

For SalesforceServiceCloudSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  readBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceServiceCloudSource'
}

For SalesforceServiceCloudV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  includeDeletedObjects: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  SOQLQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceServiceCloudV2Source'
}

For SalesforceSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  readBehavior: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceSource'
}

For SalesforceV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  includeDeletedObjects: any(Azure.Bicep.Types.Concrete.AnyType)
  pageSize: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  SOQLQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SalesforceV2Source'
}

For SapBwSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapBwSource'
}

For SapCloudForCustomerSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapCloudForCustomerSource'
}

For SapEccSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapEccSource'
}

For SapHanaSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  packetSize: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapHanaSource'
}

For SapOdpSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  extractionMode: any(Azure.Bicep.Types.Concrete.AnyType)
  projection: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  selection: any(Azure.Bicep.Types.Concrete.AnyType)
  subscriberProcess: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapOdpSource'
}

For SapOpenHubSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  baseRequestId: any(Azure.Bicep.Types.Concrete.AnyType)
  customRfcReadTableFunctionModule: any(Azure.Bicep.Types.Concrete.AnyType)
  excludeLastRequest: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sapDataColumnDelimiter: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapOpenHubSource'
}

For SapTableSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  batchSize: any(Azure.Bicep.Types.Concrete.AnyType)
  customRfcReadTableFunctionModule: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    maxPartitionsNumber: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  rfcTableFields: any(Azure.Bicep.Types.Concrete.AnyType)
  rfcTableOptions: any(Azure.Bicep.Types.Concrete.AnyType)
  rowCount: any(Azure.Bicep.Types.Concrete.AnyType)
  rowSkips: any(Azure.Bicep.Types.Concrete.AnyType)
  sapDataColumnDelimiter: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SapTableSource'
}

For ServiceNowSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ServiceNowSource'
}

For ServiceNowV2Source, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  expression: {
    operands: [
      ...
    ]
    operators: [
      'string'
    ]
    type: 'string'
    value: 'string'
  }
  pageSize: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ServiceNowV2Source'
}

For SharePointOnlineListSource, use:

{
  httpRequestTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SharePointOnlineListSource'
}

For ShopifySource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ShopifySource'
}

For SnowflakeSource, use:

{
  exportSettings: {
    additionalCopyOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    additionalFormatOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    storageIntegration: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SnowflakeSource'
}

For SnowflakeV2Source, use:

{
  exportSettings: {
    additionalCopyOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    additionalFormatOptions: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    storageIntegration: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SnowflakeV2Source'
}

For SparkSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SparkSource'
}

For SqlDWSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlDWSource'
}

For SqlMISource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  produceAdditionalTypes: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlMISource'
}

For SqlServerSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  produceAdditionalTypes: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlServerSource'
}

For SqlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SqlSource'
}

For SquareSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SquareSource'
}

For SybaseSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'SybaseSource'
}

For TeradataSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'TeradataSource'
}

For VerticaSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'VerticaSource'
}

For WarehouseSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  isolationLevel: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionOption: any(Azure.Bicep.Types.Concrete.AnyType)
  partitionSettings: {
    partitionColumnName: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionLowerBound: any(Azure.Bicep.Types.Concrete.AnyType)
    partitionUpperBound: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderQuery: any(Azure.Bicep.Types.Concrete.AnyType)
  sqlReaderStoredProcedureName: any(Azure.Bicep.Types.Concrete.AnyType)
  storedProcedureParameters: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'WarehouseSource'
}

For XeroSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'XeroSource'
}

For ZohoSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  query: any(Azure.Bicep.Types.Concrete.AnyType)
  queryTimeout: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'ZohoSource'
}

For WebSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  type: 'WebSource'
}

For XmlSource, use:

{
  additionalColumns: any(Azure.Bicep.Types.Concrete.AnyType)
  formatSettings: {
    compressionProperties: {
      type: 'string'
      // For remaining properties, see CompressionReadSettings objects
    }
    detectDataType: any(Azure.Bicep.Types.Concrete.AnyType)
    namespacePrefixes: any(Azure.Bicep.Types.Concrete.AnyType)
    namespaces: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    validationMode: any(Azure.Bicep.Types.Concrete.AnyType)
  }
  storeSettings: {
    disableMetricsCollection: any(Azure.Bicep.Types.Concrete.AnyType)
    maxConcurrentConnections: any(Azure.Bicep.Types.Concrete.AnyType)
    type: 'string'
    // For remaining properties, see StoreReadSettings objects
  }
  type: 'XmlSource'
}

SecretBase objects

Set the type property to specify the type of object.

For AzureKeyVaultSecret, use:

{
  secretName: any(Azure.Bicep.Types.Concrete.AnyType)
  secretVersion: any(Azure.Bicep.Types.Concrete.AnyType)
  store: {
    parameters: {
      {customized property}: any(Azure.Bicep.Types.Concrete.AnyType)
    }
    referenceName: 'string'
    type: 'string'
  }
  type: 'AzureKeyVaultSecret'
}

For SecureString, use:

{
  type: 'SecureString'
  value: 'string'
}

Property values

Activity

Name Description Value
dependsOn Activity depends on condition. ActivityDependency[]
description Activity description. string
name Activity name. string (required)
onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default. 'Failed'
'Skipped'
'Succeeded'
state Activity state. This is an optional property and if not provided, the state will be Active by default. 'Active'
'Inactive'
type Set to 'AppendVariable' for type AppendVariableActivity. Set to 'AzureDataExplorerCommand' for type AzureDataExplorerCommandActivity. Set to 'AzureFunctionActivity' for type AzureFunctionActivity. Set to 'AzureMLBatchExecution' for type AzureMLBatchExecutionActivity. Set to 'AzureMLExecutePipeline' for type AzureMLExecutePipelineActivity. Set to 'AzureMLUpdateResource' for type AzureMLUpdateResourceActivity. Set to 'ExecutePipeline' for type ExecutePipelineActivity. Set to 'Fail' for type FailActivity. Set to 'Filter' for type FilterActivity. Set to 'ForEach' for type ForEachActivity. Set to 'IfCondition' for type IfConditionActivity. Set to 'SetVariable' for type SetVariableActivity. Set to 'Switch' for type SwitchActivity. Set to 'Until' for type UntilActivity. Set to 'Validation' for type ValidationActivity. Set to 'Wait' for type WaitActivity. Set to 'WebHook' for type WebHookActivity. Set to 'Copy' for type CopyActivity. Set to 'Custom' for type CustomActivity. Set to 'DataLakeAnalyticsU-SQL' for type DataLakeAnalyticsUsqlActivity. Set to 'DatabricksNotebook' for type DatabricksNotebookActivity. Set to 'DatabricksSparkJar' for type DatabricksSparkJarActivity. Set to 'DatabricksSparkPython' for type DatabricksSparkPythonActivity. Set to 'Delete' for type DeleteActivity. Set to 'ExecuteDataFlow' for type ExecuteDataFlowActivity. Set to 'ExecuteSSISPackage' for type ExecuteSsisPackageActivity. Set to 'ExecuteWranglingDataflow' for type ExecuteWranglingDataflowActivity. Set to 'GetMetadata' for type GetMetadataActivity. Set to 'HDInsightHive' for type HDInsightHiveActivity. Set to 'HDInsightMapReduce' for type HDInsightMapReduceActivity. Set to 'HDInsightPig' for type HDInsightPigActivity. Set to 'HDInsightSpark' for type HDInsightSparkActivity. Set to 'HDInsightStreaming' for type HDInsightStreamingActivity. Set to 'Lookup' for type LookupActivity. Set to 'Script' for type ScriptActivity. Set to 'SparkJob' for type SynapseSparkJobDefinitionActivity. 
Set to 'SqlServerStoredProcedure' for type SqlServerStoredProcedureActivity. Set to 'SynapseNotebook' for type SynapseNotebookActivity. Set to 'WebActivity' for type WebActivity. 'AppendVariable'
'AzureDataExplorerCommand'
'AzureFunctionActivity'
'AzureMLBatchExecution'
'AzureMLExecutePipeline'
'AzureMLUpdateResource'
'Copy'
'Custom'
'DatabricksNotebook'
'DatabricksSparkJar'
'DatabricksSparkPython'
'DataLakeAnalyticsU-SQL'
'Delete'
'ExecuteDataFlow'
'ExecutePipeline'
'ExecuteSSISPackage'
'ExecuteWranglingDataflow'
'Fail'
'Filter'
'ForEach'
'GetMetadata'
'HDInsightHive'
'HDInsightMapReduce'
'HDInsightPig'
'HDInsightSpark'
'HDInsightStreaming'
'IfCondition'
'Lookup'
'Script'
'SetVariable'
'SparkJob'
'SqlServerStoredProcedure'
'Switch'
'SynapseNotebook'
'Until'
'Validation'
'Wait'
'WebActivity'
'WebHook' (required)
userProperties Activity user properties. UserProperty[]

ActivityDependency

Name Description Value
activity Activity name. string (required)
dependencyConditions Match-Condition for the dependency. String array containing any of:
'Completed'
'Failed'
'Skipped'
'Succeeded' (required)

ActivityPolicy

Name Description Value
retry Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. any
retryIntervalInSeconds Interval between each retry attempt (in seconds). The default is 30 sec. int

Constraints:
Min value = 30
Max value = 86400
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool
secureOutput When set to true, Output from activity is considered as secure and will not be logged to monitoring. bool
timeout Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

AmazonMWSSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AmazonMWSSource' (required)

AmazonRdsForOraclePartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionNames Names of the physical partitions of AmazonRdsForOracle table. any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

AmazonRdsForOracleSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
oracleReaderQuery AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for AmazonRdsForOracle source partitioning. AmazonRdsForOraclePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AmazonRdsForOracleSource' (required)

AmazonRdsForSqlServerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'AmazonRdsForSqlServerSource' (required)

AmazonRedshiftSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
redshiftUnloadSettings The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. RedshiftUnloadSettings
type Copy source type. 'AmazonRedshiftSource' (required)

AmazonS3CompatibleReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AmazonS3CompatibleReadSettings' (required)
wildcardFileName Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). any

AmazonS3ReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the S3 object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AmazonS3ReadSettings' (required)
wildcardFileName AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). any

AppendVariableActivity

Name Description Value
type Type of activity. 'AppendVariable' (required)
typeProperties Append Variable activity properties. AppendVariableActivityTypeProperties (required)

AppendVariableActivityTypeProperties

Name Description Value
value Value to be appended. Type: could be a static value matching type of the variable item or Expression with resultType matching type of the variable item any
variableName Name of the variable whose value needs to be appended to. string

AvroSink

Name Description Value
formatSettings Avro format settings. AvroWriteSettings
storeSettings Avro store settings. StoreWriteSettings
type Copy sink type. 'AvroSink' (required)

AvroSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings Avro store settings. StoreReadSettings
type Copy source type. 'AvroSource' (required)

AvroWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
recordName Top level record name in write result, which is required in AVRO spec. string
recordNamespace Record namespace in the write result. string
type The write setting type. string (required)

AzureBlobFSReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureBlobFSReadSettings' (required)
wildcardFileName Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureBlobFSSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. Type: string (or Expression with resultType string). any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Copy sink type. 'AzureBlobFSSink' (required)

AzureBlobFSSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
skipHeaderLineCount Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). any
treatEmptyAsNull Treat empty as null. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'AzureBlobFSSource' (required)

AzureBlobFSWriteSettings

Name Description Value
blockSizeInMB Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). any
type The write setting type. 'AzureBlobFSWriteSettings' (required)

AzureBlobStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureBlobStorageReadSettings' (required)
wildcardFileName Azure blob wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureBlobStorageWriteSettings

Name Description Value
blockSizeInMB Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). any
type The write setting type. 'AzureBlobStorageWriteSettings' (required)

AzureDatabricksDeltaLakeExportCommand

Name Description Value
dateFormat Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
timestampFormat Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
type The export setting type. string (required)

AzureDatabricksDeltaLakeImportCommand

Name Description Value
dateFormat Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
timestampFormat Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
type The import setting type. string (required)

AzureDatabricksDeltaLakeSink

Name Description Value
importSettings Azure Databricks Delta Lake import settings. AzureDatabricksDeltaLakeImportCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureDatabricksDeltaLakeSink' (required)

AzureDatabricksDeltaLakeSource

Name Description Value
exportSettings Azure Databricks Delta Lake export settings. AzureDatabricksDeltaLakeExportCommand
query Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'AzureDatabricksDeltaLakeSource' (required)

AzureDataExplorerCommandActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureDataExplorerCommand' (required)
typeProperties Azure Data Explorer command activity properties. AzureDataExplorerCommandActivityTypeProperties (required)

AzureDataExplorerCommandActivityTypeProperties

Name Description Value
command A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). any (required)
commandTimeout Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

AzureDataExplorerSink

Name Description Value
flushImmediately If set to true, any aggregation will be skipped. Default is false. Type: boolean. any
ingestionMappingAsJson An explicit column mapping description provided in a json format. Type: string. any
ingestionMappingName A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. any
type Copy sink type. 'AzureDataExplorerSink' (required)

AzureDataExplorerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
noTruncation The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. any
query Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). any (required)
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureDataExplorerSource' (required)

AzureDataLakeStoreReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
listAfter Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). any
listBefore Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureDataLakeStoreReadSettings' (required)
wildcardFileName ADLS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath ADLS wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureDataLakeStoreSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. Type: string (or Expression with resultType string). any
enableAdlsSingleFileParallel Single File Parallel. any
type Copy sink type. 'AzureDataLakeStoreSink' (required)

AzureDataLakeStoreSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'AzureDataLakeStoreSource' (required)

AzureDataLakeStoreWriteSettings

Name Description Value
expiryDateTime Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). any
type The write setting type. 'AzureDataLakeStoreWriteSettings' (required)

AzureFileStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureFileStorageReadSettings' (required)
wildcardFileName Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureFileStorageWriteSettings

Name Description Value
type The write setting type. 'AzureFileStorageWriteSettings' (required)

AzureFunctionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureFunctionActivity' (required)
typeProperties Azure Function activity properties. AzureFunctionActivityTypeProperties (required)

AzureFunctionActivityTypeProperties

Name Description Value
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). any
functionName Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) any (required)
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). AzureFunctionActivityTypePropertiesHeaders
method Rest API method for target endpoint. 'DELETE'
'GET'
'HEAD'
'OPTIONS'
'POST'
'PUT'
'TRACE' (required)

AzureFunctionActivityTypePropertiesHeaders

Name Description Value

AzureKeyVaultSecretReference

Name Description Value
secretName The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). any (required)
secretVersion The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). any
store The Azure Key Vault linked service reference. LinkedServiceReference (required)
type Type of the secret. 'AzureKeyVaultSecret' (required)

AzureMariaDBSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureMariaDBSource' (required)

AzureMLBatchExecutionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLBatchExecution' (required)
typeProperties Azure ML Batch Execution activity properties. AzureMLBatchExecutionActivityTypeProperties (required)

AzureMLBatchExecutionActivityTypeProperties

Name Description Value
globalParameters Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesGlobalParameters
webServiceInputs Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesWebServiceInputs
webServiceOutputs Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesWebServiceOutputs

AzureMLBatchExecutionActivityTypePropertiesGlobalParameters

Name Description Value

AzureMLBatchExecutionActivityTypePropertiesWebServiceInputs

Name Description Value

AzureMLBatchExecutionActivityTypePropertiesWebServiceOutputs

Name Description Value

AzureMLExecutePipelineActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLExecutePipeline' (required)
typeProperties Azure ML Execute Pipeline activity properties. AzureMLExecutePipelineActivityTypeProperties (required)

AzureMLExecutePipelineActivityTypeProperties

Name Description Value
continueOnStepFailure Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). any
dataPathAssignments Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with resultType object). any
experimentName Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). any
mlParentRunId The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). any
mlPipelineEndpointId ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). any
mlPipelineId ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). any
mlPipelineParameters Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). any
version Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). any

AzureMLUpdateResourceActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLUpdateResource' (required)
typeProperties Azure ML Update Resource management activity properties. AzureMLUpdateResourceActivityTypeProperties (required)

AzureMLUpdateResourceActivityTypeProperties

Name Description Value
trainedModelFilePath The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). any (required)
trainedModelLinkedServiceName Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. LinkedServiceReference (required)
trainedModelName Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). any (required)

AzureMLWebServiceFile

Name Description Value
filePath The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). any (required)
linkedServiceName Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. LinkedServiceReference (required)

AzureMySqlSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureMySqlSink' (required)

AzureMySqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureMySqlSource' (required)

AzurePostgreSqlSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzurePostgreSqlSink' (required)

AzurePostgreSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzurePostgreSqlSource' (required)

AzureQueueSink

Name Description Value
type Copy sink type. 'AzureQueueSink' (required)

AzureSearchIndexSink

Name Description Value
type Copy sink type. 'AzureSearchIndexSink' (required)
writeBehavior Specify the write behavior when upserting documents into Azure Search Index. 'Merge'
'Upload'

AzureSqlSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureSqlSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum) any

AzureSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'AzureSqlSource' (required)

AzureTableSink

Name Description Value
azureTableDefaultPartitionKeyValue Azure Table default partition key value. Type: string (or Expression with resultType string). any
azureTableInsertType Azure Table insert type. Type: string (or Expression with resultType string). any
azureTablePartitionKeyName Azure Table partition key name. Type: string (or Expression with resultType string). any
azureTableRowKeyName Azure Table row key name. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureTableSink' (required)

AzureTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
azureTableSourceIgnoreTableNotFound Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). any
azureTableSourceQuery Azure Table source query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureTableSource' (required)

BigDataPoolParametrizationReference

Name Description Value
referenceName Reference big data pool name. Type: string (or Expression with resultType string). any (required)
type Big data pool reference type. 'BigDataPoolReference' (required)

BinaryReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'BinaryReadSettings' (required)

BinaryReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

BinarySink

Name Description Value
storeSettings Binary store settings. StoreWriteSettings
type Copy sink type. 'BinarySink' (required)

BinarySource

Name Description Value
formatSettings Binary format settings. BinaryReadSettings
storeSettings Binary store settings. StoreReadSettings
type Copy source type. 'BinarySource' (required)

BlobSink

Name Description Value
blobWriterAddHeader Blob writer add header. Type: boolean (or Expression with resultType boolean). any
blobWriterDateTimeFormat Blob writer date time format. Type: string (or Expression with resultType string). any
blobWriterOverwriteFiles Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). any
copyBehavior The type of copy behavior for copy sink. any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Copy sink type. 'BlobSink' (required)

BlobSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
skipHeaderLineCount Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). any
treatEmptyAsNull Treat empty as null. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'BlobSource' (required)

CassandraSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
consistencyLevel The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. 'ALL'
'EACH_QUORUM'
'LOCAL_ONE'
'LOCAL_QUORUM'
'LOCAL_SERIAL'
'ONE'
'QUORUM'
'SERIAL'
'THREE'
'TWO'
query Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CassandraSource' (required)

CommonDataServiceForAppsSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'CommonDataServiceForAppsSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

CommonDataServiceForAppsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'CommonDataServiceForAppsSource' (required)

CompressionReadSettings

Name Description Value
type Set to 'TarGZipReadSettings' for type TarGZipReadSettings. Set to 'TarReadSettings' for type TarReadSettings. Set to 'ZipDeflateReadSettings' for type ZipDeflateReadSettings. 'TarGZipReadSettings'
'TarReadSettings'
'ZipDeflateReadSettings' (required)

ConcurSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ConcurSource' (required)

ContinuationSettingsReference

Name Description Value
continuationTtlInMinutes Continuation TTL in minutes. any
customizedCheckpointKey Customized checkpoint key. any
idleCondition Idle condition. any

CopyActivity

Name Description Value
inputs List of inputs for the activity. DatasetReference[]
linkedServiceName Linked service reference. LinkedServiceReference
outputs List of outputs for the activity. DatasetReference[]
policy Activity policy. ActivityPolicy
type Type of activity. 'Copy' (required)
typeProperties Copy activity properties. CopyActivityTypeProperties (required)

CopyActivityLogSettings

Name Description Value
enableReliableLogging Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). any
logLevel Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). any

CopyActivityTypeProperties

Name Description Value
dataIntegrationUnits Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. any
enableSkipIncompatibleRow Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). any
enableStaging Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). any
logSettings Log settings customer needs provide when enabling log. LogSettings
logStorageSettings (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. LogStorageSettings
parallelCopies Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. any
preserve Preserve rules. any[]
preserveRules Preserve Rules. any[]
redirectIncompatibleRowSettings Redirect incompatible row settings when EnableSkipIncompatibleRow is true. RedirectIncompatibleRowSettings
sink Copy activity sink. CopySink (required)
skipErrorFile Specify the fault tolerance for data consistency. SkipErrorFile
source Copy activity source. CopySource (required)
stagingSettings Specifies interim staging settings when EnableStaging is true. StagingSettings
translator Copy activity translator. If not specified, tabular translator is used. any
validateDataConsistency Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). any

CopySink

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). any
sinkRetryCount Sink retry count. Type: integer (or Expression with resultType integer). any
sinkRetryWait Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Set to 'AvroSink' for type AvroSink. Set to 'AzureBlobFSSink' for type AzureBlobFSSink. Set to 'AzureDataExplorerSink' for type AzureDataExplorerSink. Set to 'AzureDataLakeStoreSink' for type AzureDataLakeStoreSink. Set to 'AzureDatabricksDeltaLakeSink' for type AzureDatabricksDeltaLakeSink. Set to 'AzureMySqlSink' for type AzureMySqlSink. Set to 'AzurePostgreSqlSink' for type AzurePostgreSqlSink. Set to 'AzureQueueSink' for type AzureQueueSink. Set to 'AzureSearchIndexSink' for type AzureSearchIndexSink. Set to 'AzureSqlSink' for type AzureSqlSink. Set to 'AzureTableSink' for type AzureTableSink. Set to 'BinarySink' for type BinarySink. Set to 'BlobSink' for type BlobSink. Set to 'CommonDataServiceForAppsSink' for type CommonDataServiceForAppsSink. Set to 'CosmosDbMongoDbApiSink' for type CosmosDbMongoDbApiSink. Set to 'CosmosDbSqlApiSink' for type CosmosDbSqlApiSink. Set to 'DelimitedTextSink' for type DelimitedTextSink. Set to 'DocumentDbCollectionSink' for type DocumentDbCollectionSink. Set to 'DynamicsCrmSink' for type DynamicsCrmSink. Set to 'DynamicsSink' for type DynamicsSink. Set to 'FileSystemSink' for type FileSystemSink. Set to 'IcebergSink' for type IcebergSink. Set to 'InformixSink' for type InformixSink. Set to 'JsonSink' for type JsonSink. Set to 'LakeHouseTableSink' for type LakeHouseTableSink. Set to 'MicrosoftAccessSink' for type MicrosoftAccessSink. Set to 'MongoDbAtlasSink' for type MongoDbAtlasSink. Set to 'MongoDbV2Sink' for type MongoDbV2Sink. Set to 'OdbcSink' for type OdbcSink. Set to 'OracleSink' for type OracleSink. Set to 'OrcSink' for type OrcSink. Set to 'ParquetSink' for type ParquetSink. Set to 'RestSink' for type RestSink. Set to 'SalesforceServiceCloudSink' for type SalesforceServiceCloudSink. Set to 'SalesforceServiceCloudV2Sink' for type SalesforceServiceCloudV2Sink. Set to 'SalesforceSink' for type SalesforceSink. Set to 'SalesforceV2Sink' for type SalesforceV2Sink. 
Set to 'SapCloudForCustomerSink' for type SapCloudForCustomerSink. Set to 'SnowflakeSink' for type SnowflakeSink. Set to 'SnowflakeV2Sink' for type SnowflakeV2Sink. Set to 'SqlDWSink' for type SqlDWSink. Set to 'SqlMISink' for type SqlMISink. Set to 'SqlServerSink' for type SqlServerSink. Set to 'SqlSink' for type SqlSink. Set to 'WarehouseSink' for type WarehouseSink. 'AvroSink'
'AzureBlobFSSink'
'AzureDatabricksDeltaLakeSink'
'AzureDataExplorerSink'
'AzureDataLakeStoreSink'
'AzureMySqlSink'
'AzurePostgreSqlSink'
'AzureQueueSink'
'AzureSearchIndexSink'
'AzureSqlSink'
'AzureTableSink'
'BinarySink'
'BlobSink'
'CommonDataServiceForAppsSink'
'CosmosDbMongoDbApiSink'
'CosmosDbSqlApiSink'
'DelimitedTextSink'
'DocumentDbCollectionSink'
'DynamicsCrmSink'
'DynamicsSink'
'FileSystemSink'
'IcebergSink'
'InformixSink'
'JsonSink'
'LakeHouseTableSink'
'MicrosoftAccessSink'
'MongoDbAtlasSink'
'MongoDbV2Sink'
'OdbcSink'
'OracleSink'
'OrcSink'
'ParquetSink'
'RestSink'
'SalesforceServiceCloudSink'
'SalesforceServiceCloudV2Sink'
'SalesforceSink'
'SalesforceV2Sink'
'SapCloudForCustomerSink'
'SnowflakeSink'
'SnowflakeV2Sink'
'SqlDWSink'
'SqlMISink'
'SqlServerSink'
'SqlSink'
'WarehouseSink' (required)
writeBatchSize Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. any
writeBatchTimeout Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

CopySource

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
sourceRetryCount Source retry count. Type: integer (or Expression with resultType integer). any
sourceRetryWait Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Set to 'AmazonMWSSource' for type AmazonMWSSource. Set to 'AmazonRdsForOracleSource' for type AmazonRdsForOracleSource. Set to 'AmazonRdsForSqlServerSource' for type AmazonRdsForSqlServerSource. Set to 'AmazonRedshiftSource' for type AmazonRedshiftSource. Set to 'AvroSource' for type AvroSource. Set to 'AzureBlobFSSource' for type AzureBlobFSSource. Set to 'AzureDataExplorerSource' for type AzureDataExplorerSource. Set to 'AzureDataLakeStoreSource' for type AzureDataLakeStoreSource. Set to 'AzureDatabricksDeltaLakeSource' for type AzureDatabricksDeltaLakeSource. Set to 'AzureMariaDBSource' for type AzureMariaDBSource. Set to 'AzureMySqlSource' for type AzureMySqlSource. Set to 'AzurePostgreSqlSource' for type AzurePostgreSqlSource. Set to 'AzureSqlSource' for type AzureSqlSource. Set to 'AzureTableSource' for type AzureTableSource. Set to 'BinarySource' for type BinarySource. Set to 'BlobSource' for type BlobSource. Set to 'CassandraSource' for type CassandraSource. Set to 'CommonDataServiceForAppsSource' for type CommonDataServiceForAppsSource. Set to 'ConcurSource' for type ConcurSource. Set to 'CosmosDbMongoDbApiSource' for type CosmosDbMongoDbApiSource. Set to 'CosmosDbSqlApiSource' for type CosmosDbSqlApiSource. Set to 'CouchbaseSource' for type CouchbaseSource. Set to 'Db2Source' for type Db2Source. Set to 'DelimitedTextSource' for type DelimitedTextSource. Set to 'DocumentDbCollectionSource' for type DocumentDbCollectionSource. Set to 'DrillSource' for type DrillSource. Set to 'DynamicsAXSource' for type DynamicsAXSource. Set to 'DynamicsCrmSource' for type DynamicsCrmSource. Set to 'DynamicsSource' for type DynamicsSource. Set to 'EloquaSource' for type EloquaSource. Set to 'ExcelSource' for type ExcelSource. Set to 'FileSystemSource' for type FileSystemSource. Set to 'GoogleAdWordsSource' for type GoogleAdWordsSource. Set to 'GoogleBigQuerySource' for type GoogleBigQuerySource. Set to 'GoogleBigQueryV2Source' for type GoogleBigQueryV2Source. 
Set to 'GreenplumSource' for type GreenplumSource. Set to 'HBaseSource' for type HBaseSource. Set to 'HdfsSource' for type HdfsSource. Set to 'HiveSource' for type HiveSource. Set to 'HttpSource' for type HttpSource. Set to 'HubspotSource' for type HubspotSource. Set to 'ImpalaSource' for type ImpalaSource. Set to 'InformixSource' for type InformixSource. Set to 'JiraSource' for type JiraSource. Set to 'JsonSource' for type JsonSource. Set to 'LakeHouseTableSource' for type LakeHouseTableSource. Set to 'MagentoSource' for type MagentoSource. Set to 'MariaDBSource' for type MariaDBSource. Set to 'MarketoSource' for type MarketoSource. Set to 'MicrosoftAccessSource' for type MicrosoftAccessSource. Set to 'MongoDbAtlasSource' for type MongoDbAtlasSource. Set to 'MongoDbSource' for type MongoDbSource. Set to 'MongoDbV2Source' for type MongoDbV2Source. Set to 'MySqlSource' for type MySqlSource. Set to 'NetezzaSource' for type NetezzaSource. Set to 'ODataSource' for type ODataSource. Set to 'OdbcSource' for type OdbcSource. Set to 'Office365Source' for type Office365Source. Set to 'OracleServiceCloudSource' for type OracleServiceCloudSource. Set to 'OracleSource' for type OracleSource. Set to 'OrcSource' for type OrcSource. Set to 'ParquetSource' for type ParquetSource. Set to 'PaypalSource' for type PaypalSource. Set to 'PhoenixSource' for type PhoenixSource. Set to 'PostgreSqlSource' for type PostgreSqlSource. Set to 'PostgreSqlV2Source' for type PostgreSqlV2Source. Set to 'PrestoSource' for type PrestoSource. Set to 'QuickBooksSource' for type QuickBooksSource. Set to 'RelationalSource' for type RelationalSource. Set to 'ResponsysSource' for type ResponsysSource. Set to 'RestSource' for type RestSource. Set to 'SalesforceMarketingCloudSource' for type SalesforceMarketingCloudSource. Set to 'SalesforceServiceCloudSource' for type SalesforceServiceCloudSource. Set to 'SalesforceServiceCloudV2Source' for type SalesforceServiceCloudV2Source. 
Set to 'SalesforceSource' for type SalesforceSource. Set to 'SalesforceV2Source' for type SalesforceV2Source. Set to 'SapBwSource' for type SapBwSource. Set to 'SapCloudForCustomerSource' for type SapCloudForCustomerSource. Set to 'SapEccSource' for type SapEccSource. Set to 'SapHanaSource' for type SapHanaSource. Set to 'SapOdpSource' for type SapOdpSource. Set to 'SapOpenHubSource' for type SapOpenHubSource. Set to 'SapTableSource' for type SapTableSource. Set to 'ServiceNowSource' for type ServiceNowSource. Set to 'ServiceNowV2Source' for type ServiceNowV2Source. Set to 'SharePointOnlineListSource' for type SharePointOnlineListSource. Set to 'ShopifySource' for type ShopifySource. Set to 'SnowflakeSource' for type SnowflakeSource. Set to 'SnowflakeV2Source' for type SnowflakeV2Source. Set to 'SparkSource' for type SparkSource. Set to 'SqlDWSource' for type SqlDWSource. Set to 'SqlMISource' for type SqlMISource. Set to 'SqlServerSource' for type SqlServerSource. Set to 'SqlSource' for type SqlSource. Set to 'SquareSource' for type SquareSource. Set to 'SybaseSource' for type SybaseSource. Set to 'TeradataSource' for type TeradataSource. Set to 'VerticaSource' for type VerticaSource. Set to 'WarehouseSource' for type WarehouseSource. Set to 'XeroSource' for type XeroSource. Set to 'ZohoSource' for type ZohoSource. Set to 'WebSource' for type WebSource. Set to 'XmlSource' for type XmlSource. 'AmazonMWSSource'
'AmazonRdsForOracleSource'
'AmazonRdsForSqlServerSource'
'AmazonRedshiftSource'
'AvroSource'
'AzureBlobFSSource'
'AzureDatabricksDeltaLakeSource'
'AzureDataExplorerSource'
'AzureDataLakeStoreSource'
'AzureMariaDBSource'
'AzureMySqlSource'
'AzurePostgreSqlSource'
'AzureSqlSource'
'AzureTableSource'
'BinarySource'
'BlobSource'
'CassandraSource'
'CommonDataServiceForAppsSource'
'ConcurSource'
'CosmosDbMongoDbApiSource'
'CosmosDbSqlApiSource'
'CouchbaseSource'
'Db2Source'
'DelimitedTextSource'
'DocumentDbCollectionSource'
'DrillSource'
'DynamicsAXSource'
'DynamicsCrmSource'
'DynamicsSource'
'EloquaSource'
'ExcelSource'
'FileSystemSource'
'GoogleAdWordsSource'
'GoogleBigQuerySource'
'GoogleBigQueryV2Source'
'GreenplumSource'
'HBaseSource'
'HdfsSource'
'HiveSource'
'HttpSource'
'HubspotSource'
'ImpalaSource'
'InformixSource'
'JiraSource'
'JsonSource'
'LakeHouseTableSource'
'MagentoSource'
'MariaDBSource'
'MarketoSource'
'MicrosoftAccessSource'
'MongoDbAtlasSource'
'MongoDbSource'
'MongoDbV2Source'
'MySqlSource'
'NetezzaSource'
'ODataSource'
'OdbcSource'
'Office365Source'
'OracleServiceCloudSource'
'OracleSource'
'OrcSource'
'ParquetSource'
'PaypalSource'
'PhoenixSource'
'PostgreSqlSource'
'PostgreSqlV2Source'
'PrestoSource'
'QuickBooksSource'
'RelationalSource'
'ResponsysSource'
'RestSource'
'SalesforceMarketingCloudSource'
'SalesforceServiceCloudSource'
'SalesforceServiceCloudV2Source'
'SalesforceSource'
'SalesforceV2Source'
'SapBwSource'
'SapCloudForCustomerSource'
'SapEccSource'
'SapHanaSource'
'SapOdpSource'
'SapOpenHubSource'
'SapTableSource'
'ServiceNowSource'
'ServiceNowV2Source'
'SharePointOnlineListSource'
'ShopifySource'
'SnowflakeSource'
'SnowflakeV2Source'
'SparkSource'
'SqlDWSource'
'SqlMISource'
'SqlServerSource'
'SqlSource'
'SquareSource'
'SybaseSource'
'TeradataSource'
'VerticaSource'
'WarehouseSource'
'WebSource'
'XeroSource'
'XmlSource'
'ZohoSource' (required)

CosmosDbMongoDbApiSink

Name Description Value
type Copy sink type. 'CosmosDbMongoDbApiSink' (required)
writeBehavior Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

CosmosDbMongoDbApiSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query. MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CosmosDbMongoDbApiSource' (required)

CosmosDbSqlApiSink

Name Description Value
type Copy sink type. 'CosmosDbSqlApiSink' (required)
writeBehavior Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. any

CosmosDbSqlApiSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
detectDatetime Whether to detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). any
pageSize Page size of the result. Type: integer (or Expression with resultType integer). any
preferredRegions Preferred regions. Type: array of strings (or Expression with resultType array of strings). any
query SQL API query. Type: string (or Expression with resultType string). any
type Copy source type. 'CosmosDbSqlApiSource' (required)

CouchbaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CouchbaseSource' (required)

CredentialReference

Name Description Value
referenceName Reference credential name. string (required)
type Credential reference type. 'CredentialReference' (required)

CustomActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Custom' (required)
typeProperties Custom activity properties. CustomActivityTypeProperties (required)

CustomActivityReferenceObject

Name Description Value
datasets Dataset references. DatasetReference[]
linkedServices Linked service references. LinkedServiceReference[]

CustomActivityTypeProperties

Name Description Value
autoUserSpecification Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType string). any
command Command for custom activity Type: string (or Expression with resultType string). any (required)
extendedProperties User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. CustomActivityTypePropertiesExtendedProperties
folderPath Folder path for resource files Type: string (or Expression with resultType string). any
referenceObjects Reference objects CustomActivityReferenceObject
resourceLinkedService Resource linked service reference. LinkedServiceReference
retentionTimeInDays The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). any

CustomActivityTypePropertiesExtendedProperties

Name Description Value

DatabricksNotebookActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksNotebook' (required)
typeProperties Databricks Notebook activity properties. DatabricksNotebookActivityTypeProperties (required)

DatabricksNotebookActivityTypeProperties

Name Description Value
baseParameters Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. DatabricksNotebookActivityTypePropertiesBaseParameters
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksNotebookActivityTypePropertiesLibrariesItem[]
notebookPath The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). any (required)

DatabricksNotebookActivityTypePropertiesBaseParameters

Name Description Value

DatabricksNotebookActivityTypePropertiesLibrariesItem

Name Description Value

DatabricksSparkJarActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksSparkJar' (required)
typeProperties Databricks SparkJar activity properties. DatabricksSparkJarActivityTypeProperties (required)

DatabricksSparkJarActivityTypeProperties

Name Description Value
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksSparkJarActivityTypePropertiesLibrariesItem[]
mainClassName The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). any (required)
parameters Parameters that will be passed to the main method. any[]

DatabricksSparkJarActivityTypePropertiesLibrariesItem

Name Description Value

DatabricksSparkPythonActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksSparkPython' (required)
typeProperties Databricks SparkPython activity properties. DatabricksSparkPythonActivityTypeProperties (required)

DatabricksSparkPythonActivityTypeProperties

Name Description Value
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksSparkPythonActivityTypePropertiesLibrariesItem[]
parameters Command line parameters that will be passed to the Python file. any[]
pythonFile The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). any (required)

DatabricksSparkPythonActivityTypePropertiesLibrariesItem

Name Description Value

DataFlowReference

Name Description Value
datasetParameters Reference data flow parameters from dataset. any
parameters Data flow parameters ParameterValueSpecification
referenceName Reference data flow name. string (required)
type Data flow reference type. 'DataFlowReference' (required)

DataFlowStagingInfo

Name Description Value
folderPath Folder path for staging blob. Type: string (or Expression with resultType string) any
linkedService Staging linked service reference. LinkedServiceReference

DataLakeAnalyticsUsqlActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DataLakeAnalyticsU-SQL' (required)
typeProperties Data Lake Analytics U-SQL activity properties. DataLakeAnalyticsUsqlActivityTypeProperties (required)

DataLakeAnalyticsUsqlActivityTypeProperties

Name Description Value
compilationMode Compilation mode of U-SQL. Must be one of these values: Semantic, Full and SingleBox. Type: string (or Expression with resultType string). any
degreeOfParallelism The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. any
parameters Parameters for U-SQL job request. DataLakeAnalyticsUsqlActivityTypePropertiesParameters
priority Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. any
runtimeVersion Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). any
scriptLinkedService Script linked service reference. LinkedServiceReference (required)
scriptPath Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). any (required)

DataLakeAnalyticsUsqlActivityTypePropertiesParameters

Name Description Value

DatasetReference

Name Description Value
parameters Arguments for dataset. ParameterValueSpecification
referenceName Reference dataset name. string (required)
type Dataset reference type. 'DatasetReference' (required)

Db2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'Db2Source' (required)

DeleteActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Delete' (required)
typeProperties Delete activity properties. DeleteActivityTypeProperties (required)

DeleteActivityTypeProperties

Name Description Value
dataset Delete activity dataset reference. DatasetReference (required)
enableLogging Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). any
logStorageSettings Log storage settings customer need to provide when enableLogging is true. LogStorageSettings
maxConcurrentConnections The max concurrent connections to connect data source at the same time. int

Constraints:
Min value = 1
recursive If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). any
storeSettings Delete activity store settings. StoreReadSettings

DelimitedTextReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
skipLineCount Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). any
type The read setting type. 'DelimitedTextReadSettings' (required)

DelimitedTextReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
skipLineCount Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). any
type The read setting type. string (required)

DelimitedTextSink

Name Description Value
formatSettings DelimitedText format settings. DelimitedTextWriteSettings
storeSettings DelimitedText store settings. StoreWriteSettings
type Copy sink type. 'DelimitedTextSink' (required)

DelimitedTextSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings DelimitedText format settings. DelimitedTextReadSettings
storeSettings DelimitedText store settings. StoreReadSettings
type Copy source type. 'DelimitedTextSource' (required)

DelimitedTextWriteSettings

Name Description Value
fileExtension The file extension used to create the files. Type: string (or Expression with resultType string). any (required)
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
quoteAllText Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). any
type The write setting type. string (required)

DistcpSettings

Name Description Value
distcpOptions Specifies the Distcp options. Type: string (or Expression with resultType string). any
resourceManagerEndpoint Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). any (required)
tempScriptPath Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). any (required)

DocumentDbCollectionSink

Name Description Value
nestingSeparator Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). any
type Copy sink type. 'DocumentDbCollectionSink' (required)
writeBehavior Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. any

DocumentDbCollectionSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
nestingSeparator Nested properties separator. Type: string (or Expression with resultType string). any
query Documents query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DocumentDbCollectionSource' (required)

DrillSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DrillSource' (required)

DWCopyCommandDefaultValue

Name Description Value
columnName Column name. Type: object (or Expression with resultType string). any
defaultValue The default value of the column. Type: object (or Expression with resultType string). any

DWCopyCommandSettings

Name Description Value
additionalOptions Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" } DWCopyCommandSettingsAdditionalOptions
defaultValues Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). DWCopyCommandDefaultValue[]

DWCopyCommandSettingsAdditionalOptions

Name Description Value

DynamicsAXSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DynamicsAXSource' (required)

DynamicsCrmSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'DynamicsCrmSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

DynamicsCrmSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'DynamicsCrmSource' (required)

DynamicsSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'DynamicsSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

DynamicsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'DynamicsSource' (required)

EloquaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'EloquaSource' (required)

ExcelSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings Excel store settings. StoreReadSettings
type Copy source type. 'ExcelSource' (required)

ExecuteDataFlowActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteDataFlow' (required)
typeProperties Execute data flow activity properties. ExecuteDataFlowActivityTypeProperties (required)

ExecuteDataFlowActivityTypeProperties

Name Description Value
compute Compute properties for data flow activity. ExecuteDataFlowActivityTypePropertiesCompute
continuationSettings Continuation settings for execute data flow activity. ContinuationSettingsReference
continueOnError Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) any
dataFlow Data flow reference. DataFlowReference (required)
integrationRuntime The integration runtime reference. IntegrationRuntimeReference
runConcurrently Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) any
sourceStagingConcurrency Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) any
staging Staging info for execute data flow activity. DataFlowStagingInfo
traceLevel Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) any

ExecuteDataFlowActivityTypePropertiesCompute

Name Description Value
computeType Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string) any
coreCount Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer) any

ExecutePipelineActivity

Name Description Value
policy Execute pipeline activity policy. ExecutePipelineActivityPolicy
type Type of activity. 'ExecutePipeline' (required)
typeProperties Execute pipeline activity properties. ExecutePipelineActivityTypeProperties (required)

ExecutePipelineActivityPolicy

Name Description Value
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool

ExecutePipelineActivityTypeProperties

Name Description Value
parameters Pipeline parameters. ParameterValueSpecification
pipeline Pipeline reference. PipelineReference (required)
waitOnCompletion Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. bool

ExecutePowerQueryActivityTypeProperties

Name Description Value
compute Compute properties for data flow activity. ExecuteDataFlowActivityTypePropertiesCompute
continuationSettings Continuation settings for execute data flow activity. ContinuationSettingsReference
continueOnError Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) any
dataFlow Data flow reference. DataFlowReference (required)
integrationRuntime The integration runtime reference. IntegrationRuntimeReference
queries List of mapping for Power Query mashup query to sink dataset(s). PowerQuerySinkMapping[]
runConcurrently Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) any
sinks (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. ExecutePowerQueryActivityTypePropertiesSinks
sourceStagingConcurrency Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) any
staging Staging info for execute data flow activity. DataFlowStagingInfo
traceLevel Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) any

ExecutePowerQueryActivityTypePropertiesSinks

Name Description Value

ExecuteSsisPackageActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteSSISPackage' (required)
typeProperties Execute SSIS package activity properties. ExecuteSsisPackageActivityTypeProperties (required)

ExecuteSsisPackageActivityTypeProperties

Name Description Value
connectVia The integration runtime reference. IntegrationRuntimeReference (required)
environmentPath The environment path to execute the SSIS package. Type: string (or Expression with resultType string). any
executionCredential The package execution credential. SsisExecutionCredential
loggingLevel The logging level of SSIS package execution. Type: string (or Expression with resultType string). any
logLocation SSIS package execution log location. SsisLogLocation
packageConnectionManagers The package level connection managers to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPackageConnectionManagers
packageLocation SSIS package location. SsisPackageLocation (required)
packageParameters The package level parameters to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPackageParameters
projectConnectionManagers The project level connection managers to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesProjectConnectionManagers
projectParameters The project level parameters to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesProjectParameters
propertyOverrides The property overrides to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPropertyOverrides
runtime Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). any

ExecuteSsisPackageActivityTypePropertiesPackageConnectionManagers

Name Description Value

ExecuteSsisPackageActivityTypePropertiesPackageParameters

Name Description Value

ExecuteSsisPackageActivityTypePropertiesProjectConnectionManagers

Name Description Value

ExecuteSsisPackageActivityTypePropertiesProjectParameters

Name Description Value

ExecuteSsisPackageActivityTypePropertiesPropertyOverrides

Name Description Value

ExecuteWranglingDataflowActivity

Name Description Value
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteWranglingDataflow' (required)
typeProperties Execute power query activity properties. ExecutePowerQueryActivityTypeProperties (required)

Expression

Name Description Value
type Expression type. 'Expression' (required)
value Expression value. string (required)

ExpressionV2

Name Description Value
operands List of nested expressions. ExpressionV2[]
operators Expression operator value. Type: list of strings. string[]
type Type of expressions supported by the system. Type: string. 'Binary'
'Constant'
'Field'
'NAry'
'Unary'
value Value for Constant/Field. Type: string. string

FailActivity

Name Description Value
type Type of activity. 'Fail' (required)
typeProperties Fail activity properties. FailActivityTypeProperties (required)

FailActivityTypeProperties

Name Description Value
errorCode The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). any (required)
message The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). any (required)

FileServerReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileFilter Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'FileServerReadSettings' (required)
wildcardFileName FileServer wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath FileServer wildcardFolderPath. Type: string (or Expression with resultType string). any

FileServerWriteSettings

Name Description Value
type The write setting type. 'FileServerWriteSettings' (required)

FileSystemSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. any
type Copy sink type. 'FileSystemSink' (required)

FileSystemSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'FileSystemSource' (required)

FilterActivity

Name Description Value
type Type of activity. 'Filter' (required)
typeProperties Filter activity properties. FilterActivityTypeProperties (required)

FilterActivityTypeProperties

Name Description Value
condition Condition to be used for filtering the input. Expression (required)
items Input array on which filter should be applied. Expression (required)

ForEachActivity

Name Description Value
type Type of activity. 'ForEach' (required)
typeProperties ForEach activity properties. ForEachActivityTypeProperties (required)

ForEachActivityTypeProperties

Name Description Value
activities List of activities to execute. Activity[] (required)
batchCount Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). int

Constraints:
Max value = 50
isSequential Should the loop be executed in sequence or in parallel (max 50) bool
items Collection to iterate. Expression (required)

FormatReadSettings

Name Description Value
type Set to 'BinaryReadSettings' for type BinaryReadSettings. Set to 'DelimitedTextReadSettings' for type DelimitedTextReadSettings. Set to 'JsonReadSettings' for type JsonReadSettings. Set to 'ParquetReadSettings' for type ParquetReadSettings. Set to 'XmlReadSettings' for type XmlReadSettings. 'BinaryReadSettings'
'DelimitedTextReadSettings'
'JsonReadSettings'
'ParquetReadSettings'
'XmlReadSettings' (required)

FtpReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
disableChunking If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'FtpReadSettings' (required)
useBinaryTransfer Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType boolean). any
wildcardFileName Ftp wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Ftp wildcardFolderPath. Type: string (or Expression with resultType string). any

GetMetadataActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'GetMetadata' (required)
typeProperties GetMetadata activity properties. GetMetadataActivityTypeProperties (required)

GetMetadataActivityTypeProperties

Name Description Value
dataset GetMetadata activity dataset reference. DatasetReference (required)
fieldList Fields of metadata to get from dataset. any[]
formatSettings GetMetadata activity format settings. FormatReadSettings
storeSettings GetMetadata activity store settings. StoreReadSettings

GoogleAdWordsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleAdWordsSource' (required)

GoogleBigQuerySource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleBigQuerySource' (required)

GoogleBigQueryV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleBigQueryV2Source' (required)

GoogleCloudStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'GoogleCloudStorageReadSettings' (required)
wildcardFileName Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

GreenplumSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GreenplumSource' (required)

HBaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HBaseSource' (required)

HdfsReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
distcpSettings Specifies Distcp-related settings. DistcpSettings
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'HdfsReadSettings' (required)
wildcardFileName HDFS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath HDFS wildcardFolderPath. Type: string (or Expression with resultType string). any

HdfsSource

Name Description Value
distcpSettings Specifies Distcp-related settings. DistcpSettings
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'HdfsSource' (required)

HDInsightHiveActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightHive' (required)
typeProperties HDInsight Hive activity properties. HDInsightHiveActivityTypeProperties (required)

HDInsightHiveActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
defines Allows user to specify defines for Hive job request. HDInsightHiveActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
queryTimeout Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) int
scriptLinkedService Script linked service reference. LinkedServiceReference
scriptPath Script path. Type: string (or Expression with resultType string). any
storageLinkedServices Storage linked service references. LinkedServiceReference[]
variables User specified arguments under hivevar namespace. HDInsightHiveActivityTypePropertiesVariables

HDInsightHiveActivityTypePropertiesDefines

Name Description Value

HDInsightHiveActivityTypePropertiesVariables

Name Description Value

HDInsightMapReduceActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightMapReduce' (required)
typeProperties HDInsight MapReduce activity properties. HDInsightMapReduceActivityTypeProperties (required)

HDInsightMapReduceActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
className Class name. Type: string (or Expression with resultType string). any (required)
defines Allows user to specify defines for the MapReduce job request. HDInsightMapReduceActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
jarFilePath Jar path. Type: string (or Expression with resultType string). any (required)
jarLibs Jar libs. any[]
jarLinkedService Jar linked service reference. LinkedServiceReference
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightMapReduceActivityTypePropertiesDefines

Name Description Value

HDInsightPigActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightPig' (required)
typeProperties HDInsight Pig activity properties. HDInsightPigActivityTypeProperties (required)

HDInsightPigActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). any
defines Allows user to specify defines for Pig job request. HDInsightPigActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
scriptLinkedService Script linked service reference. LinkedServiceReference
scriptPath Script path. Type: string (or Expression with resultType string). any
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightPigActivityTypePropertiesDefines

Name Description Value

HDInsightSparkActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightSpark' (required)
typeProperties HDInsight spark activity properties. HDInsightSparkActivityTypeProperties (required)

HDInsightSparkActivityTypeProperties

Name Description Value
arguments The user-specified arguments to HDInsightSparkActivity. any[]
className The application's Java/Spark main class. string
entryFilePath The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). any (required)
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
proxyUser The user to impersonate that will execute the job. Type: string (or Expression with resultType string). any
rootPath The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). any (required)
sparkConfig Spark configuration property. HDInsightSparkActivityTypePropertiesSparkConfig
sparkJobLinkedService The storage linked service for uploading the entry file and dependencies, and for receiving logs. LinkedServiceReference

HDInsightSparkActivityTypePropertiesSparkConfig

Name Description Value

HDInsightStreamingActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightStreaming' (required)
typeProperties HDInsight streaming activity properties. HDInsightStreamingActivityTypeProperties (required)

HDInsightStreamingActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
combiner Combiner executable name. Type: string (or Expression with resultType string). any
commandEnvironment Command line environment values. any[]
defines Allows user to specify defines for streaming job request. HDInsightStreamingActivityTypePropertiesDefines
fileLinkedService Linked service reference where the files are located. LinkedServiceReference
filePaths Paths to streaming job files. Can be directories. any[] (required)
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
input Input blob path. Type: string (or Expression with resultType string). any (required)
mapper Mapper executable name. Type: string (or Expression with resultType string). any (required)
output Output blob path. Type: string (or Expression with resultType string). any (required)
reducer Reducer executable name. Type: string (or Expression with resultType string). any (required)
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightStreamingActivityTypePropertiesDefines

Name Description Value

HiveSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HiveSource' (required)

HttpReadSettings

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). any
requestBody The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). any
requestMethod The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). any
requestTimeout Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with resultType string). any
type The read setting type. 'HttpReadSettings' (required)

HttpSource

Name Description Value
httpRequestTimeout Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HttpSource' (required)

HubspotSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HubspotSource' (required)

IcebergSink

Name Description Value
formatSettings Iceberg format settings. IcebergWriteSettings
storeSettings Iceberg store settings. StoreWriteSettings
type Copy sink type. 'IcebergSink' (required)

IcebergWriteSettings

Name Description Value
type The write setting type. string (required)

IfConditionActivity

Name Description Value
type Type of activity. 'IfCondition' (required)
typeProperties IfCondition activity properties. IfConditionActivityTypeProperties (required)

IfConditionActivityTypeProperties

Name Description Value
expression An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. Expression (required)
ifFalseActivities List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. Activity[]
ifTrueActivities List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. Activity[]

ImpalaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ImpalaSource' (required)

InformixSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'InformixSink' (required)

InformixSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'InformixSource' (required)

IntegrationRuntimeReference

Name Description Value
parameters Arguments for integration runtime. ParameterValueSpecification
referenceName Reference integration runtime name. string (required)
type Type of integration runtime. 'IntegrationRuntimeReference' (required)

JiraSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'JiraSource' (required)

JsonReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'JsonReadSettings' (required)

JsonReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

JsonSink

Name Description Value
formatSettings Json format settings. JsonWriteSettings
storeSettings Json store settings. StoreWriteSettings
type Copy sink type. 'JsonSink' (required)

JsonSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Json format settings. JsonReadSettings
storeSettings Json store settings. StoreReadSettings
type Copy source type. 'JsonSource' (required)

JsonWriteSettings

Name Description Value
filePattern File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. any
type The write setting type. string (required)

LakeHouseReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'LakeHouseReadSettings' (required)
wildcardFileName Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string (or Expression with resultType string). any

LakeHouseTableSink

Name Description Value
partitionNameList Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType array of objects). any
partitionOption Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will be a new partition. Possible values include: "None", "PartitionByKey". any
tableActionOption The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". any
type Copy sink type. 'LakeHouseTableSink' (required)

LakeHouseTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
timestampAsOf Query an older snapshot by timestamp. Type: string (or Expression with resultType string). any
type Copy source type. 'LakeHouseTableSource' (required)
versionAsOf Query an older snapshot by version. Type: integer (or Expression with resultType integer). any

LakeHouseWriteSettings

Name Description Value
type The write setting type. 'LakeHouseWriteSettings' (required)

LinkedServiceReference

Name Description Value
parameters Arguments for LinkedService. ParameterValueSpecification
referenceName Reference LinkedService name. string (required)
type Linked service reference type. 'LinkedServiceReference' (required)

LogLocationSettings

Name Description Value
linkedServiceName Log storage linked service reference. LinkedServiceReference (required)
path The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). any

LogSettings

Name Description Value
copyActivityLogSettings Specifies settings for copy activity log. CopyActivityLogSettings
enableCopyActivityLog Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). any
logLocationSettings Log location settings customer needs to provide when enabling log. LogLocationSettings (required)

LogStorageSettings

Name Description Value
enableReliableLogging Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). any
linkedServiceName Log storage linked service reference. LinkedServiceReference (required)
logLevel Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). any
path The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). any

LookupActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Lookup' (required)
typeProperties Lookup activity properties. LookupActivityTypeProperties (required)

LookupActivityTypeProperties

Name Description Value
dataset Lookup activity dataset reference. DatasetReference (required)
firstRowOnly Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). any
source Dataset-specific source properties, same as copy activity source. CopySource (required)

MagentoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MagentoSource' (required)

MariaDBSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MariaDBSource' (required)

MarketoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MarketoSource' (required)

MetadataItem

Name Description Value
name Metadata item key name. Type: string (or Expression with resultType string). any
value Metadata item value. Type: string (or Expression with resultType string). any

Microsoft.DataFactory/factories/pipelines

Name Description Value
name The resource name string

Constraints:
Min length = 1
Max length = 260
Pattern = ^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$ (required)
parent In Bicep, you can specify the parent resource for a child resource. You only need to add this property when the child resource is declared outside of the parent resource.

For more information, see Child resource outside parent resource.
Symbolic name for resource of type: factories
properties Properties of the pipeline. Pipeline (required)

MicrosoftAccessSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'MicrosoftAccessSink' (required)

MicrosoftAccessSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
type Copy source type. 'MicrosoftAccessSource' (required)

MongoDbAtlasSink

Name Description Value
type Copy sink type. 'MongoDbAtlasSink' (required)
writeBehavior Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

MongoDbAtlasSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MongoDbAtlasSource' (required)

MongoDbCursorMethodsProperties

Name Description Value
limit Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). any
project Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). any
skip Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). any
sort Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). any

MongoDbSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). any
type Copy source type. 'MongoDbSource' (required)

MongoDbV2Sink

Name Description Value
type Copy sink type. 'MongoDbV2Sink' (required)
writeBehavior Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

MongoDbV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MongoDbV2Source' (required)

MySqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MySqlSource' (required)

NetezzaPartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

NetezzaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
partitionOption The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". any
partitionSettings The settings that will be leveraged for Netezza source partitioning. NetezzaPartitionSettings
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'NetezzaSource' (required)

NotebookParameter

Name Description Value
type Notebook parameter type. 'bool'
'float'
'int'
'string'
value Notebook parameter value. Type: string (or Expression with resultType string). any

ODataSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
type Copy source type. 'ODataSource' (required)

OdbcSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'OdbcSink' (required)

OdbcSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OdbcSource' (required)

Office365Source

Name Description Value
allowedGroups The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). any
dateFilterColumn The column to apply the StartTime and EndTime filters to. Type: string (or Expression with resultType string). any
endTime End time of the requested range for this dataset. Type: string (or Expression with resultType string). any
outputColumns The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] any
startTime Start time of the requested range for this dataset. Type: string (or Expression with resultType string). any
type Copy source type. 'Office365Source' (required)
userScopeFilterUri The user scope uri. Type: string (or Expression with resultType string). any

OracleCloudStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'OracleCloudStorageReadSettings' (required)
wildcardFileName Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

OraclePartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionNames Names of the physical partitions of Oracle table. any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

OracleServiceCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OracleServiceCloudSource' (required)

OracleSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'OracleSink' (required)

OracleSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
oracleReaderQuery Oracle reader query. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Oracle source partitioning. OraclePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OracleSource' (required)

OrcSink

Name Description Value
formatSettings ORC format settings. OrcWriteSettings
storeSettings ORC store settings. StoreWriteSettings
type Copy sink type. 'OrcSink' (required)

OrcSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings ORC store settings. StoreReadSettings
type Copy source type. 'OrcSource' (required)

OrcWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
type The write setting type. string (required)

ParameterDefinitionSpecification

Name Description Value

ParameterSpecification

Name Description Value
defaultValue Default value of parameter. any
type Parameter type. 'Array'
'Bool'
'Float'
'Int'
'Object'
'SecureString'
'String' (required)

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParquetReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'ParquetReadSettings' (required)

ParquetReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

ParquetSink

Name Description Value
formatSettings Parquet format settings. ParquetWriteSettings
storeSettings Parquet store settings. StoreWriteSettings
type Copy sink type. 'ParquetSink' (required)

ParquetSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Parquet format settings. ParquetReadSettings
storeSettings Parquet store settings. StoreReadSettings
type Copy source type. 'ParquetSource' (required)

ParquetWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
type The write setting type. string (required)

PaypalSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PaypalSource' (required)

PhoenixSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PhoenixSource' (required)

Pipeline

Name Description Value
activities List of activities in pipeline. Activity[]
annotations List of tags that can be used for describing the Pipeline. any[]
concurrency The max number of concurrent runs for the pipeline. int

Constraints:
Min value = 1
description The description of the pipeline. string
folder The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. PipelineFolder
parameters List of parameters for pipeline. ParameterDefinitionSpecification
policy Pipeline Policy. PipelinePolicy
runDimensions Dimensions emitted by Pipeline. PipelineRunDimensions
variables List of variables for pipeline. VariableDefinitionSpecification

PipelineElapsedTimeMetricPolicy

Name Description Value
duration TimeSpan value, after which an Azure Monitoring Metric is fired. any

PipelineFolder

Name Description Value
name The name of the folder that this Pipeline is in. string

PipelinePolicy

Name Description Value
elapsedTimeMetric Pipeline ElapsedTime Metric Policy. PipelineElapsedTimeMetricPolicy

PipelineReference

Name Description Value
name Reference name. string
referenceName Reference pipeline name. string (required)
type Pipeline reference type. 'PipelineReference' (required)

PipelineRunDimensions

Name Description Value

PolybaseSettings

Name Description Value
rejectSampleValue Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. any
rejectType Reject type. 'percentage'
'value'
rejectValue Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. any
useTypeDefault Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). any

PostgreSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PostgreSqlSource' (required)

PostgreSqlV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PostgreSqlV2Source' (required)

PowerQuerySink

Name Description Value
dataset Dataset reference. DatasetReference
description Transformation description. string
flowlet Flowlet Reference DataFlowReference
linkedService Linked service reference. LinkedServiceReference
name Transformation name. string (required)
rejectedDataLinkedService Rejected data linked service reference. LinkedServiceReference
schemaLinkedService Schema linked service reference. LinkedServiceReference
script sink script. string

PowerQuerySinkMapping

Name Description Value
dataflowSinks List of sinks mapped to Power Query mashup query. PowerQuerySink[]
queryName Name of the query in Power Query mashup document. string

PrestoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PrestoSource' (required)

QuickBooksSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'QuickBooksSource' (required)

RedirectIncompatibleRowSettings

Name Description Value
linkedServiceName Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). any (required)
path The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). any

RedshiftUnloadSettings

Name Description Value
bucketName The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). any (required)
s3LinkedServiceName The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. LinkedServiceReference (required)

RelationalSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
type Copy source type. 'RelationalSource' (required)

ResponsysSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ResponsysSource' (required)

RestSink

Name Description Value
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string type). any
httpCompressionType HTTP compression type used to send data in compressed format with optimal compression level. Default is None. The only supported option is Gzip. Type: string (or Expression with resultType string). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
requestInterval The time to await before sending next request, in milliseconds any
requestMethod The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). any
type Copy sink type. 'RestSink' (required)

RestSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). any
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
paginationRules The pagination rules to compose next page requests. Type: string (or Expression with resultType string). any
requestBody The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). any
requestInterval The time to await before sending next page request. any
requestMethod The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). any
type Copy source type. 'RestSource' (required)

SalesforceMarketingCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SalesforceMarketingCloudSource' (required)

SalesforceServiceCloudSink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceServiceCloudSink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceServiceCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
readBehavior The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceServiceCloudSource' (required)

SalesforceServiceCloudV2Sink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceServiceCloudV2Sink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceServiceCloudV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
includeDeletedObjects This property controls whether the query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). any
query You can only use Salesforce Object Query Language (SOQL) query with limitations. For SOQL limitations, see this article: https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/queries.htm#SOQL%20Considerations. If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset will be retrieved. Type: string (or Expression with resultType string). any
SOQLQuery Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceServiceCloudV2Source' (required)

SalesforceSink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceSink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
readBehavior The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceSource' (required)

SalesforceV2Sink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceV2Sink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
includeDeletedObjects This property controls whether the query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). any
pageSize Page size for each http request; too large a pageSize will cause a timeout. Default 300,000. Type: integer (or Expression with resultType integer). any
query You can only use Salesforce Object Query Language (SOQL) query with limitations. For SOQL limitations, see this article: https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/queries.htm#SOQL%20Considerations. If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset will be retrieved. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
SOQLQuery Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceV2Source' (required)

SapBwSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query MDX query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapBwSource' (required)

SapCloudForCustomerSink

Name Description Value
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy sink type. 'SapCloudForCustomerSink' (required)
writeBehavior The write behavior for the operation. Default is 'Insert'. 'Insert'
'Update'

SapCloudForCustomerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapCloudForCustomerSource' (required)

SapEccSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapEccSource' (required)

SapHanaPartitionSettings

Name Description Value
partitionColumnName The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

SapHanaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
packetSize The packet size of data read from SAP HANA. Type: integer (or Expression with resultType integer). any
partitionOption The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". any
partitionSettings The settings that will be leveraged for SAP HANA source partitioning. SapHanaPartitionSettings
query SAP HANA Sql query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapHanaSource' (required)

SapOdpSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
extractionMode The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). any
projection Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
selection Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). any
subscriberProcess The subscriber process to manage the delta process. Type: string (or Expression with resultType string). any
type Copy source type. 'SapOdpSource' (required)

SapOpenHubSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
baseRequestId The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer). any
customRfcReadTableFunctionModule Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). any
excludeLastRequest Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sapDataColumnDelimiter The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). any
type Copy source type. 'SapOpenHubSource' (required)

SapTablePartitionSettings

Name Description Value
maxPartitionsNumber The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). any
partitionColumnName The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

SapTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). any
customRfcReadTableFunctionModule Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". any
partitionSettings The settings that will be leveraged for SAP table source partitioning. SapTablePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
rfcTableFields The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). any
rfcTableOptions The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). any
rowCount The number of rows to be retrieved. Type: integer (or Expression with resultType integer). any
rowSkips The number of rows that will be skipped. Type: integer (or Expression with resultType integer). any
sapDataColumnDelimiter The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). any
type Copy source type. 'SapTableSource' (required)

ScriptActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Script' (required)
typeProperties Script activity properties. ScriptActivityTypeProperties (required)

ScriptActivityParameter

Name Description Value
direction The direction of the parameter. 'Input'
'InputOutput'
'Output'
name The name of the parameter. Type: string (or Expression with resultType string). any
size The size of the output direction parameter. int
type The type of the parameter. 'Boolean'
'DateTime'
'DateTimeOffset'
'Decimal'
'Double'
'Guid'
'Int16'
'Int32'
'Int64'
'Single'
'String'
'Timespan'
value The value of the parameter. Type: string (or Expression with resultType string). any

ScriptActivityScriptBlock

Name Description Value
parameters Array of script parameters. Type: array. ScriptActivityParameter[]
text The query text. Type: string (or Expression with resultType string). any (required)
type The type of the query. Please refer to the ScriptType for valid options. Type: string (or Expression with resultType string). any (required)

ScriptActivityTypeProperties

Name Description Value
logSettings Log settings of script activity. ScriptActivityTypePropertiesLogSettings
scriptBlockExecutionTimeout ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
scripts Array of script blocks. Type: array. ScriptActivityScriptBlock[]

ScriptActivityTypePropertiesLogSettings

Name Description Value
logDestination The destination of logs. Type: string. 'ActivityOutput'
'ExternalStore' (required)
logLocationSettings Log location settings customer needs to provide when enabling log. LogLocationSettings

SecretBase

Name Description Value
type Set to 'AzureKeyVaultSecret' for type AzureKeyVaultSecretReference. Set to 'SecureString' for type SecureString. 'AzureKeyVaultSecret'
'SecureString' (required)

SecureInputOutputPolicy

Name Description Value
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool
secureOutput When set to true, Output from activity is considered as secure and will not be logged to monitoring. bool

SecureString

Name Description Value
type Type of the secret. string (required)
value Value of secure string. string (required)

SecureString

Name Description Value
type Type of the secret. 'SecureString' (required)
value Value of secure string. string (required)

ServiceNowSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ServiceNowSource' (required)

ServiceNowV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
expression Expression to filter data from source. ExpressionV2
pageSize Page size of the result. Type: integer (or Expression with resultType integer). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ServiceNowV2Source' (required)

SetVariableActivity

Name Description Value
policy Activity policy. SecureInputOutputPolicy
type Type of activity. 'SetVariable' (required)
typeProperties Set Variable activity properties. SetVariableActivityTypeProperties (required)

SetVariableActivityTypeProperties

Name Description Value
setSystemVariable If set to true, it sets the pipeline run return value. bool
value Value to be set. Could be a static value or Expression. any
variableName Name of the variable whose value needs to be set. string

SftpReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
disableChunking If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'SftpReadSettings' (required)
wildcardFileName Sftp wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Sftp wildcardFolderPath. Type: string (or Expression with resultType string). any

SftpWriteSettings

Name Description Value
operationTimeout Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). any
type The write setting type. 'SftpWriteSettings' (required)
useTempFileRename Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). any

SharePointOnlineListSource

Name Description Value
httpRequestTimeout The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). any
type Copy source type. 'SharePointOnlineListSource' (required)

ShopifySource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ShopifySource' (required)

SkipErrorFile

Name Description Value
dataInconsistency Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). any
fileMissing Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). any

SnowflakeExportCopyCommand

Name Description Value
additionalCopyOptions Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } SnowflakeExportCopyCommandAdditionalCopyOptions
additionalFormatOptions Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" } SnowflakeExportCopyCommandAdditionalFormatOptions
storageIntegration The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with resultType string). any
type The export setting type. string (required)

SnowflakeExportCopyCommandAdditionalCopyOptions

Name Description Value

SnowflakeExportCopyCommandAdditionalFormatOptions

Name Description Value

SnowflakeImportCopyCommand

Name Description Value
additionalCopyOptions Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } SnowflakeImportCopyCommandAdditionalCopyOptions
additionalFormatOptions Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" } SnowflakeImportCopyCommandAdditionalFormatOptions
storageIntegration The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with resultType string). any
type The import setting type. string (required)

SnowflakeImportCopyCommandAdditionalCopyOptions

Name Description Value

SnowflakeImportCopyCommandAdditionalFormatOptions

Name Description Value

SnowflakeSink

Name Description Value
importSettings Snowflake import settings. SnowflakeImportCopyCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'SnowflakeSink' (required)

SnowflakeSource

Name Description Value
exportSettings Snowflake export settings. SnowflakeExportCopyCommand (required)
query Snowflake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'SnowflakeSource' (required)

SnowflakeV2Sink

Name Description Value
importSettings Snowflake import settings. SnowflakeImportCopyCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'SnowflakeV2Sink' (required)

SnowflakeV2Source

Name Description Value
exportSettings Snowflake export settings. SnowflakeExportCopyCommand (required)
query Snowflake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'SnowflakeV2Source' (required)

SparkConfigurationParametrizationReference

Name Description Value
referenceName Reference spark configuration name. Type: string (or Expression with resultType string). any (required)
type Spark configuration reference type. 'SparkConfigurationReference' (required)

SparkSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SparkSource' (required)

SqlDWSink

Name Description Value
allowCopyCommand Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). any
allowPolyBase Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). any
copyCommandSettings Specifies Copy Command related settings when allowCopyCommand is true. DWCopyCommandSettings
polyBaseSettings Specifies PolyBase-related settings when allowPolyBase is true. PolybaseSettings
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlDWSink' (required)
upsertSettings SQL DW upsert settings. SqlDWUpsertSettings
writeBehavior Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum) any

SqlDWSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL Data Warehouse reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. any
type Copy source type. 'SqlDWSource' (required)

SqlDWUpsertSettings

Name Description Value
interimSchemaName Schema name for interim table. Type: string (or Expression with resultType string). any
keys Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). any

SqlMISink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlMISink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into azure SQL MI. Type: string (or Expression with resultType string) any

SqlMISource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlMISource' (required)

SqlPartitionSettings

Name Description Value
partitionColumnName The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). any

SqlServerSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlServerSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into sql server. Type: string (or Expression with resultType string). any

SqlServerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlServerSource' (required)

SqlServerStoredProcedureActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SqlServerStoredProcedure' (required)
typeProperties SQL stored procedure activity properties. SqlServerStoredProcedureActivityTypeProperties (required)

SqlServerStoredProcedureActivityTypeProperties

Name Description Value
storedProcedureName Stored procedure name. Type: string (or Expression with resultType string). any (required)
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any

SqlSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into sql. Type: string (or Expression with resultType string). any

SqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlSource' (required)

SqlUpsertSettings

Name Description Value
interimSchemaName Schema name for interim table. Type: string (or Expression with resultType string). any
keys Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). any
useTempDB Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType boolean). any

SquareSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SquareSource' (required)

SsisAccessCredential

Name Description Value
domain Domain for windows authentication. Type: string (or Expression with resultType string). any (required)
password Password for windows authentication. SecretBase (required)
userName UserName for windows authentication. Type: string (or Expression with resultType string). any (required)

SsisChildPackage

Name Description Value
packageContent Content for embedded child package. Type: string (or Expression with resultType string). any (required)
packageLastModifiedDate Last modified date for embedded child package. string
packageName Name for embedded child package. string
packagePath Path for embedded child package. Type: string (or Expression with resultType string). any (required)

SsisConnectionManager

Name Description Value

SsisConnectionManager

Name Description Value

SsisExecutionCredential

Name Description Value
domain Domain for windows authentication. Type: string (or Expression with resultType string). any (required)
password Password for windows authentication. SecureString (required)
userName UserName for windows authentication. Type: string (or Expression with resultType string). any (required)

SsisExecutionParameter

Name Description Value
value SSIS package execution parameter value. Type: string (or Expression with resultType string). any (required)

SsisLogLocation

Name Description Value
logPath The SSIS package execution log path. Type: string (or Expression with resultType string). any (required)
type The type of SSIS log location. 'File' (required)
typeProperties SSIS package execution log location properties. SsisLogLocationTypeProperties (required)

SsisLogLocationTypeProperties

Name Description Value
accessCredential The package execution log access credential. SsisAccessCredential
logRefreshInterval Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

SsisPackageLocation

Name Description Value
packagePath The SSIS package path. Type: string (or Expression with resultType string). any
type The type of SSIS package location. 'File'
'InlinePackage'
'PackageStore'
'SSISDB'
typeProperties SSIS package location properties. SsisPackageLocationTypeProperties

SsisPackageLocationTypeProperties

Name Description Value
accessCredential The package access credential. SsisAccessCredential
childPackages The embedded child package list. SsisChildPackage[]
configurationAccessCredential The configuration file access credential. SsisAccessCredential
configurationPath The configuration file of the package execution. Type: string (or Expression with resultType string). any
packageContent The embedded package content. Type: string (or Expression with resultType string). any
packageLastModifiedDate The embedded package last modified date. string
packageName The package name. string
packagePassword Password of the package. SecretBase

SsisPropertyOverride

Name Description Value
isSensitive Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. bool
value SSIS package property override value. Type: string (or Expression with resultType string). any (required)

StagingSettings

Name Description Value
enableCompression Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). any
linkedServiceName Staging linked service reference. LinkedServiceReference (required)
path The path to storage for storing the interim data. Type: string (or Expression with resultType string). any

StoreReadSettings

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
type Set to 'AmazonS3CompatibleReadSettings' for type AmazonS3CompatibleReadSettings. Set to 'AmazonS3ReadSettings' for type AmazonS3ReadSettings. Set to 'AzureBlobFSReadSettings' for type AzureBlobFSReadSettings. Set to 'AzureBlobStorageReadSettings' for type AzureBlobStorageReadSettings. Set to 'AzureDataLakeStoreReadSettings' for type AzureDataLakeStoreReadSettings. Set to 'AzureFileStorageReadSettings' for type AzureFileStorageReadSettings. Set to 'FileServerReadSettings' for type FileServerReadSettings. Set to 'FtpReadSettings' for type FtpReadSettings. Set to 'GoogleCloudStorageReadSettings' for type GoogleCloudStorageReadSettings. Set to 'HdfsReadSettings' for type HdfsReadSettings. Set to 'HttpReadSettings' for type HttpReadSettings. Set to 'LakeHouseReadSettings' for type LakeHouseReadSettings. Set to 'OracleCloudStorageReadSettings' for type OracleCloudStorageReadSettings. Set to 'SftpReadSettings' for type SftpReadSettings. 'AmazonS3CompatibleReadSettings'
'AmazonS3ReadSettings'
'AzureBlobFSReadSettings'
'AzureBlobStorageReadSettings'
'AzureDataLakeStoreReadSettings'
'AzureFileStorageReadSettings'
'FileServerReadSettings'
'FtpReadSettings'
'GoogleCloudStorageReadSettings'
'HdfsReadSettings'
'HttpReadSettings'
'LakeHouseReadSettings'
'OracleCloudStorageReadSettings'
'SftpReadSettings' (required)

StoreWriteSettings

Name Description Value
copyBehavior The type of copy behavior for copy sink. any
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Set to 'AzureBlobFSWriteSettings' for type AzureBlobFSWriteSettings. Set to 'AzureBlobStorageWriteSettings' for type AzureBlobStorageWriteSettings. Set to 'AzureDataLakeStoreWriteSettings' for type AzureDataLakeStoreWriteSettings. Set to 'AzureFileStorageWriteSettings' for type AzureFileStorageWriteSettings. Set to 'FileServerWriteSettings' for type FileServerWriteSettings. Set to 'LakeHouseWriteSettings' for type LakeHouseWriteSettings. Set to 'SftpWriteSettings' for type SftpWriteSettings. 'AzureBlobFSWriteSettings'
'AzureBlobStorageWriteSettings'
'AzureDataLakeStoreWriteSettings'
'AzureFileStorageWriteSettings'
'FileServerWriteSettings'
'LakeHouseWriteSettings'
'SftpWriteSettings' (required)

SwitchActivity

Name Description Value
type Type of activity. 'Switch' (required)
typeProperties Switch activity properties. SwitchActivityTypeProperties (required)

SwitchActivityTypeProperties

Name Description Value
cases List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. SwitchCase[]
defaultActivities List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. Activity[]
on An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. Expression (required)

SwitchCase

Name Description Value
activities List of activities to execute for satisfied case condition. Activity[]
value Expected value that satisfies the expression result of the 'on' property. string

SybaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SybaseSource' (required)

SynapseNotebookActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SynapseNotebook' (required)
typeProperties Execute Synapse notebook activity properties. SynapseNotebookActivityTypeProperties (required)

SynapseNotebookActivityTypeProperties

Name Description Value
conf Spark configuration properties, which will override the 'conf' of the notebook you provide. any
configurationType The type of the spark config. 'Artifact'
'Customized'
'Default'
driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string). any
executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string). any
notebook Synapse notebook reference. SynapseNotebookReference (required)
numExecutors Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer). any
parameters Notebook parameters. SynapseNotebookActivityTypePropertiesParameters
sparkConfig Spark configuration property. SynapseNotebookActivityTypePropertiesSparkConfig
sparkPool The name of the big data pool which will be used to execute the notebook. BigDataPoolParametrizationReference
targetSparkConfiguration The spark configuration of the spark job. SparkConfigurationParametrizationReference

SynapseNotebookActivityTypePropertiesParameters

Name Description Value

SynapseNotebookActivityTypePropertiesSparkConfig

Name Description Value

SynapseNotebookReference

Name Description Value
referenceName Reference notebook name. Type: string (or Expression with resultType string). any (required)
type Synapse notebook reference type. 'NotebookReference' (required)

SynapseSparkJobActivityTypeProperties

Name Description Value
args User specified arguments to SynapseSparkJobDefinitionActivity. any[]
className The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). any
conf Spark configuration properties, which will override the 'conf' of the spark job definition you provide. any
configurationType The type of the spark config. 'Artifact'
'Customized'
'Default'
driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). any
executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). any
file The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string). any
files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide. any[]
filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide. any[]
numExecutors Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). any
pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide. any[]
scanFolder Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). any
sparkConfig Spark configuration property. SynapseSparkJobActivityTypePropertiesSparkConfig
sparkJob Synapse spark job reference. SynapseSparkJobReference (required)
targetBigDataPool The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide. BigDataPoolParametrizationReference
targetSparkConfiguration The spark configuration of the spark job. SparkConfigurationParametrizationReference

SynapseSparkJobActivityTypePropertiesSparkConfig

Name Description Value

SynapseSparkJobDefinitionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SparkJob' (required)
typeProperties Execute spark job activity properties. SynapseSparkJobActivityTypeProperties (required)

SynapseSparkJobReference

Name Description Value
referenceName Reference spark job name. Expression with resultType string. any (required)
type Synapse spark job reference type. 'SparkJobDefinitionReference' (required)

TarGZipReadSettings

Name Description Value
preserveCompressionFileNameAsFolder Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'TarGZipReadSettings' (required)

TarReadSettings

Name Description Value
preserveCompressionFileNameAsFolder Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'TarReadSettings' (required)

TeradataPartitionSettings

Name Description Value
partitionColumnName The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

TeradataSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
partitionOption The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". any
partitionSettings The settings that will be leveraged for teradata source partitioning. TeradataPartitionSettings
query Teradata query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'TeradataSource' (required)

UntilActivity

Name Description Value
type Type of activity. 'Until' (required)
typeProperties Until activity properties. UntilActivityTypeProperties (required)

UntilActivityTypeProperties

Name Description Value
activities List of activities to execute. Activity[] (required)
expression An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. Expression (required)
timeout Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

UserProperty

Name Description Value
name User property name. string (required)
value User property value. Type: string (or Expression with resultType string). any (required)

ValidationActivity

Name Description Value
type Type of activity. 'Validation' (required)
typeProperties Validation activity properties. ValidationActivityTypeProperties (required)

ValidationActivityTypeProperties

Name Description Value
childItems Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). any
dataset Validation activity dataset reference. DatasetReference (required)
minimumSize Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). any
sleep A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). any
timeout Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

VariableDefinitionSpecification

Name Description Value

VariableSpecification

Name Description Value
defaultValue Default value of variable. any
type Variable type. 'Array'
'Bool'
'String' (required)

VerticaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'VerticaSource' (required)

WaitActivity

Name Description Value
type Type of activity. 'Wait' (required)
typeProperties Wait activity properties. WaitActivityTypeProperties (required)

WaitActivityTypeProperties

Name Description Value
waitTimeInSeconds Duration in seconds. Type: integer (or Expression with resultType integer). any (required)

WarehouseSink

Name Description Value
allowCopyCommand Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). any
copyCommandSettings Specifies Copy Command related settings when allowCopyCommand is true. DWCopyCommandSettings
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'WarehouseSink' (required)
writeBehavior Write behavior when copying data into azure Microsoft Fabric Data Warehouse. Type: DWWriteBehaviorEnum (or Expression with resultType DWWriteBehaviorEnum). any

WarehouseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the Microsoft Fabric Warehouse source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery Microsoft Fabric Warehouse reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a Microsoft Fabric Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. any
type Copy source type. 'WarehouseSource' (required)

WebActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'WebActivity' (required)
typeProperties Web activity properties. WebActivityTypeProperties (required)

WebActivityAuthentication

Name Description Value
credential The credential reference containing authentication information. CredentialReference
password Password for the PFX file or basic authentication / Secret when used for ServicePrincipal SecretBase
pfx Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal SecretBase
resource Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). any
type Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal) string
username Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). any
userTenant TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). any

WebActivityTypeProperties

Name Description Value
authentication Authentication method used for calling the endpoint. WebActivityAuthentication
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string). any
connectVia The integration runtime reference. IntegrationRuntimeReference
datasets List of datasets passed to web endpoint. DatasetReference[]
disableCertValidation When set to true, Certificate validation will be disabled. bool
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). WebActivityTypePropertiesHeaders
httpRequestTimeout Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes any
linkedServices List of linked services passed to web endpoint. LinkedServiceReference[]
method Rest API method for target endpoint. 'DELETE'
'GET'
'POST'
'PUT' (required)
turnOffAsync Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET call on location given in http response headers. bool
url Web activity target endpoint and path. Type: string (or Expression with resultType string). any (required)

WebActivityTypePropertiesHeaders

Name Description Value

WebHookActivity

Name Description Value
policy Activity policy. SecureInputOutputPolicy
type Type of activity. 'WebHook' (required)
typeProperties WebHook activity properties. WebHookActivityTypeProperties (required)

WebHookActivityTypeProperties

Name Description Value
authentication Authentication method used for calling the endpoint. WebActivityAuthentication
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string). any
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). WebHookActivityTypePropertiesHeaders
method Rest API method for target endpoint. 'POST' (required)
reportStatusOnCallBack When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). any
timeout The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). string
url WebHook activity target endpoint and path. Type: string (or Expression with resultType string). any (required)

WebHookActivityTypePropertiesHeaders

Name Description Value

WebSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
type Copy source type. 'WebSource' (required)

XeroSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'XeroSource' (required)

XmlReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
detectDataType Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
namespacePrefixes Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). any
namespaces Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'XmlReadSettings' (required)
validationMode Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). any

XmlReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
detectDataType Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
namespacePrefixes Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). any
namespaces Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
type The read setting type. string (required)
validationMode Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). any

XmlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Xml format settings. XmlReadSettings
storeSettings Xml store settings. StoreReadSettings
type Copy source type. 'XmlSource' (required)

ZipDeflateReadSettings

Name Description Value
preserveZipFileNameAsFolder Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'ZipDeflateReadSettings' (required)

ZohoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ZohoSource' (required)

Quickstart samples

The following quickstart samples deploy this resource type.

Bicep File Description
Create a V2 data factory This template creates a V2 data factory that copies data from a folder in an Azure Blob Storage to another folder in the storage.
More is possible with Azure Data Factory - One click to try Azure Data Factory This template creates a data factory pipeline for a copy activity from Azure Blob into another Azure Blob

ARM template resource definition

The factories/pipelines resource type can be deployed with operations that target:

For a list of changed properties in each API version, see change log.

Resource format

To create a Microsoft.DataFactory/factories/pipelines resource, add the following JSON to your template.

{
  "type": "Microsoft.DataFactory/factories/pipelines",
  "apiVersion": "2018-06-01",
  "name": "string",
  "properties": {
    "activities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ],
    "annotations": [ {} ],
    "concurrency": "int",
    "description": "string",
    "folder": {
      "name": "string"
    },
    "parameters": {
      "{customized property}": {
        "defaultValue": {},
        "type": "string"
      }
    },
    "policy": {
      "elapsedTimeMetric": {
        "duration": {}
      }
    },
    "runDimensions": {
      "{customized property}": {}
    },
    "variables": {
      "{customized property}": {
        "defaultValue": {},
        "type": "string"
      }
    }
  }
}

StoreReadSettings objects

Set the type property to specify the type of object.

For AmazonS3CompatibleReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "AmazonS3CompatibleReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For AmazonS3ReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "AmazonS3ReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For AzureBlobFSReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "AzureBlobFSReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For AzureBlobStorageReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "AzureBlobStorageReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For AzureDataLakeStoreReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "listAfter": {},
  "listBefore": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "AzureDataLakeStoreReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For AzureFileStorageReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "AzureFileStorageReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For FileServerReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileFilter": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "FileServerReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For FtpReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "disableChunking": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "FtpReadSettings",
  "useBinaryTransfer": {},
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For GoogleCloudStorageReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "GoogleCloudStorageReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For HdfsReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "distcpSettings": {
    "distcpOptions": {},
    "resourceManagerEndpoint": {},
    "tempScriptPath": {}
  },
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "HdfsReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For HttpReadSettings, use:

{
  "additionalColumns": {},
  "additionalHeaders": {},
  "requestBody": {},
  "requestMethod": {},
  "requestTimeout": {},
  "type": "HttpReadSettings"
}

For LakeHouseReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "LakeHouseReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For OracleCloudStorageReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "prefix": {},
  "recursive": {},
  "type": "OracleCloudStorageReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

For SftpReadSettings, use:

{
  "deleteFilesAfterCompletion": {},
  "disableChunking": {},
  "enablePartitionDiscovery": {},
  "fileListPath": {},
  "modifiedDatetimeEnd": {},
  "modifiedDatetimeStart": {},
  "partitionRootPath": {},
  "recursive": {},
  "type": "SftpReadSettings",
  "wildcardFileName": {},
  "wildcardFolderPath": {}
}

Activity objects

Set the type property to specify the type of object.

For AppendVariable, use:

{
  "type": "AppendVariable",
  "typeProperties": {
    "value": {},
    "variableName": "string"
  }
}

For AzureDataExplorerCommand, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "AzureDataExplorerCommand",
  "typeProperties": {
    "command": {},
    "commandTimeout": {}
  }
}

For AzureFunctionActivity, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "AzureFunctionActivity",
  "typeProperties": {
    "body": {},
    "functionName": {},
    "headers": {
      "{customized property}": {}
    },
    "method": "string"
  }
}

For AzureMLBatchExecution, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "AzureMLBatchExecution",
  "typeProperties": {
    "globalParameters": {
      "{customized property}": {}
    },
    "webServiceInputs": {
      "{customized property}": {
        "filePath": {},
        "linkedServiceName": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        }
      }
    },
    "webServiceOutputs": {
      "{customized property}": {
        "filePath": {},
        "linkedServiceName": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        }
      }
    }
  }
}

For AzureMLExecutePipeline, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "AzureMLExecutePipeline",
  "typeProperties": {
    "continueOnStepFailure": {},
    "dataPathAssignments": {},
    "experimentName": {},
    "mlParentRunId": {},
    "mlPipelineEndpointId": {},
    "mlPipelineId": {},
    "mlPipelineParameters": {},
    "version": {}
  }
}

For AzureMLUpdateResource, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "AzureMLUpdateResource",
  "typeProperties": {
    "trainedModelFilePath": {},
    "trainedModelLinkedServiceName": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "trainedModelName": {}
  }
}

For ExecutePipeline, use:

{
  "policy": {
    "secureInput": "bool"
  },
  "type": "ExecutePipeline",
  "typeProperties": {
    "parameters": {
      "{customized property}": {}
    },
    "pipeline": {
      "name": "string",
      "referenceName": "string",
      "type": "string"
    },
    "waitOnCompletion": "bool"
  }
}

For Fail, use:

{
  "type": "Fail",
  "typeProperties": {
    "errorCode": {},
    "message": {}
  }
}

For Filter, use:

{
  "type": "Filter",
  "typeProperties": {
    "condition": {
      "type": "string",
      "value": "string"
    },
    "items": {
      "type": "string",
      "value": "string"
    }
  }
}

For ForEach, use:

{
  "type": "ForEach",
  "typeProperties": {
    "activities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ],
    "batchCount": "int",
    "isSequential": "bool",
    "items": {
      "type": "string",
      "value": "string"
    }
  }
}

For IfCondition, use:

{
  "type": "IfCondition",
  "typeProperties": {
    "expression": {
      "type": "string",
      "value": "string"
    },
    "ifFalseActivities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ],
    "ifTrueActivities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ]
  }
}

For SetVariable, use:

{
  "policy": {
    "secureInput": "bool",
    "secureOutput": "bool"
  },
  "type": "SetVariable",
  "typeProperties": {
    "setSystemVariable": "bool",
    "value": {},
    "variableName": "string"
  }
}

For Switch, use:

{
  "type": "Switch",
  "typeProperties": {
    "cases": [
      {
        "activities": [ {
          "dependsOn": [
            {
              "activity": "string",
              "dependencyConditions": [ "string" ]
            }
          ],
          "description": "string",
          "name": "string",
          "onInactiveMarkAs": "string",
          "state": "string",
          "userProperties": [
            {
              "name": "string",
              "value": {}
            }
          ],
          "type": "string"
          // For remaining properties, see Activity objects
        } ],
        "value": "string"
      }
    ],
    "defaultActivities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ],
    "on": {
      "type": "string",
      "value": "string"
    }
  }
}

For Until, use:

{
  "type": "Until",
  "typeProperties": {
    "activities": [ {
      "dependsOn": [
        {
          "activity": "string",
          "dependencyConditions": [ "string" ]
        }
      ],
      "description": "string",
      "name": "string",
      "onInactiveMarkAs": "string",
      "state": "string",
      "userProperties": [
        {
          "name": "string",
          "value": {}
        }
      ],
      "type": "string"
      // For remaining properties, see Activity objects
    } ],
    "expression": {
      "type": "string",
      "value": "string"
    },
    "timeout": {}
  }
}

For Validation, use:

{
  "type": "Validation",
  "typeProperties": {
    "childItems": {},
    "dataset": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "minimumSize": {},
    "sleep": {},
    "timeout": {}
  }
}

For Wait, use:

{
  "type": "Wait",
  "typeProperties": {
    "waitTimeInSeconds": {}
  }
}

For WebHook, use:

{
  "policy": {
    "secureInput": "bool",
    "secureOutput": "bool"
  },
  "type": "WebHook",
  "typeProperties": {
    "authentication": {
      "credential": {
        "referenceName": "string",
        "type": "string"
      },
      "password": {
        "type": "string"
        // For remaining properties, see SecretBase objects
      },
      "pfx": {
        "type": "string"
        // For remaining properties, see SecretBase objects
      },
      "resource": {},
      "type": "string",
      "username": {},
      "userTenant": {}
    },
    "body": {},
    "headers": {
      "{customized property}": {}
    },
    "method": "string",
    "reportStatusOnCallBack": {},
    "timeout": "string",
    "url": {}
  }
}

For Copy, use:

{
  "inputs": [
    {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    }
  ],
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "outputs": [
    {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    }
  ],
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "Copy",
  "typeProperties": {
    "dataIntegrationUnits": {},
    "enableSkipIncompatibleRow": {},
    "enableStaging": {},
    "logSettings": {
      "copyActivityLogSettings": {
        "enableReliableLogging": {},
        "logLevel": {}
      },
      "enableCopyActivityLog": {},
      "logLocationSettings": {
        "linkedServiceName": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "path": {}
      }
    },
    "logStorageSettings": {
      "enableReliableLogging": {},
      "linkedServiceName": {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      },
      "logLevel": {},
      "path": {}
    },
    "parallelCopies": {},
    "preserve": [ {} ],
    "preserveRules": [ {} ],
    "redirectIncompatibleRowSettings": {
      "linkedServiceName": {},
      "path": {}
    },
    "sink": {
      "disableMetricsCollection": {},
      "maxConcurrentConnections": {},
      "sinkRetryCount": {},
      "sinkRetryWait": {},
      "writeBatchSize": {},
      "writeBatchTimeout": {},
      "type": "string"
      // For remaining properties, see CopySink objects
    },
    "skipErrorFile": {
      "dataInconsistency": {},
      "fileMissing": {}
    },
    "source": {
      "disableMetricsCollection": {},
      "maxConcurrentConnections": {},
      "sourceRetryCount": {},
      "sourceRetryWait": {},
      "type": "string"
      // For remaining properties, see CopySource objects
    },
    "stagingSettings": {
      "enableCompression": {},
      "linkedServiceName": {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      },
      "path": {}
    },
    "translator": {},
    "validateDataConsistency": {}
  }
}

For Custom, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "Custom",
  "typeProperties": {
    "autoUserSpecification": {},
    "command": {},
    "extendedProperties": {
      "{customized property}": {}
    },
    "folderPath": {},
    "referenceObjects": {
      "datasets": [
        {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        }
      ],
      "linkedServices": [
        {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        }
      ]
    },
    "resourceLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "retentionTimeInDays": {}
  }
}

For DataLakeAnalyticsU-SQL, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "DataLakeAnalyticsU-SQL",
  "typeProperties": {
    "compilationMode": {},
    "degreeOfParallelism": {},
    "parameters": {
      "{customized property}": {}
    },
    "priority": {},
    "runtimeVersion": {},
    "scriptLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "scriptPath": {}
  }
}

For DatabricksNotebook, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "DatabricksNotebook",
  "typeProperties": {
    "baseParameters": {
      "{customized property}": {}
    },
    "libraries": [
      {
        "{customized property}": {}
      }
    ],
    "notebookPath": {}
  }
}

For DatabricksSparkJar, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "DatabricksSparkJar",
  "typeProperties": {
    "libraries": [
      {
        "{customized property}": {}
      }
    ],
    "mainClassName": {},
    "parameters": [ {} ]
  }
}

For DatabricksSparkPython, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "DatabricksSparkPython",
  "typeProperties": {
    "libraries": [
      {
        "{customized property}": {}
      }
    ],
    "parameters": [ {} ],
    "pythonFile": {}
  }
}

For Delete, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "Delete",
  "typeProperties": {
    "dataset": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "enableLogging": {},
    "logStorageSettings": {
      "enableReliableLogging": {},
      "linkedServiceName": {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      },
      "logLevel": {},
      "path": {}
    },
    "maxConcurrentConnections": "int",
    "recursive": {},
    "storeSettings": {
      "disableMetricsCollection": {},
      "maxConcurrentConnections": {},
      "type": "string"
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For ExecuteDataFlow, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "ExecuteDataFlow",
  "typeProperties": {
    "compute": {
      "computeType": {},
      "coreCount": {}
    },
    "continuationSettings": {
      "continuationTtlInMinutes": {},
      "customizedCheckpointKey": {},
      "idleCondition": {}
    },
    "continueOnError": {},
    "dataFlow": {
      "datasetParameters": {},
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "integrationRuntime": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "runConcurrently": {},
    "sourceStagingConcurrency": {},
    "staging": {
      "folderPath": {},
      "linkedService": {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    },
    "traceLevel": {}
  }
}

For ExecuteSSISPackage, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "ExecuteSSISPackage",
  "typeProperties": {
    "connectVia": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "environmentPath": {},
    "executionCredential": {
      "domain": {},
      "password": {
        "type": "string",
        "value": "string"
      },
      "userName": {}
    },
    "loggingLevel": {},
    "logLocation": {
      "logPath": {},
      "type": "string",
      "typeProperties": {
        "accessCredential": {
          "domain": {},
          "password": {
            "type": "string"
            // For remaining properties, see SecretBase objects
          },
          "userName": {}
        },
        "logRefreshInterval": {}
      }
    },
    "packageConnectionManagers": {
      "{customized property}": {
        "{customized property}": {
          "value": {}
        }
      }
    },
    "packageLocation": {
      "packagePath": {},
      "type": "string",
      "typeProperties": {
        "accessCredential": {
          "domain": {},
          "password": {
            "type": "string"
            // For remaining properties, see SecretBase objects
          },
          "userName": {}
        },
        "childPackages": [
          {
            "packageContent": {},
            "packageLastModifiedDate": "string",
            "packageName": "string",
            "packagePath": {}
          }
        ],
        "configurationAccessCredential": {
          "domain": {},
          "password": {
            "type": "string"
            // For remaining properties, see SecretBase objects
          },
          "userName": {}
        },
        "configurationPath": {},
        "packageContent": {},
        "packageLastModifiedDate": "string",
        "packageName": "string",
        "packagePassword": {
          "type": "string"
          // For remaining properties, see SecretBase objects
        }
      }
    },
    "packageParameters": {
      "{customized property}": {
        "value": {}
      }
    },
    "projectConnectionManagers": {
      "{customized property}": {
        "{customized property}": {
          "value": {}
        }
      }
    },
    "projectParameters": {
      "{customized property}": {
        "value": {}
      }
    },
    "propertyOverrides": {
      "{customized property}": {
        "isSensitive": "bool",
        "value": {}
      }
    },
    "runtime": {}
  }
}

For ExecuteWranglingDataflow, use:

{
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "ExecuteWranglingDataflow",
  "typeProperties": {
    "compute": {
      "computeType": {},
      "coreCount": {}
    },
    "continuationSettings": {
      "continuationTtlInMinutes": {},
      "customizedCheckpointKey": {},
      "idleCondition": {}
    },
    "continueOnError": {},
    "dataFlow": {
      "datasetParameters": {},
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "integrationRuntime": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "queries": [
      {
        "dataflowSinks": [
          {
            "dataset": {
              "parameters": {
                "{customized property}": {}
              },
              "referenceName": "string",
              "type": "string"
            },
            "description": "string",
            "flowlet": {
              "datasetParameters": {},
              "parameters": {
                "{customized property}": {}
              },
              "referenceName": "string",
              "type": "string"
            },
            "linkedService": {
              "parameters": {
                "{customized property}": {}
              },
              "referenceName": "string",
              "type": "string"
            },
            "name": "string",
            "rejectedDataLinkedService": {
              "parameters": {
                "{customized property}": {}
              },
              "referenceName": "string",
              "type": "string"
            },
            "schemaLinkedService": {
              "parameters": {
                "{customized property}": {}
              },
              "referenceName": "string",
              "type": "string"
            },
            "script": "string"
          }
        ],
        "queryName": "string"
      }
    ],
    "runConcurrently": {},
    "sinks": {
      "{customized property}": {
        "dataset": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "description": "string",
        "flowlet": {
          "datasetParameters": {},
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "linkedService": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "name": "string",
        "rejectedDataLinkedService": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "schemaLinkedService": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "script": "string"
      }
    },
    "sourceStagingConcurrency": {},
    "staging": {
      "folderPath": {},
      "linkedService": {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    },
    "traceLevel": {}
  }
}

For GetMetadata, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "GetMetadata",
  "typeProperties": {
    "dataset": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "fieldList": [ {} ],
    "formatSettings": {
      "type": "string"
      // For remaining properties, see FormatReadSettings objects
    },
    "storeSettings": {
      "disableMetricsCollection": {},
      "maxConcurrentConnections": {},
      "type": "string"
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For HDInsightHive, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "HDInsightHive",
  "typeProperties": {
    "arguments": [ {} ],
    "defines": {
      "{customized property}": {}
    },
    "getDebugInfo": "string",
    "queryTimeout": "int",
    "scriptLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "scriptPath": {},
    "storageLinkedServices": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ],
    "variables": {
      "{customized property}": {}
    }
  }
}

For HDInsightMapReduce, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "HDInsightMapReduce",
  "typeProperties": {
    "arguments": [ {} ],
    "className": {},
    "defines": {
      "{customized property}": {}
    },
    "getDebugInfo": "string",
    "jarFilePath": {},
    "jarLibs": [ {} ],
    "jarLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "storageLinkedServices": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ]
  }
}

For HDInsightPig, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "HDInsightPig",
  "typeProperties": {
    "arguments": {},
    "defines": {
      "{customized property}": {}
    },
    "getDebugInfo": "string",
    "scriptLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "scriptPath": {},
    "storageLinkedServices": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ]
  }
}

For HDInsightSpark, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "HDInsightSpark",
  "typeProperties": {
    "arguments": [ {} ],
    "className": "string",
    "entryFilePath": {},
    "getDebugInfo": "string",
    "proxyUser": {},
    "rootPath": {},
    "sparkConfig": {
      "{customized property}": {}
    },
    "sparkJobLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    }
  }
}

For HDInsightStreaming, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "HDInsightStreaming",
  "typeProperties": {
    "arguments": [ {} ],
    "combiner": {},
    "commandEnvironment": [ {} ],
    "defines": {
      "{customized property}": {}
    },
    "fileLinkedService": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "filePaths": [ {} ],
    "getDebugInfo": "string",
    "input": {},
    "mapper": {},
    "output": {},
    "reducer": {},
    "storageLinkedServices": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ]
  }
}

For Lookup, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "Lookup",
  "typeProperties": {
    "dataset": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "firstRowOnly": {},
    "source": {
      "disableMetricsCollection": {},
      "maxConcurrentConnections": {},
      "sourceRetryCount": {},
      "sourceRetryWait": {},
      "type": "string"
      // For remaining properties, see CopySource objects
    }
  }
}

For Script, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "Script",
  "typeProperties": {
    "logSettings": {
      "logDestination": "string",
      "logLocationSettings": {
        "linkedServiceName": {
          "parameters": {
            "{customized property}": {}
          },
          "referenceName": "string",
          "type": "string"
        },
        "path": {}
      }
    },
    "scriptBlockExecutionTimeout": {},
    "scripts": [
      {
        "parameters": [
          {
            "direction": "string",
            "name": {},
            "size": "int",
            "type": "string",
            "value": {}
          }
        ],
        "text": {},
        "type": {}
      }
    ]
  }
}

For SparkJob, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "SparkJob",
  "typeProperties": {
    "args": [ {} ],
    "className": {},
    "conf": {},
    "configurationType": "string",
    "driverSize": {},
    "executorSize": {},
    "file": {},
    "files": [ {} ],
    "filesV2": [ {} ],
    "numExecutors": {},
    "pythonCodeReference": [ {} ],
    "scanFolder": {},
    "sparkConfig": {
      "{customized property}": {}
    },
    "sparkJob": {
      "referenceName": {},
      "type": "string"
    },
    "targetBigDataPool": {
      "referenceName": {},
      "type": "string"
    },
    "targetSparkConfiguration": {
      "referenceName": {},
      "type": "string"
    }
  }
}

For SqlServerStoredProcedure, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "SqlServerStoredProcedure",
  "typeProperties": {
    "storedProcedureName": {},
    "storedProcedureParameters": {}
  }
}

For SynapseNotebook, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "SynapseNotebook",
  "typeProperties": {
    "conf": {},
    "configurationType": "string",
    "driverSize": {},
    "executorSize": {},
    "notebook": {
      "referenceName": {},
      "type": "string"
    },
    "numExecutors": {},
    "parameters": {
      "{customized property}": {
        "type": "string",
        "value": {}
      }
    },
    "sparkConfig": {
      "{customized property}": {}
    },
    "sparkPool": {
      "referenceName": {},
      "type": "string"
    },
    "targetSparkConfiguration": {
      "referenceName": {},
      "type": "string"
    }
  }
}

For WebActivity, use:

{
  "linkedServiceName": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "policy": {
    "retry": {},
    "retryIntervalInSeconds": "int",
    "secureInput": "bool",
    "secureOutput": "bool",
    "timeout": {}
  },
  "type": "WebActivity",
  "typeProperties": {
    "authentication": {
      "credential": {
        "referenceName": "string",
        "type": "string"
      },
      "password": {
        "type": "string"
        // For remaining properties, see SecretBase objects
      },
      "pfx": {
        "type": "string"
        // For remaining properties, see SecretBase objects
      },
      "resource": {},
      "type": "string",
      "username": {},
      "userTenant": {}
    },
    "body": {},
    "connectVia": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    },
    "datasets": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ],
    "disableCertValidation": "bool",
    "headers": {
      "{customized property}": {}
    },
    "httpRequestTimeout": {},
    "linkedServices": [
      {
        "parameters": {
          "{customized property}": {}
        },
        "referenceName": "string",
        "type": "string"
      }
    ],
    "method": "string",
    "turnOffAsync": "bool",
    "url": {}
  }
}

CompressionReadSettings objects

Set the type property to specify the type of object.

For TarGZipReadSettings, use:

{
  "preserveCompressionFileNameAsFolder": {},
  "type": "TarGZipReadSettings"
}

For TarReadSettings, use:

{
  "preserveCompressionFileNameAsFolder": {},
  "type": "TarReadSettings"
}

For ZipDeflateReadSettings, use:

{
  "preserveZipFileNameAsFolder": {},
  "type": "ZipDeflateReadSettings"
}

StoreWriteSettings objects

Set the type property to specify the type of object.

For AzureBlobFSWriteSettings, use:

{
  "blockSizeInMB": {},
  "type": "AzureBlobFSWriteSettings"
}

For AzureBlobStorageWriteSettings, use:

{
  "blockSizeInMB": {},
  "type": "AzureBlobStorageWriteSettings"
}

For AzureDataLakeStoreWriteSettings, use:

{
  "expiryDateTime": {},
  "type": "AzureDataLakeStoreWriteSettings"
}

For AzureFileStorageWriteSettings, use:

{
  "type": "AzureFileStorageWriteSettings"
}

For FileServerWriteSettings, use:

{
  "type": "FileServerWriteSettings"
}

For LakeHouseWriteSettings, use:

{
  "type": "LakeHouseWriteSettings"
}

For SftpWriteSettings, use:

{
  "operationTimeout": {},
  "type": "SftpWriteSettings",
  "useTempFileRename": {}
}

CopySink objects

Set the type property to specify the type of object.

For AvroSink, use:

{
  "formatSettings": {
    "fileNamePrefix": {},
    "maxRowsPerFile": {},
    "recordName": "string",
    "recordNamespace": "string",
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "AvroSink"
}

For AzureBlobFSSink, use:

{
  "copyBehavior": {},
  "metadata": [
    {
      "name": {},
      "value": {}
    }
  ],
  "type": "AzureBlobFSSink"
}

For AzureDataExplorerSink, use:

{
  "flushImmediately": {},
  "ingestionMappingAsJson": {},
  "ingestionMappingName": {},
  "type": "AzureDataExplorerSink"
}

For AzureDataLakeStoreSink, use:

{
  "copyBehavior": {},
  "enableAdlsSingleFileParallel": {},
  "type": "AzureDataLakeStoreSink"
}

For AzureDatabricksDeltaLakeSink, use:

{
  "importSettings": {
    "dateFormat": {},
    "timestampFormat": {},
    "type": "string"
  },
  "preCopyScript": {},
  "type": "AzureDatabricksDeltaLakeSink"
}

For AzureMySqlSink, use:

{
  "preCopyScript": {},
  "type": "AzureMySqlSink"
}

For AzurePostgreSqlSink, use:

{
  "preCopyScript": {},
  "type": "AzurePostgreSqlSink"
}

For AzureQueueSink, use:

{
  "type": "AzureQueueSink"
}

For AzureSearchIndexSink, use:

{
  "type": "AzureSearchIndexSink",
  "writeBehavior": "string"
}

For AzureSqlSink, use:

{
  "preCopyScript": {},
  "sqlWriterStoredProcedureName": {},
  "sqlWriterTableType": {},
  "sqlWriterUseTableLock": {},
  "storedProcedureParameters": {},
  "storedProcedureTableTypeParameterName": {},
  "tableOption": {},
  "type": "AzureSqlSink",
  "upsertSettings": {
    "interimSchemaName": {},
    "keys": {},
    "useTempDB": {}
  },
  "writeBehavior": {}
}

For AzureTableSink, use:

{
  "azureTableDefaultPartitionKeyValue": {},
  "azureTableInsertType": {},
  "azureTablePartitionKeyName": {},
  "azureTableRowKeyName": {},
  "type": "AzureTableSink"
}

For BinarySink, use:

{
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "BinarySink"
}

For BlobSink, use:

{
  "blobWriterAddHeader": {},
  "blobWriterDateTimeFormat": {},
  "blobWriterOverwriteFiles": {},
  "copyBehavior": {},
  "metadata": [
    {
      "name": {},
      "value": {}
    }
  ],
  "type": "BlobSink"
}

For CommonDataServiceForAppsSink, use:

{
  "alternateKeyName": {},
  "ignoreNullValues": {},
  "type": "CommonDataServiceForAppsSink",
  "writeBehavior": "string"
}

For CosmosDbMongoDbApiSink, use:

{
  "type": "CosmosDbMongoDbApiSink",
  "writeBehavior": {}
}

For CosmosDbSqlApiSink, use:

{
  "type": "CosmosDbSqlApiSink",
  "writeBehavior": {}
}

For DelimitedTextSink, use:

{
  "formatSettings": {
    "fileExtension": {},
    "fileNamePrefix": {},
    "maxRowsPerFile": {},
    "quoteAllText": {},
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "DelimitedTextSink"
}

For DocumentDbCollectionSink, use:

{
  "nestingSeparator": {},
  "type": "DocumentDbCollectionSink",
  "writeBehavior": {}
}

For DynamicsCrmSink, use:

{
  "alternateKeyName": {},
  "ignoreNullValues": {},
  "type": "DynamicsCrmSink",
  "writeBehavior": "string"
}

For DynamicsSink, use:

{
  "alternateKeyName": {},
  "ignoreNullValues": {},
  "type": "DynamicsSink",
  "writeBehavior": "string"
}

For FileSystemSink, use:

{
  "copyBehavior": {},
  "type": "FileSystemSink"
}

For IcebergSink, use:

{
  "formatSettings": {
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "IcebergSink"
}

For InformixSink, use:

{
  "preCopyScript": {},
  "type": "InformixSink"
}

For JsonSink, use:

{
  "formatSettings": {
    "filePattern": {},
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "JsonSink"
}

For LakeHouseTableSink, use:

{
  "partitionNameList": {},
  "partitionOption": {},
  "tableActionOption": {},
  "type": "LakeHouseTableSink"
}

For MicrosoftAccessSink, use:

{
  "preCopyScript": {},
  "type": "MicrosoftAccessSink"
}

For MongoDbAtlasSink, use:

{
  "type": "MongoDbAtlasSink",
  "writeBehavior": {}
}

For MongoDbV2Sink, use:

{
  "type": "MongoDbV2Sink",
  "writeBehavior": {}
}

For OdbcSink, use:

{
  "preCopyScript": {},
  "type": "OdbcSink"
}

For OracleSink, use:

{
  "preCopyScript": {},
  "type": "OracleSink"
}

For OrcSink, use:

{
  "formatSettings": {
    "fileNamePrefix": {},
    "maxRowsPerFile": {},
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "OrcSink"
}

For ParquetSink, use:

{
  "formatSettings": {
    "fileNamePrefix": {},
    "maxRowsPerFile": {},
    "type": "string"
  },
  "storeSettings": {
    "copyBehavior": {},
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "metadata": [
      {
        "name": {},
        "value": {}
      }
    ],
    "type": "string"
    // For remaining properties, see StoreWriteSettings objects
  },
  "type": "ParquetSink"
}

For RestSink, use:

{
  "additionalHeaders": {},
  "httpCompressionType": {},
  "httpRequestTimeout": {},
  "requestInterval": {},
  "requestMethod": {},
  "type": "RestSink"
}

For SalesforceServiceCloudSink, use:

{
  "externalIdFieldName": {},
  "ignoreNullValues": {},
  "type": "SalesforceServiceCloudSink",
  "writeBehavior": "string"
}

For SalesforceServiceCloudV2Sink, use:

{
  "externalIdFieldName": {},
  "ignoreNullValues": {},
  "type": "SalesforceServiceCloudV2Sink",
  "writeBehavior": "string"
}

For SalesforceSink, use:

{
  "externalIdFieldName": {},
  "ignoreNullValues": {},
  "type": "SalesforceSink",
  "writeBehavior": "string"
}

For SalesforceV2Sink, use:

{
  "externalIdFieldName": {},
  "ignoreNullValues": {},
  "type": "SalesforceV2Sink",
  "writeBehavior": "string"
}

For SapCloudForCustomerSink, use:

{
  "httpRequestTimeout": {},
  "type": "SapCloudForCustomerSink",
  "writeBehavior": "string"
}

For SnowflakeSink, use:

{
  "importSettings": {
    "additionalCopyOptions": {
      "{customized property}": {}
    },
    "additionalFormatOptions": {
      "{customized property}": {}
    },
    "storageIntegration": {},
    "type": "string"
  },
  "preCopyScript": {},
  "type": "SnowflakeSink"
}

For SnowflakeV2Sink, use:

{
  "importSettings": {
    "additionalCopyOptions": {
      "{customized property}": {}
    },
    "additionalFormatOptions": {
      "{customized property}": {}
    },
    "storageIntegration": {},
    "type": "string"
  },
  "preCopyScript": {},
  "type": "SnowflakeV2Sink"
}

For SqlDWSink, use:

{
  "allowCopyCommand": {},
  "allowPolyBase": {},
  "copyCommandSettings": {
    "additionalOptions": {
      "{customized property}": "string"
    },
    "defaultValues": [
      {
        "columnName": {},
        "defaultValue": {}
      }
    ]
  },
  "polyBaseSettings": {
    "rejectSampleValue": {},
    "rejectType": "string",
    "rejectValue": {},
    "useTypeDefault": {}
  },
  "preCopyScript": {},
  "sqlWriterUseTableLock": {},
  "tableOption": {},
  "type": "SqlDWSink",
  "upsertSettings": {
    "interimSchemaName": {},
    "keys": {}
  },
  "writeBehavior": {}
}

For SqlMISink, use:

{
  "preCopyScript": {},
  "sqlWriterStoredProcedureName": {},
  "sqlWriterTableType": {},
  "sqlWriterUseTableLock": {},
  "storedProcedureParameters": {},
  "storedProcedureTableTypeParameterName": {},
  "tableOption": {},
  "type": "SqlMISink",
  "upsertSettings": {
    "interimSchemaName": {},
    "keys": {},
    "useTempDB": {}
  },
  "writeBehavior": {}
}

For SqlServerSink, use:

{
  "preCopyScript": {},
  "sqlWriterStoredProcedureName": {},
  "sqlWriterTableType": {},
  "sqlWriterUseTableLock": {},
  "storedProcedureParameters": {},
  "storedProcedureTableTypeParameterName": {},
  "tableOption": {},
  "type": "SqlServerSink",
  "upsertSettings": {
    "interimSchemaName": {},
    "keys": {},
    "useTempDB": {}
  },
  "writeBehavior": {}
}

For SqlSink, use:

{
  "preCopyScript": {},
  "sqlWriterStoredProcedureName": {},
  "sqlWriterTableType": {},
  "sqlWriterUseTableLock": {},
  "storedProcedureParameters": {},
  "storedProcedureTableTypeParameterName": {},
  "tableOption": {},
  "type": "SqlSink",
  "upsertSettings": {
    "interimSchemaName": {},
    "keys": {},
    "useTempDB": {}
  },
  "writeBehavior": {}
}

For WarehouseSink, use:

{
  "allowCopyCommand": {},
  "copyCommandSettings": {
    "additionalOptions": {
      "{customized property}": "string"
    },
    "defaultValues": [
      {
        "columnName": {},
        "defaultValue": {}
      }
    ]
  },
  "preCopyScript": {},
  "tableOption": {},
  "type": "WarehouseSink",
  "writeBehavior": {}
}

FormatReadSettings objects

Set the type property to specify the type of object.

For BinaryReadSettings, use:

{
  "compressionProperties": {
    "type": "string"
    // For remaining properties, see CompressionReadSettings objects
  },
  "type": "BinaryReadSettings"
}

For DelimitedTextReadSettings, use:

{
  "compressionProperties": {
    "type": "string"
    // For remaining properties, see CompressionReadSettings objects
  },
  "skipLineCount": {},
  "type": "DelimitedTextReadSettings"
}

For JsonReadSettings, use:

{
  "compressionProperties": {
    "type": "string"
    // For remaining properties, see CompressionReadSettings objects
  },
  "type": "JsonReadSettings"
}

For ParquetReadSettings, use:

{
  "compressionProperties": {
    "type": "string"
    // For remaining properties, see CompressionReadSettings objects
  },
  "type": "ParquetReadSettings"
}

For XmlReadSettings, use:

{
  "compressionProperties": {
    "type": "string"
    // For remaining properties, see CompressionReadSettings objects
  },
  "detectDataType": {},
  "namespacePrefixes": {},
  "namespaces": {},
  "type": "XmlReadSettings",
  "validationMode": {}
}

CopySource objects

Set the type property to specify the type of object.

For AmazonMWSSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "AmazonMWSSource"
}

For AmazonRdsForOracleSource, use:

{
  "additionalColumns": {},
  "oracleReaderQuery": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionNames": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "type": "AmazonRdsForOracleSource"
}

For AmazonRdsForSqlServerSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "produceAdditionalTypes": {},
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "AmazonRdsForSqlServerSource"
}

For AmazonRedshiftSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "redshiftUnloadSettings": {
    "bucketName": {},
    "s3LinkedServiceName": {
      "parameters": {
        "{customized property}": {}
      },
      "referenceName": "string",
      "type": "string"
    }
  },
  "type": "AmazonRedshiftSource"
}

For AvroSource, use:

{
  "additionalColumns": {},
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "AvroSource"
}

For AzureBlobFSSource, use:

{
  "recursive": {},
  "skipHeaderLineCount": {},
  "treatEmptyAsNull": {},
  "type": "AzureBlobFSSource"
}

For AzureDataExplorerSource, use:

{
  "additionalColumns": {},
  "noTruncation": {},
  "query": {},
  "queryTimeout": {},
  "type": "AzureDataExplorerSource"
}

For AzureDataLakeStoreSource, use:

{
  "recursive": {},
  "type": "AzureDataLakeStoreSource"
}

For AzureDatabricksDeltaLakeSource, use:

{
  "exportSettings": {
    "dateFormat": {},
    "timestampFormat": {},
    "type": "string"
  },
  "query": {},
  "type": "AzureDatabricksDeltaLakeSource"
}

For AzureMariaDBSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "AzureMariaDBSource"
}

For AzureMySqlSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "AzureMySqlSource"
}

For AzurePostgreSqlSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "AzurePostgreSqlSource"
}

For AzureSqlSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "produceAdditionalTypes": {},
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "AzureSqlSource"
}

For AzureTableSource, use:

{
  "additionalColumns": {},
  "azureTableSourceIgnoreTableNotFound": {},
  "azureTableSourceQuery": {},
  "queryTimeout": {},
  "type": "AzureTableSource"
}

For BinarySource, use:

{
  "formatSettings": {
    "compressionProperties": {
      "type": "string"
      // For remaining properties, see CompressionReadSettings objects
    },
    "type": "string"
  },
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "BinarySource"
}

For BlobSource, use:

{
  "recursive": {},
  "skipHeaderLineCount": {},
  "treatEmptyAsNull": {},
  "type": "BlobSource"
}

For CassandraSource, use:

{
  "additionalColumns": {},
  "consistencyLevel": "string",
  "query": {},
  "queryTimeout": {},
  "type": "CassandraSource"
}

For CommonDataServiceForAppsSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "CommonDataServiceForAppsSource"
}

For ConcurSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ConcurSource"
}

For CosmosDbMongoDbApiSource, use:

{
  "additionalColumns": {},
  "batchSize": {},
  "cursorMethods": {
    "limit": {},
    "project": {},
    "skip": {},
    "sort": {}
  },
  "filter": {},
  "queryTimeout": {},
  "type": "CosmosDbMongoDbApiSource"
}

For CosmosDbSqlApiSource, use:

{
  "additionalColumns": {},
  "detectDatetime": {},
  "pageSize": {},
  "preferredRegions": {},
  "query": {},
  "type": "CosmosDbSqlApiSource"
}

For CouchbaseSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "CouchbaseSource"
}

For Db2Source, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "Db2Source"
}

For DelimitedTextSource, use:

{
  "additionalColumns": {},
  "formatSettings": {
    "compressionProperties": {
      "type": "string"
      // For remaining properties, see CompressionReadSettings objects
    },
    "skipLineCount": {},
    "type": "string"
  },
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "DelimitedTextSource"
}

For DocumentDbCollectionSource, use:

{
  "additionalColumns": {},
  "nestingSeparator": {},
  "query": {},
  "queryTimeout": {},
  "type": "DocumentDbCollectionSource"
}

For DrillSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "DrillSource"
}

For DynamicsAXSource, use:

{
  "additionalColumns": {},
  "httpRequestTimeout": {},
  "query": {},
  "queryTimeout": {},
  "type": "DynamicsAXSource"
}

For DynamicsCrmSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "DynamicsCrmSource"
}

For DynamicsSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "DynamicsSource"
}

For EloquaSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "EloquaSource"
}

For ExcelSource, use:

{
  "additionalColumns": {},
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "ExcelSource"
}

For FileSystemSource, use:

{
  "additionalColumns": {},
  "recursive": {},
  "type": "FileSystemSource"
}

For GoogleAdWordsSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "GoogleAdWordsSource"
}

For GoogleBigQuerySource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "GoogleBigQuerySource"
}

For GoogleBigQueryV2Source, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "GoogleBigQueryV2Source"
}

For GreenplumSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "GreenplumSource"
}

For HBaseSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "HBaseSource"
}

For HdfsSource, use:

{
  "distcpSettings": {
    "distcpOptions": {},
    "resourceManagerEndpoint": {},
    "tempScriptPath": {}
  },
  "recursive": {},
  "type": "HdfsSource"
}

For HiveSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "HiveSource"
}

For HttpSource, use:

{
  "httpRequestTimeout": {},
  "type": "HttpSource"
}

For HubspotSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "HubspotSource"
}

For ImpalaSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ImpalaSource"
}

For InformixSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "InformixSource"
}

For JiraSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "JiraSource"
}

For JsonSource, use:

{
  "additionalColumns": {},
  "formatSettings": {
    "compressionProperties": {
      "type": "string"
      // For remaining properties, see CompressionReadSettings objects
    },
    "type": "string"
  },
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "JsonSource"
}

For LakeHouseTableSource, use:

{
  "additionalColumns": {},
  "timestampAsOf": {},
  "type": "LakeHouseTableSource",
  "versionAsOf": {}
}

For MagentoSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "MagentoSource"
}

For MariaDBSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "MariaDBSource"
}

For MarketoSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "MarketoSource"
}

For MicrosoftAccessSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "MicrosoftAccessSource"
}

For MongoDbAtlasSource, use:

{
  "additionalColumns": {},
  "batchSize": {},
  "cursorMethods": {
    "limit": {},
    "project": {},
    "skip": {},
    "sort": {}
  },
  "filter": {},
  "queryTimeout": {},
  "type": "MongoDbAtlasSource"
}

For MongoDbSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "MongoDbSource"
}

For MongoDbV2Source, use:

{
  "additionalColumns": {},
  "batchSize": {},
  "cursorMethods": {
    "limit": {},
    "project": {},
    "skip": {},
    "sort": {}
  },
  "filter": {},
  "queryTimeout": {},
  "type": "MongoDbV2Source"
}

For MySqlSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "MySqlSource"
}

For NetezzaSource, use:

{
  "additionalColumns": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "query": {},
  "queryTimeout": {},
  "type": "NetezzaSource"
}

For ODataSource, use:

{
  "additionalColumns": {},
  "httpRequestTimeout": {},
  "query": {},
  "type": "ODataSource"
}

For OdbcSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "OdbcSource"
}

For Office365Source, use:

{
  "allowedGroups": {},
  "dateFilterColumn": {},
  "endTime": {},
  "outputColumns": {},
  "startTime": {},
  "type": "Office365Source",
  "userScopeFilterUri": {}
}

For OracleServiceCloudSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "OracleServiceCloudSource"
}

For OracleSource, use:

{
  "additionalColumns": {},
  "oracleReaderQuery": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionNames": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "type": "OracleSource"
}

For OrcSource, use:

{
  "additionalColumns": {},
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "OrcSource"
}

For ParquetSource, use:

{
  "additionalColumns": {},
  "formatSettings": {
    "compressionProperties": {
      "type": "string"
      // For remaining properties, see CompressionReadSettings objects
    },
    "type": "string"
  },
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "ParquetSource"
}

For PaypalSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "PaypalSource"
}

For PhoenixSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "PhoenixSource"
}

For PostgreSqlSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "PostgreSqlSource"
}

For PostgreSqlV2Source, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "PostgreSqlV2Source"
}

For PrestoSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "PrestoSource"
}

For QuickBooksSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "QuickBooksSource"
}

For RelationalSource, use:

{
  "additionalColumns": {},
  "query": {},
  "type": "RelationalSource"
}

For ResponsysSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ResponsysSource"
}

For RestSource, use:

{
  "additionalColumns": {},
  "additionalHeaders": {},
  "httpRequestTimeout": {},
  "paginationRules": {},
  "requestBody": {},
  "requestInterval": {},
  "requestMethod": {},
  "type": "RestSource"
}

For SalesforceMarketingCloudSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "SalesforceMarketingCloudSource"
}

For SalesforceServiceCloudSource, use:

{
  "additionalColumns": {},
  "query": {},
  "readBehavior": {},
  "type": "SalesforceServiceCloudSource"
}

For SalesforceServiceCloudV2Source, use:

{
  "additionalColumns": {},
  "includeDeletedObjects": {},
  "query": {},
  "SOQLQuery": {},
  "type": "SalesforceServiceCloudV2Source"
}

For SalesforceSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "readBehavior": {},
  "type": "SalesforceSource"
}

For SalesforceV2Source, use:

{
  "additionalColumns": {},
  "includeDeletedObjects": {},
  "pageSize": {},
  "query": {},
  "queryTimeout": {},
  "SOQLQuery": {},
  "type": "SalesforceV2Source"
}

For SapBwSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "SapBwSource"
}

For SapCloudForCustomerSource, use:

{
  "additionalColumns": {},
  "httpRequestTimeout": {},
  "query": {},
  "queryTimeout": {},
  "type": "SapCloudForCustomerSource"
}

For SapEccSource, use:

{
  "additionalColumns": {},
  "httpRequestTimeout": {},
  "query": {},
  "queryTimeout": {},
  "type": "SapEccSource"
}

For SapHanaSource, use:

{
  "additionalColumns": {},
  "packetSize": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {}
  },
  "query": {},
  "queryTimeout": {},
  "type": "SapHanaSource"
}

For SapOdpSource, use:

{
  "additionalColumns": {},
  "extractionMode": {},
  "projection": {},
  "queryTimeout": {},
  "selection": {},
  "subscriberProcess": {},
  "type": "SapOdpSource"
}

For SapOpenHubSource, use:

{
  "additionalColumns": {},
  "baseRequestId": {},
  "customRfcReadTableFunctionModule": {},
  "excludeLastRequest": {},
  "queryTimeout": {},
  "sapDataColumnDelimiter": {},
  "type": "SapOpenHubSource"
}

For SapTableSource, use:

{
  "additionalColumns": {},
  "batchSize": {},
  "customRfcReadTableFunctionModule": {},
  "partitionOption": {},
  "partitionSettings": {
    "maxPartitionsNumber": {},
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "rfcTableFields": {},
  "rfcTableOptions": {},
  "rowCount": {},
  "rowSkips": {},
  "sapDataColumnDelimiter": {},
  "type": "SapTableSource"
}

For ServiceNowSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ServiceNowSource"
}

For ServiceNowV2Source, use:

{
  "additionalColumns": {},
  "expression": {
    "operands": [
      ...
    ],
    "operators": [ "string" ],
    "type": "string",
    "value": "string"
  },
  "pageSize": {},
  "queryTimeout": {},
  "type": "ServiceNowV2Source"
}

For SharePointOnlineListSource, use:

{
  "httpRequestTimeout": {},
  "query": {},
  "type": "SharePointOnlineListSource"
}

For ShopifySource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ShopifySource"
}

For SnowflakeSource, use:

{
  "exportSettings": {
    "additionalCopyOptions": {
      "{customized property}": {}
    },
    "additionalFormatOptions": {
      "{customized property}": {}
    },
    "storageIntegration": {},
    "type": "string"
  },
  "query": {},
  "type": "SnowflakeSource"
}

For SnowflakeV2Source, use:

{
  "exportSettings": {
    "additionalCopyOptions": {
      "{customized property}": {}
    },
    "additionalFormatOptions": {
      "{customized property}": {}
    },
    "storageIntegration": {},
    "type": "string"
  },
  "query": {},
  "type": "SnowflakeV2Source"
}

For SparkSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "SparkSource"
}

For SqlDWSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "SqlDWSource"
}

For SqlMISource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "produceAdditionalTypes": {},
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "SqlMISource"
}

For SqlServerSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "produceAdditionalTypes": {},
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "SqlServerSource"
}

For SqlSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "SqlSource"
}

For SquareSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "SquareSource"
}

For SybaseSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "SybaseSource"
}

For TeradataSource, use:

{
  "additionalColumns": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "query": {},
  "queryTimeout": {},
  "type": "TeradataSource"
}

For VerticaSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "VerticaSource"
}

For WarehouseSource, use:

{
  "additionalColumns": {},
  "isolationLevel": {},
  "partitionOption": {},
  "partitionSettings": {
    "partitionColumnName": {},
    "partitionLowerBound": {},
    "partitionUpperBound": {}
  },
  "queryTimeout": {},
  "sqlReaderQuery": {},
  "sqlReaderStoredProcedureName": {},
  "storedProcedureParameters": {},
  "type": "WarehouseSource"
}

For XeroSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "XeroSource"
}

For ZohoSource, use:

{
  "additionalColumns": {},
  "query": {},
  "queryTimeout": {},
  "type": "ZohoSource"
}

For WebSource, use:

{
  "additionalColumns": {},
  "type": "WebSource"
}

For XmlSource, use:

{
  "additionalColumns": {},
  "formatSettings": {
    "compressionProperties": {
      "type": "string"
      // For remaining properties, see CompressionReadSettings objects
    },
    "detectDataType": {},
    "namespacePrefixes": {},
    "namespaces": {},
    "type": "string",
    "validationMode": {}
  },
  "storeSettings": {
    "disableMetricsCollection": {},
    "maxConcurrentConnections": {},
    "type": "string"
    // For remaining properties, see StoreReadSettings objects
  },
  "type": "XmlSource"
}

SecretBase objects

Set the type property to specify the type of object.

For AzureKeyVaultSecret, use:

{
  "secretName": {},
  "secretVersion": {},
  "store": {
    "parameters": {
      "{customized property}": {}
    },
    "referenceName": "string",
    "type": "string"
  },
  "type": "AzureKeyVaultSecret"
}

For SecureString, use:

{
  "type": "SecureString",
  "value": "string"
}

Property values

Activity

Name Description Value
dependsOn Activity depends on condition. ActivityDependency[]
description Activity description. string
name Activity name. string (required)
onInactiveMarkAs Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when the activity is inactive, the status will be Succeeded by default. 'Failed'
'Skipped'
'Succeeded'
state Activity state. This is an optional property and if not provided, the state will be Active by default. 'Active'
'Inactive'
type Set to 'AppendVariable' for type AppendVariableActivity. Set to 'AzureDataExplorerCommand' for type AzureDataExplorerCommandActivity. Set to 'AzureFunctionActivity' for type AzureFunctionActivity. Set to 'AzureMLBatchExecution' for type AzureMLBatchExecutionActivity. Set to 'AzureMLExecutePipeline' for type AzureMLExecutePipelineActivity. Set to 'AzureMLUpdateResource' for type AzureMLUpdateResourceActivity. Set to 'ExecutePipeline' for type ExecutePipelineActivity. Set to 'Fail' for type FailActivity. Set to 'Filter' for type FilterActivity. Set to 'ForEach' for type ForEachActivity. Set to 'IfCondition' for type IfConditionActivity. Set to 'SetVariable' for type SetVariableActivity. Set to 'Switch' for type SwitchActivity. Set to 'Until' for type UntilActivity. Set to 'Validation' for type ValidationActivity. Set to 'Wait' for type WaitActivity. Set to 'WebHook' for type WebHookActivity. Set to 'Copy' for type CopyActivity. Set to 'Custom' for type CustomActivity. Set to 'DataLakeAnalyticsU-SQL' for type DataLakeAnalyticsUsqlActivity. Set to 'DatabricksNotebook' for type DatabricksNotebookActivity. Set to 'DatabricksSparkJar' for type DatabricksSparkJarActivity. Set to 'DatabricksSparkPython' for type DatabricksSparkPythonActivity. Set to 'Delete' for type DeleteActivity. Set to 'ExecuteDataFlow' for type ExecuteDataFlowActivity. Set to 'ExecuteSSISPackage' for type ExecuteSsisPackageActivity. Set to 'ExecuteWranglingDataflow' for type ExecuteWranglingDataflowActivity. Set to 'GetMetadata' for type GetMetadataActivity. Set to 'HDInsightHive' for type HDInsightHiveActivity. Set to 'HDInsightMapReduce' for type HDInsightMapReduceActivity. Set to 'HDInsightPig' for type HDInsightPigActivity. Set to 'HDInsightSpark' for type HDInsightSparkActivity. Set to 'HDInsightStreaming' for type HDInsightStreamingActivity. Set to 'Lookup' for type LookupActivity. Set to 'Script' for type ScriptActivity. Set to 'SparkJob' for type SynapseSparkJobDefinitionActivity. 
Set to 'SqlServerStoredProcedure' for type SqlServerStoredProcedureActivity. Set to 'SynapseNotebook' for type SynapseNotebookActivity. Set to 'WebActivity' for type WebActivity. 'AppendVariable'
'AzureDataExplorerCommand'
'AzureFunctionActivity'
'AzureMLBatchExecution'
'AzureMLExecutePipeline'
'AzureMLUpdateResource'
'Copy'
'Custom'
'DatabricksNotebook'
'DatabricksSparkJar'
'DatabricksSparkPython'
'DataLakeAnalyticsU-SQL'
'Delete'
'ExecuteDataFlow'
'ExecutePipeline'
'ExecuteSSISPackage'
'ExecuteWranglingDataflow'
'Fail'
'Filter'
'ForEach'
'GetMetadata'
'HDInsightHive'
'HDInsightMapReduce'
'HDInsightPig'
'HDInsightSpark'
'HDInsightStreaming'
'IfCondition'
'Lookup'
'Script'
'SetVariable'
'SparkJob'
'SqlServerStoredProcedure'
'Switch'
'SynapseNotebook'
'Until'
'Validation'
'Wait'
'WebActivity'
'WebHook' (required)
userProperties Activity user properties. UserProperty[]

ActivityDependency

Name Description Value
activity Activity name. string (required)
dependencyConditions Match-Condition for the dependency. String array containing any of:
'Completed'
'Failed'
'Skipped'
'Succeeded' (required)

ActivityPolicy

Name Description Value
retry Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. any
retryIntervalInSeconds Interval between each retry attempt (in seconds). The default is 30 sec. int

Constraints:
Min value = 30
Max value = 86400
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool
secureOutput When set to true, Output from activity is considered as secure and will not be logged to monitoring. bool
timeout Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

AmazonMWSSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AmazonMWSSource' (required)

AmazonRdsForOraclePartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionNames Names of the physical partitions of AmazonRdsForOracle table. any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

AmazonRdsForOracleSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
oracleReaderQuery AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for AmazonRdsForOracle source partitioning. AmazonRdsForOraclePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AmazonRdsForOracleSource' (required)

AmazonRdsForSqlServerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'AmazonRdsForSqlServerSource' (required)

AmazonRedshiftSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
redshiftUnloadSettings The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. RedshiftUnloadSettings
type Copy source type. 'AmazonRedshiftSource' (required)

AmazonS3CompatibleReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AmazonS3CompatibleReadSettings' (required)
wildcardFileName Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). any

AmazonS3ReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the S3 object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AmazonS3ReadSettings' (required)
wildcardFileName AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). any

AppendVariableActivity

Name Description Value
type Type of activity. 'AppendVariable' (required)
typeProperties Append Variable activity properties. AppendVariableActivityTypeProperties (required)

AppendVariableActivityTypeProperties

Name Description Value
value Value to be appended. Type: could be a static value matching type of the variable item or Expression with resultType matching type of the variable item any
variableName Name of the variable whose value needs to be appended to. string

AvroSink

Name Description Value
formatSettings Avro format settings. AvroWriteSettings
storeSettings Avro store settings. StoreWriteSettings
type Copy sink type. 'AvroSink' (required)

AvroSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings Avro store settings. StoreReadSettings
type Copy source type. 'AvroSource' (required)

AvroWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
recordName Top level record name in write result, which is required in AVRO spec. string
recordNamespace Record namespace in the write result. string
type The write setting type. string (required)

AzureBlobFSReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureBlobFSReadSettings' (required)
wildcardFileName Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureBlobFSSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. Type: string (or Expression with resultType string). any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Copy sink type. 'AzureBlobFSSink' (required)

AzureBlobFSSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
skipHeaderLineCount Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). any
treatEmptyAsNull Treat empty as null. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'AzureBlobFSSource' (required)

AzureBlobFSWriteSettings

Name Description Value
blockSizeInMB Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). any
type The write setting type. 'AzureBlobFSWriteSettings' (required)

AzureBlobStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureBlobStorageReadSettings' (required)
wildcardFileName Azure blob wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureBlobStorageWriteSettings

Name Description Value
blockSizeInMB Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). any
type The write setting type. 'AzureBlobStorageWriteSettings' (required)

AzureDatabricksDeltaLakeExportCommand

Name Description Value
dateFormat Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
timestampFormat Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
type The export setting type. string (required)

AzureDatabricksDeltaLakeImportCommand

Name Description Value
dateFormat Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
timestampFormat Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). any
type The import setting type. string (required)

AzureDatabricksDeltaLakeSink

Name Description Value
importSettings Azure Databricks Delta Lake import settings. AzureDatabricksDeltaLakeImportCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureDatabricksDeltaLakeSink' (required)

AzureDatabricksDeltaLakeSource

Name Description Value
exportSettings Azure Databricks Delta Lake export settings. AzureDatabricksDeltaLakeExportCommand
query Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'AzureDatabricksDeltaLakeSource' (required)

AzureDataExplorerCommandActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureDataExplorerCommand' (required)
typeProperties Azure Data Explorer command activity properties. AzureDataExplorerCommandActivityTypeProperties (required)

AzureDataExplorerCommandActivityTypeProperties

Name Description Value
command A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). any (required)
commandTimeout Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

AzureDataExplorerSink

Name Description Value
flushImmediately If set to true, any aggregation will be skipped. Default is false. Type: boolean. any
ingestionMappingAsJson An explicit column mapping description provided in a json format. Type: string. any
ingestionMappingName A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. any
type Copy sink type. 'AzureDataExplorerSink' (required)

AzureDataExplorerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
noTruncation The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. any
query Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). any (required)
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureDataExplorerSource' (required)

AzureDataLakeStoreReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
listAfter Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). any
listBefore Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureDataLakeStoreReadSettings' (required)
wildcardFileName ADLS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath ADLS wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureDataLakeStoreSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. Type: string (or Expression with resultType string). any
enableAdlsSingleFileParallel Single File Parallel. any
type Copy sink type. 'AzureDataLakeStoreSink' (required)

AzureDataLakeStoreSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'AzureDataLakeStoreSource' (required)

AzureDataLakeStoreWriteSettings

Name Description Value
expiryDateTime Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). any
type The write setting type. 'AzureDataLakeStoreWriteSettings' (required)

AzureFileStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'AzureFileStorageReadSettings' (required)
wildcardFileName Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

AzureFileStorageWriteSettings

Name Description Value
type The write setting type. 'AzureFileStorageWriteSettings' (required)

AzureFunctionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureFunctionActivity' (required)
typeProperties Azure Function activity properties. AzureFunctionActivityTypeProperties (required)

AzureFunctionActivityTypeProperties

Name Description Value
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). any
functionName Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) any (required)
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). AzureFunctionActivityTypePropertiesHeaders
method Rest API method for target endpoint. 'DELETE'
'GET'
'HEAD'
'OPTIONS'
'POST'
'PUT'
'TRACE' (required)

AzureFunctionActivityTypePropertiesHeaders

Name Description Value

AzureKeyVaultSecretReference

Name Description Value
secretName The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). any (required)
secretVersion The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). any
store The Azure Key Vault linked service reference. LinkedServiceReference (required)
type Type of the secret. 'AzureKeyVaultSecret' (required)

AzureMariaDBSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureMariaDBSource' (required)

AzureMLBatchExecutionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLBatchExecution' (required)
typeProperties Azure ML Batch Execution activity properties. AzureMLBatchExecutionActivityTypeProperties (required)

AzureMLBatchExecutionActivityTypeProperties

Name Description Value
globalParameters Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesGlobalParameters
webServiceInputs Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesWebServiceInputs
webServiceOutputs Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. AzureMLBatchExecutionActivityTypePropertiesWebServiceOutputs

AzureMLBatchExecutionActivityTypePropertiesGlobalParameters

Name Description Value

AzureMLBatchExecutionActivityTypePropertiesWebServiceInputs

Name Description Value

AzureMLBatchExecutionActivityTypePropertiesWebServiceOutputs

Name Description Value

AzureMLExecutePipelineActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLExecutePipeline' (required)
typeProperties Azure ML Execute Pipeline activity properties. AzureMLExecutePipelineActivityTypeProperties (required)

AzureMLExecutePipelineActivityTypeProperties

Name Description Value
continueOnStepFailure Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). any
dataPathAssignments Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with resultType object). any
experimentName Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). any
mlParentRunId The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). any
mlPipelineEndpointId ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). any
mlPipelineId ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). any
mlPipelineParameters Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). any
version Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). any

AzureMLUpdateResourceActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'AzureMLUpdateResource' (required)
typeProperties Azure ML Update Resource management activity properties. AzureMLUpdateResourceActivityTypeProperties (required)

AzureMLUpdateResourceActivityTypeProperties

Name Description Value
trainedModelFilePath The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). any (required)
trainedModelLinkedServiceName Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. LinkedServiceReference (required)
trainedModelName Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). any (required)

AzureMLWebServiceFile

Name Description Value
filePath The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). any (required)
linkedServiceName Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. LinkedServiceReference (required)

AzureMySqlSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureMySqlSink' (required)

AzureMySqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureMySqlSource' (required)

AzurePostgreSqlSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzurePostgreSqlSink' (required)

AzurePostgreSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzurePostgreSqlSource' (required)

AzureQueueSink

Name Description Value
type Copy sink type. 'AzureQueueSink' (required)

AzureSearchIndexSink

Name Description Value
type Copy sink type. 'AzureSearchIndexSink' (required)
writeBehavior Specify the write behavior when upserting documents into Azure Search Index. 'Merge'
'Upload'

AzureSqlSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureSqlSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into Azure SQL. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum) any

AzureSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'AzureSqlSource' (required)

AzureTableSink

Name Description Value
azureTableDefaultPartitionKeyValue Azure Table default partition key value. Type: string (or Expression with resultType string). any
azureTableInsertType Azure Table insert type. Type: string (or Expression with resultType string). any
azureTablePartitionKeyName Azure Table partition key name. Type: string (or Expression with resultType string). any
azureTableRowKeyName Azure Table row key name. Type: string (or Expression with resultType string). any
type Copy sink type. 'AzureTableSink' (required)

AzureTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
azureTableSourceIgnoreTableNotFound Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). any
azureTableSourceQuery Azure Table source query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'AzureTableSource' (required)

BigDataPoolParametrizationReference

Name Description Value
referenceName Reference big data pool name. Type: string (or Expression with resultType string). any (required)
type Big data pool reference type. 'BigDataPoolReference' (required)

BinaryReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'BinaryReadSettings' (required)

BinaryReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

BinarySink

Name Description Value
storeSettings Binary store settings. StoreWriteSettings
type Copy sink type. 'BinarySink' (required)

BinarySource

Name Description Value
formatSettings Binary format settings. BinaryReadSettings
storeSettings Binary store settings. StoreReadSettings
type Copy source type. 'BinarySource' (required)

BlobSink

Name Description Value
blobWriterAddHeader Blob writer add header. Type: boolean (or Expression with resultType boolean). any
blobWriterDateTimeFormat Blob writer date time format. Type: string (or Expression with resultType string). any
blobWriterOverwriteFiles Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). any
copyBehavior The type of copy behavior for copy sink. any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Copy sink type. 'BlobSink' (required)

BlobSource

Name Description Value
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
skipHeaderLineCount Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). any
treatEmptyAsNull Treat empty as null. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'BlobSource' (required)

CassandraSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
consistencyLevel The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. 'ALL'
'EACH_QUORUM'
'LOCAL_ONE'
'LOCAL_QUORUM'
'LOCAL_SERIAL'
'ONE'
'QUORUM'
'SERIAL'
'THREE'
'TWO'
query Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CassandraSource' (required)

CommonDataServiceForAppsSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'CommonDataServiceForAppsSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

CommonDataServiceForAppsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'CommonDataServiceForAppsSource' (required)

CompressionReadSettings

Name Description Value
type Set to 'TarGZipReadSettings' for type TarGZipReadSettings. Set to 'TarReadSettings' for type TarReadSettings. Set to 'ZipDeflateReadSettings' for type ZipDeflateReadSettings. 'TarGZipReadSettings'
'TarReadSettings'
'ZipDeflateReadSettings' (required)

ConcurSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ConcurSource' (required)

ContinuationSettingsReference

Name Description Value
continuationTtlInMinutes Continuation TTL in minutes. any
customizedCheckpointKey Customized checkpoint key. any
idleCondition Idle condition. any

CopyActivity

Name Description Value
inputs List of inputs for the activity. DatasetReference[]
linkedServiceName Linked service reference. LinkedServiceReference
outputs List of outputs for the activity. DatasetReference[]
policy Activity policy. ActivityPolicy
type Type of activity. 'Copy' (required)
typeProperties Copy activity properties. CopyActivityTypeProperties (required)

CopyActivityLogSettings

Name Description Value
enableReliableLogging Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). any
logLevel Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). any

CopyActivityTypeProperties

Name Description Value
dataIntegrationUnits Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. any
enableSkipIncompatibleRow Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). any
enableStaging Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). any
logSettings Log settings customer needs provide when enabling log. LogSettings
logStorageSettings (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. LogStorageSettings
parallelCopies Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. any
preserve Preserve rules. any[]
preserveRules Preserve Rules. any[]
redirectIncompatibleRowSettings Redirect incompatible row settings when EnableSkipIncompatibleRow is true. RedirectIncompatibleRowSettings
sink Copy activity sink. CopySink (required)
skipErrorFile Specify the fault tolerance for data consistency. SkipErrorFile
source Copy activity source. CopySource (required)
stagingSettings Specifies interim staging settings when EnableStaging is true. StagingSettings
translator Copy activity translator. If not specified, tabular translator is used. any
validateDataConsistency Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). any

CopySink

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). any
sinkRetryCount Sink retry count. Type: integer (or Expression with resultType integer). any
sinkRetryWait Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Set to 'AvroSink' for type AvroSink. Set to 'AzureBlobFSSink' for type AzureBlobFSSink. Set to 'AzureDataExplorerSink' for type AzureDataExplorerSink. Set to 'AzureDataLakeStoreSink' for type AzureDataLakeStoreSink. Set to 'AzureDatabricksDeltaLakeSink' for type AzureDatabricksDeltaLakeSink. Set to 'AzureMySqlSink' for type AzureMySqlSink. Set to 'AzurePostgreSqlSink' for type AzurePostgreSqlSink. Set to 'AzureQueueSink' for type AzureQueueSink. Set to 'AzureSearchIndexSink' for type AzureSearchIndexSink. Set to 'AzureSqlSink' for type AzureSqlSink. Set to 'AzureTableSink' for type AzureTableSink. Set to 'BinarySink' for type BinarySink. Set to 'BlobSink' for type BlobSink. Set to 'CommonDataServiceForAppsSink' for type CommonDataServiceForAppsSink. Set to 'CosmosDbMongoDbApiSink' for type CosmosDbMongoDbApiSink. Set to 'CosmosDbSqlApiSink' for type CosmosDbSqlApiSink. Set to 'DelimitedTextSink' for type DelimitedTextSink. Set to 'DocumentDbCollectionSink' for type DocumentDbCollectionSink. Set to 'DynamicsCrmSink' for type DynamicsCrmSink. Set to 'DynamicsSink' for type DynamicsSink. Set to 'FileSystemSink' for type FileSystemSink. Set to 'IcebergSink' for type IcebergSink. Set to 'InformixSink' for type InformixSink. Set to 'JsonSink' for type JsonSink. Set to 'LakeHouseTableSink' for type LakeHouseTableSink. Set to 'MicrosoftAccessSink' for type MicrosoftAccessSink. Set to 'MongoDbAtlasSink' for type MongoDbAtlasSink. Set to 'MongoDbV2Sink' for type MongoDbV2Sink. Set to 'OdbcSink' for type OdbcSink. Set to 'OracleSink' for type OracleSink. Set to 'OrcSink' for type OrcSink. Set to 'ParquetSink' for type ParquetSink. Set to 'RestSink' for type RestSink. Set to 'SalesforceServiceCloudSink' for type SalesforceServiceCloudSink. Set to 'SalesforceServiceCloudV2Sink' for type SalesforceServiceCloudV2Sink. Set to 'SalesforceSink' for type SalesforceSink. Set to 'SalesforceV2Sink' for type SalesforceV2Sink. 
Set to 'SapCloudForCustomerSink' for type SapCloudForCustomerSink. Set to 'SnowflakeSink' for type SnowflakeSink. Set to 'SnowflakeV2Sink' for type SnowflakeV2Sink. Set to 'SqlDWSink' for type SqlDWSink. Set to 'SqlMISink' for type SqlMISink. Set to 'SqlServerSink' for type SqlServerSink. Set to 'SqlSink' for type SqlSink. Set to 'WarehouseSink' for type WarehouseSink. 'AvroSink'
'AzureBlobFSSink'
'AzureDatabricksDeltaLakeSink'
'AzureDataExplorerSink'
'AzureDataLakeStoreSink'
'AzureMySqlSink'
'AzurePostgreSqlSink'
'AzureQueueSink'
'AzureSearchIndexSink'
'AzureSqlSink'
'AzureTableSink'
'BinarySink'
'BlobSink'
'CommonDataServiceForAppsSink'
'CosmosDbMongoDbApiSink'
'CosmosDbSqlApiSink'
'DelimitedTextSink'
'DocumentDbCollectionSink'
'DynamicsCrmSink'
'DynamicsSink'
'FileSystemSink'
'IcebergSink'
'InformixSink'
'JsonSink'
'LakeHouseTableSink'
'MicrosoftAccessSink'
'MongoDbAtlasSink'
'MongoDbV2Sink'
'OdbcSink'
'OracleSink'
'OrcSink'
'ParquetSink'
'RestSink'
'SalesforceServiceCloudSink'
'SalesforceServiceCloudV2Sink'
'SalesforceSink'
'SalesforceV2Sink'
'SapCloudForCustomerSink'
'SnowflakeSink'
'SnowflakeV2Sink'
'SqlDWSink'
'SqlMISink'
'SqlServerSink'
'SqlSink'
'WarehouseSink' (required)
writeBatchSize Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. any
writeBatchTimeout Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

CopySource

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
sourceRetryCount Source retry count. Type: integer (or Expression with resultType integer). any
sourceRetryWait Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Set to 'AmazonMWSSource' for type AmazonMWSSource. Set to 'AmazonRdsForOracleSource' for type AmazonRdsForOracleSource. Set to 'AmazonRdsForSqlServerSource' for type AmazonRdsForSqlServerSource. Set to 'AmazonRedshiftSource' for type AmazonRedshiftSource. Set to 'AvroSource' for type AvroSource. Set to 'AzureBlobFSSource' for type AzureBlobFSSource. Set to 'AzureDataExplorerSource' for type AzureDataExplorerSource. Set to 'AzureDataLakeStoreSource' for type AzureDataLakeStoreSource. Set to 'AzureDatabricksDeltaLakeSource' for type AzureDatabricksDeltaLakeSource. Set to 'AzureMariaDBSource' for type AzureMariaDBSource. Set to 'AzureMySqlSource' for type AzureMySqlSource. Set to 'AzurePostgreSqlSource' for type AzurePostgreSqlSource. Set to 'AzureSqlSource' for type AzureSqlSource. Set to 'AzureTableSource' for type AzureTableSource. Set to 'BinarySource' for type BinarySource. Set to 'BlobSource' for type BlobSource. Set to 'CassandraSource' for type CassandraSource. Set to 'CommonDataServiceForAppsSource' for type CommonDataServiceForAppsSource. Set to 'ConcurSource' for type ConcurSource. Set to 'CosmosDbMongoDbApiSource' for type CosmosDbMongoDbApiSource. Set to 'CosmosDbSqlApiSource' for type CosmosDbSqlApiSource. Set to 'CouchbaseSource' for type CouchbaseSource. Set to 'Db2Source' for type Db2Source. Set to 'DelimitedTextSource' for type DelimitedTextSource. Set to 'DocumentDbCollectionSource' for type DocumentDbCollectionSource. Set to 'DrillSource' for type DrillSource. Set to 'DynamicsAXSource' for type DynamicsAXSource. Set to 'DynamicsCrmSource' for type DynamicsCrmSource. Set to 'DynamicsSource' for type DynamicsSource. Set to 'EloquaSource' for type EloquaSource. Set to 'ExcelSource' for type ExcelSource. Set to 'FileSystemSource' for type FileSystemSource. Set to 'GoogleAdWordsSource' for type GoogleAdWordsSource. Set to 'GoogleBigQuerySource' for type GoogleBigQuerySource. Set to 'GoogleBigQueryV2Source' for type GoogleBigQueryV2Source. 
Set to 'GreenplumSource' for type GreenplumSource. Set to 'HBaseSource' for type HBaseSource. Set to 'HdfsSource' for type HdfsSource. Set to 'HiveSource' for type HiveSource. Set to 'HttpSource' for type HttpSource. Set to 'HubspotSource' for type HubspotSource. Set to 'ImpalaSource' for type ImpalaSource. Set to 'InformixSource' for type InformixSource. Set to 'JiraSource' for type JiraSource. Set to 'JsonSource' for type JsonSource. Set to 'LakeHouseTableSource' for type LakeHouseTableSource. Set to 'MagentoSource' for type MagentoSource. Set to 'MariaDBSource' for type MariaDBSource. Set to 'MarketoSource' for type MarketoSource. Set to 'MicrosoftAccessSource' for type MicrosoftAccessSource. Set to 'MongoDbAtlasSource' for type MongoDbAtlasSource. Set to 'MongoDbSource' for type MongoDbSource. Set to 'MongoDbV2Source' for type MongoDbV2Source. Set to 'MySqlSource' for type MySqlSource. Set to 'NetezzaSource' for type NetezzaSource. Set to 'ODataSource' for type ODataSource. Set to 'OdbcSource' for type OdbcSource. Set to 'Office365Source' for type Office365Source. Set to 'OracleServiceCloudSource' for type OracleServiceCloudSource. Set to 'OracleSource' for type OracleSource. Set to 'OrcSource' for type OrcSource. Set to 'ParquetSource' for type ParquetSource. Set to 'PaypalSource' for type PaypalSource. Set to 'PhoenixSource' for type PhoenixSource. Set to 'PostgreSqlSource' for type PostgreSqlSource. Set to 'PostgreSqlV2Source' for type PostgreSqlV2Source. Set to 'PrestoSource' for type PrestoSource. Set to 'QuickBooksSource' for type QuickBooksSource. Set to 'RelationalSource' for type RelationalSource. Set to 'ResponsysSource' for type ResponsysSource. Set to 'RestSource' for type RestSource. Set to 'SalesforceMarketingCloudSource' for type SalesforceMarketingCloudSource. Set to 'SalesforceServiceCloudSource' for type SalesforceServiceCloudSource. Set to 'SalesforceServiceCloudV2Source' for type SalesforceServiceCloudV2Source. 
Set to 'SalesforceSource' for type SalesforceSource. Set to 'SalesforceV2Source' for type SalesforceV2Source. Set to 'SapBwSource' for type SapBwSource. Set to 'SapCloudForCustomerSource' for type SapCloudForCustomerSource. Set to 'SapEccSource' for type SapEccSource. Set to 'SapHanaSource' for type SapHanaSource. Set to 'SapOdpSource' for type SapOdpSource. Set to 'SapOpenHubSource' for type SapOpenHubSource. Set to 'SapTableSource' for type SapTableSource. Set to 'ServiceNowSource' for type ServiceNowSource. Set to 'ServiceNowV2Source' for type ServiceNowV2Source. Set to 'SharePointOnlineListSource' for type SharePointOnlineListSource. Set to 'ShopifySource' for type ShopifySource. Set to 'SnowflakeSource' for type SnowflakeSource. Set to 'SnowflakeV2Source' for type SnowflakeV2Source. Set to 'SparkSource' for type SparkSource. Set to 'SqlDWSource' for type SqlDWSource. Set to 'SqlMISource' for type SqlMISource. Set to 'SqlServerSource' for type SqlServerSource. Set to 'SqlSource' for type SqlSource. Set to 'SquareSource' for type SquareSource. Set to 'SybaseSource' for type SybaseSource. Set to 'TeradataSource' for type TeradataSource. Set to 'VerticaSource' for type VerticaSource. Set to 'WarehouseSource' for type WarehouseSource. Set to 'XeroSource' for type XeroSource. Set to 'ZohoSource' for type ZohoSource. Set to 'WebSource' for type WebSource. Set to 'XmlSource' for type XmlSource. 'AmazonMWSSource'
'AmazonRdsForOracleSource'
'AmazonRdsForSqlServerSource'
'AmazonRedshiftSource'
'AvroSource'
'AzureBlobFSSource'
'AzureDatabricksDeltaLakeSource'
'AzureDataExplorerSource'
'AzureDataLakeStoreSource'
'AzureMariaDBSource'
'AzureMySqlSource'
'AzurePostgreSqlSource'
'AzureSqlSource'
'AzureTableSource'
'BinarySource'
'BlobSource'
'CassandraSource'
'CommonDataServiceForAppsSource'
'ConcurSource'
'CosmosDbMongoDbApiSource'
'CosmosDbSqlApiSource'
'CouchbaseSource'
'Db2Source'
'DelimitedTextSource'
'DocumentDbCollectionSource'
'DrillSource'
'DynamicsAXSource'
'DynamicsCrmSource'
'DynamicsSource'
'EloquaSource'
'ExcelSource'
'FileSystemSource'
'GoogleAdWordsSource'
'GoogleBigQuerySource'
'GoogleBigQueryV2Source'
'GreenplumSource'
'HBaseSource'
'HdfsSource'
'HiveSource'
'HttpSource'
'HubspotSource'
'ImpalaSource'
'InformixSource'
'JiraSource'
'JsonSource'
'LakeHouseTableSource'
'MagentoSource'
'MariaDBSource'
'MarketoSource'
'MicrosoftAccessSource'
'MongoDbAtlasSource'
'MongoDbSource'
'MongoDbV2Source'
'MySqlSource'
'NetezzaSource'
'ODataSource'
'OdbcSource'
'Office365Source'
'OracleServiceCloudSource'
'OracleSource'
'OrcSource'
'ParquetSource'
'PaypalSource'
'PhoenixSource'
'PostgreSqlSource'
'PostgreSqlV2Source'
'PrestoSource'
'QuickBooksSource'
'RelationalSource'
'ResponsysSource'
'RestSource'
'SalesforceMarketingCloudSource'
'SalesforceServiceCloudSource'
'SalesforceServiceCloudV2Source'
'SalesforceSource'
'SalesforceV2Source'
'SapBwSource'
'SapCloudForCustomerSource'
'SapEccSource'
'SapHanaSource'
'SapOdpSource'
'SapOpenHubSource'
'SapTableSource'
'ServiceNowSource'
'ServiceNowV2Source'
'SharePointOnlineListSource'
'ShopifySource'
'SnowflakeSource'
'SnowflakeV2Source'
'SparkSource'
'SqlDWSource'
'SqlMISource'
'SqlServerSource'
'SqlSource'
'SquareSource'
'SybaseSource'
'TeradataSource'
'VerticaSource'
'WarehouseSource'
'WebSource'
'XeroSource'
'XmlSource'
'ZohoSource' (required)

CosmosDbMongoDbApiSink

Name Description Value
type Copy sink type. 'CosmosDbMongoDbApiSink' (required)
writeBehavior Specifies whether the document with the same key is to be overwritten (upsert) rather than throw an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

CosmosDbMongoDbApiSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query. MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CosmosDbMongoDbApiSource' (required)

CosmosDbSqlApiSink

Name Description Value
type Copy sink type. 'CosmosDbSqlApiSink' (required)
writeBehavior Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. any

CosmosDbSqlApiSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
detectDatetime Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). any
pageSize Page size of the result. Type: integer (or Expression with resultType integer). any
preferredRegions Preferred regions. Type: array of strings (or Expression with resultType array of strings). any
query SQL API query. Type: string (or Expression with resultType string). any
type Copy source type. 'CosmosDbSqlApiSource' (required)

CouchbaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'CouchbaseSource' (required)

CredentialReference

Name Description Value
referenceName Reference credential name. string (required)
type Credential reference type. 'CredentialReference' (required)

CustomActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Custom' (required)
typeProperties Custom activity properties. CustomActivityTypeProperties (required)

CustomActivityReferenceObject

Name Description Value
datasets Dataset references. DatasetReference[]
linkedServices Linked service references. LinkedServiceReference[]

CustomActivityTypeProperties

Name Description Value
autoUserSpecification Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType string). any
command Command for custom activity. Type: string (or Expression with resultType string). any (required)
extendedProperties User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. CustomActivityTypePropertiesExtendedProperties
folderPath Folder path for resource files. Type: string (or Expression with resultType string). any
referenceObjects Reference objects CustomActivityReferenceObject
resourceLinkedService Resource linked service reference. LinkedServiceReference
retentionTimeInDays The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). any

CustomActivityTypePropertiesExtendedProperties

Name Description Value

DatabricksNotebookActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksNotebook' (required)
typeProperties Databricks Notebook activity properties. DatabricksNotebookActivityTypeProperties (required)

DatabricksNotebookActivityTypeProperties

Name Description Value
baseParameters Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. DatabricksNotebookActivityTypePropertiesBaseParameters
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksNotebookActivityTypePropertiesLibrariesItem[]
notebookPath The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). any (required)

DatabricksNotebookActivityTypePropertiesBaseParameters

Name Description Value

DatabricksNotebookActivityTypePropertiesLibrariesItem

Name Description Value

DatabricksSparkJarActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksSparkJar' (required)
typeProperties Databricks SparkJar activity properties. DatabricksSparkJarActivityTypeProperties (required)

DatabricksSparkJarActivityTypeProperties

Name Description Value
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksSparkJarActivityTypePropertiesLibrariesItem[]
mainClassName The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). any (required)
parameters Parameters that will be passed to the main method. any[]

DatabricksSparkJarActivityTypePropertiesLibrariesItem

Name Description Value

DatabricksSparkPythonActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DatabricksSparkPython' (required)
typeProperties Databricks SparkPython activity properties. DatabricksSparkPythonActivityTypeProperties (required)

DatabricksSparkPythonActivityTypeProperties

Name Description Value
libraries A list of libraries to be installed on the cluster that will execute the job. DatabricksSparkPythonActivityTypePropertiesLibrariesItem[]
parameters Command line parameters that will be passed to the Python file. any[]
pythonFile The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). any (required)

DatabricksSparkPythonActivityTypePropertiesLibrariesItem

Name Description Value

DataFlowReference

Name Description Value
datasetParameters Reference data flow parameters from dataset. any
parameters Data flow parameters ParameterValueSpecification
referenceName Reference data flow name. string (required)
type Data flow reference type. 'DataFlowReference' (required)

DataFlowStagingInfo

Name Description Value
folderPath Folder path for staging blob. Type: string (or Expression with resultType string) any
linkedService Staging linked service reference. LinkedServiceReference

DataLakeAnalyticsUsqlActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'DataLakeAnalyticsU-SQL' (required)
typeProperties Data Lake Analytics U-SQL activity properties. DataLakeAnalyticsUsqlActivityTypeProperties (required)

DataLakeAnalyticsUsqlActivityTypeProperties

Name Description Value
compilationMode Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). any
degreeOfParallelism The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. any
parameters Parameters for U-SQL job request. DataLakeAnalyticsUsqlActivityTypePropertiesParameters
priority Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. any
runtimeVersion Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). any
scriptLinkedService Script linked service reference. LinkedServiceReference (required)
scriptPath Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). any (required)

DataLakeAnalyticsUsqlActivityTypePropertiesParameters

Name Description Value

DatasetReference

Name Description Value
parameters Arguments for dataset. ParameterValueSpecification
referenceName Reference dataset name. string (required)
type Dataset reference type. 'DatasetReference' (required)

Db2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'Db2Source' (required)

DeleteActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Delete' (required)
typeProperties Delete activity properties. DeleteActivityTypeProperties (required)

DeleteActivityTypeProperties

Name Description Value
dataset Delete activity dataset reference. DatasetReference (required)
enableLogging Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). any
logStorageSettings Log storage settings customer need to provide when enableLogging is true. LogStorageSettings
maxConcurrentConnections The max concurrent connections to connect data source at the same time. int

Constraints:
Min value = 1
recursive If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). any
storeSettings Delete activity store settings. StoreReadSettings

DelimitedTextReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
skipLineCount Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). any
type The read setting type. 'DelimitedTextReadSettings' (required)

DelimitedTextReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
skipLineCount Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). any
type The read setting type. string (required)

DelimitedTextSink

Name Description Value
formatSettings DelimitedText format settings. DelimitedTextWriteSettings
storeSettings DelimitedText store settings. StoreWriteSettings
type Copy sink type. 'DelimitedTextSink' (required)

DelimitedTextSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings DelimitedText format settings. DelimitedTextReadSettings
storeSettings DelimitedText store settings. StoreReadSettings
type Copy source type. 'DelimitedTextSource' (required)

DelimitedTextWriteSettings

Name Description Value
fileExtension The file extension used to create the files. Type: string (or Expression with resultType string). any (required)
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
quoteAllText Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). any
type The write setting type. string (required)

DistcpSettings

Name Description Value
distcpOptions Specifies the Distcp options. Type: string (or Expression with resultType string). any
resourceManagerEndpoint Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). any (required)
tempScriptPath Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). any (required)

DocumentDbCollectionSink

Name Description Value
nestingSeparator Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). any
type Copy sink type. 'DocumentDbCollectionSink' (required)
writeBehavior Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. any

DocumentDbCollectionSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
nestingSeparator Nested properties separator. Type: string (or Expression with resultType string). any
query Documents query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DocumentDbCollectionSource' (required)

DrillSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DrillSource' (required)

DWCopyCommandDefaultValue

Name Description Value
columnName Column name. Type: object (or Expression with resultType string). any
defaultValue The default value of the column. Type: object (or Expression with resultType string). any

DWCopyCommandSettings

Name Description Value
additionalOptions Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" } DWCopyCommandSettingsAdditionalOptions
defaultValues Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). DWCopyCommandDefaultValue[]

DWCopyCommandSettingsAdditionalOptions

Name Description Value

DynamicsAXSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'DynamicsAXSource' (required)

DynamicsCrmSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'DynamicsCrmSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

DynamicsCrmSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'DynamicsCrmSource' (required)

DynamicsSink

Name Description Value
alternateKeyName The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'DynamicsSink' (required)
writeBehavior The write behavior for the operation. 'Upsert' (required)

DynamicsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). any
type Copy source type. 'DynamicsSource' (required)

EloquaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'EloquaSource' (required)

ExcelSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings Excel store settings. StoreReadSettings
type Copy source type. 'ExcelSource' (required)

ExecuteDataFlowActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteDataFlow' (required)
typeProperties Execute data flow activity properties. ExecuteDataFlowActivityTypeProperties (required)

ExecuteDataFlowActivityTypeProperties

Name Description Value
compute Compute properties for data flow activity. ExecuteDataFlowActivityTypePropertiesCompute
continuationSettings Continuation settings for execute data flow activity. ContinuationSettingsReference
continueOnError Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) any
dataFlow Data flow reference. DataFlowReference (required)
integrationRuntime The integration runtime reference. IntegrationRuntimeReference
runConcurrently Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) any
sourceStagingConcurrency Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) any
staging Staging info for execute data flow activity. DataFlowStagingInfo
traceLevel Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) any

ExecuteDataFlowActivityTypePropertiesCompute

Name Description Value
computeType Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string) any
coreCount Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer) any

ExecutePipelineActivity

Name Description Value
policy Execute pipeline activity policy. ExecutePipelineActivityPolicy
type Type of activity. 'ExecutePipeline' (required)
typeProperties Execute pipeline activity properties. ExecutePipelineActivityTypeProperties (required)

ExecutePipelineActivityPolicy

Name Description Value
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool

ExecutePipelineActivityTypeProperties

Name Description Value
parameters Pipeline parameters. ParameterValueSpecification
pipeline Pipeline reference. PipelineReference (required)
waitOnCompletion Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. bool

ExecutePowerQueryActivityTypeProperties

Name Description Value
compute Compute properties for data flow activity. ExecuteDataFlowActivityTypePropertiesCompute
continuationSettings Continuation settings for execute data flow activity. ContinuationSettingsReference
continueOnError Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) any
dataFlow Data flow reference. DataFlowReference (required)
integrationRuntime The integration runtime reference. IntegrationRuntimeReference
queries List of mapping for Power Query mashup query to sink dataset(s). PowerQuerySinkMapping[]
runConcurrently Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) any
sinks (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. ExecutePowerQueryActivityTypePropertiesSinks
sourceStagingConcurrency Specify number of parallel staging for sources applicable to the sink. Type: integer (or Expression with resultType integer) any
staging Staging info for execute data flow activity. DataFlowStagingInfo
traceLevel Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) any

ExecutePowerQueryActivityTypePropertiesSinks

Name Description Value

ExecuteSsisPackageActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteSSISPackage' (required)
typeProperties Execute SSIS package activity properties. ExecuteSsisPackageActivityTypeProperties (required)

ExecuteSsisPackageActivityTypeProperties

Name Description Value
connectVia The integration runtime reference. IntegrationRuntimeReference (required)
environmentPath The environment path to execute the SSIS package. Type: string (or Expression with resultType string). any
executionCredential The package execution credential. SsisExecutionCredential
loggingLevel The logging level of SSIS package execution. Type: string (or Expression with resultType string). any
logLocation SSIS package execution log location. SsisLogLocation
packageConnectionManagers The package level connection managers to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPackageConnectionManagers
packageLocation SSIS package location. SsisPackageLocation (required)
packageParameters The package level parameters to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPackageParameters
projectConnectionManagers The project level connection managers to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesProjectConnectionManagers
projectParameters The project level parameters to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesProjectParameters
propertyOverrides The property overrides to execute the SSIS package. ExecuteSsisPackageActivityTypePropertiesPropertyOverrides
runtime Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). any

ExecuteSsisPackageActivityTypePropertiesPackageConnectionManagers

Name Description Value

ExecuteSsisPackageActivityTypePropertiesPackageParameters

Name Description Value

ExecuteSsisPackageActivityTypePropertiesProjectConnectionManagers

Name Description Value

ExecuteSsisPackageActivityTypePropertiesProjectParameters

Name Description Value

ExecuteSsisPackageActivityTypePropertiesPropertyOverrides

Name Description Value

ExecuteWranglingDataflowActivity

Name Description Value
policy Activity policy. ActivityPolicy
type Type of activity. 'ExecuteWranglingDataflow' (required)
typeProperties Execute power query activity properties. ExecutePowerQueryActivityTypeProperties (required)

Expression

Name Description Value
type Expression type. 'Expression' (required)
value Expression value. string (required)

ExpressionV2

Name Description Value
operands List of nested expressions. ExpressionV2[]
operators Expression operator value. Type: list of strings. string[]
type Type of expressions supported by the system. Type: string. 'Binary'
'Constant'
'Field'
'NAry'
'Unary'
value Value for Constant/Field. Type: string. string

FailActivity

Name Description Value
type Type of activity. 'Fail' (required)
typeProperties Fail activity properties. FailActivityTypeProperties (required)

FailActivityTypeProperties

Name Description Value
errorCode The error code that categorizes the error type of the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). any (required)
message The error message that surfaced in the Fail activity. It can be dynamic content that's evaluated to a non empty/blank string at runtime. Type: string (or Expression with resultType string). any (required)

FileServerReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileFilter Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'FileServerReadSettings' (required)
wildcardFileName FileServer wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath FileServer wildcardFolderPath. Type: string (or Expression with resultType string). any

FileServerWriteSettings

Name Description Value
type The write setting type. 'FileServerWriteSettings' (required)

FileSystemSink

Name Description Value
copyBehavior The type of copy behavior for copy sink. any
type Copy sink type. 'FileSystemSink' (required)

FileSystemSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'FileSystemSource' (required)

FilterActivity

Name Description Value
type Type of activity. 'Filter' (required)
typeProperties Filter activity properties. FilterActivityTypeProperties (required)

FilterActivityTypeProperties

Name Description Value
condition Condition to be used for filtering the input. Expression (required)
items Input array on which filter should be applied. Expression (required)

ForEachActivity

Name Description Value
type Type of activity. 'ForEach' (required)
typeProperties ForEach activity properties. ForEachActivityTypeProperties (required)

ForEachActivityTypeProperties

Name Description Value
activities List of activities to execute. Activity[] (required)
batchCount Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). int

Constraints:
Max value = 50
isSequential Should the loop be executed in sequence or in parallel (max 50) bool
items Collection to iterate. Expression (required)

FormatReadSettings

Name Description Value
type Set to 'BinaryReadSettings' for type BinaryReadSettings. Set to 'DelimitedTextReadSettings' for type DelimitedTextReadSettings. Set to 'JsonReadSettings' for type JsonReadSettings. Set to 'ParquetReadSettings' for type ParquetReadSettings. Set to 'XmlReadSettings' for type XmlReadSettings. 'BinaryReadSettings'
'DelimitedTextReadSettings'
'JsonReadSettings'
'ParquetReadSettings'
'XmlReadSettings' (required)

FtpReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
disableChunking If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'FtpReadSettings' (required)
useBinaryTransfer Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType boolean). any
wildcardFileName Ftp wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Ftp wildcardFolderPath. Type: string (or Expression with resultType string). any

GetMetadataActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'GetMetadata' (required)
typeProperties GetMetadata activity properties. GetMetadataActivityTypeProperties (required)

GetMetadataActivityTypeProperties

Name Description Value
dataset GetMetadata activity dataset reference. DatasetReference (required)
fieldList Fields of metadata to get from dataset. any[]
formatSettings GetMetadata activity format settings. FormatReadSettings
storeSettings GetMetadata activity store settings. StoreReadSettings

GoogleAdWordsSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleAdWordsSource' (required)

GoogleBigQuerySource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleBigQuerySource' (required)

GoogleBigQueryV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GoogleBigQueryV2Source' (required)

GoogleCloudStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'GoogleCloudStorageReadSettings' (required)
wildcardFileName Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

GreenplumSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'GreenplumSource' (required)

HBaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HBaseSource' (required)

HdfsReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
distcpSettings Specifies Distcp-related settings. DistcpSettings
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'HdfsReadSettings' (required)
wildcardFileName HDFS wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath HDFS wildcardFolderPath. Type: string (or Expression with resultType string). any

HdfsSource

Name Description Value
distcpSettings Specifies Distcp-related settings. DistcpSettings
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type Copy source type. 'HdfsSource' (required)

HDInsightHiveActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightHive' (required)
typeProperties HDInsight Hive activity properties. HDInsightHiveActivityTypeProperties (required)

HDInsightHiveActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
defines Allows user to specify defines for Hive job request. HDInsightHiveActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
queryTimeout Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) int
scriptLinkedService Script linked service reference. LinkedServiceReference
scriptPath Script path. Type: string (or Expression with resultType string). any
storageLinkedServices Storage linked service references. LinkedServiceReference[]
variables User specified arguments under hivevar namespace. HDInsightHiveActivityTypePropertiesVariables

HDInsightHiveActivityTypePropertiesDefines

Name Description Value

HDInsightHiveActivityTypePropertiesVariables

Name Description Value

HDInsightMapReduceActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightMapReduce' (required)
typeProperties HDInsight MapReduce activity properties. HDInsightMapReduceActivityTypeProperties (required)

HDInsightMapReduceActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
className Class name. Type: string (or Expression with resultType string). any (required)
defines Allows user to specify defines for the MapReduce job request. HDInsightMapReduceActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
jarFilePath Jar path. Type: string (or Expression with resultType string). any (required)
jarLibs Jar libs. any[]
jarLinkedService Jar linked service reference. LinkedServiceReference
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightMapReduceActivityTypePropertiesDefines

Name Description Value

HDInsightPigActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightPig' (required)
typeProperties HDInsight Pig activity properties. HDInsightPigActivityTypeProperties (required)

HDInsightPigActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). any
defines Allows user to specify defines for Pig job request. HDInsightPigActivityTypePropertiesDefines
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
scriptLinkedService Script linked service reference. LinkedServiceReference
scriptPath Script path. Type: string (or Expression with resultType string). any
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightPigActivityTypePropertiesDefines

Name Description Value

HDInsightSparkActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightSpark' (required)
typeProperties HDInsight spark activity properties. HDInsightSparkActivityTypeProperties (required)

HDInsightSparkActivityTypeProperties

Name Description Value
arguments The user-specified arguments to HDInsightSparkActivity. any[]
className The application's Java/Spark main class. string
entryFilePath The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). any (required)
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
proxyUser The user to impersonate that will execute the job. Type: string (or Expression with resultType string). any
rootPath The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). any (required)
sparkConfig Spark configuration property. HDInsightSparkActivityTypePropertiesSparkConfig
sparkJobLinkedService The storage linked service for uploading the entry file and dependencies, and for receiving logs. LinkedServiceReference

HDInsightSparkActivityTypePropertiesSparkConfig

Name Description Value

HDInsightStreamingActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'HDInsightStreaming' (required)
typeProperties HDInsight streaming activity properties. HDInsightStreamingActivityTypeProperties (required)

HDInsightStreamingActivityTypeProperties

Name Description Value
arguments User specified arguments to HDInsightActivity. any[]
combiner Combiner executable name. Type: string (or Expression with resultType string). any
commandEnvironment Command line environment values. any[]
defines Allows user to specify defines for streaming job request. HDInsightStreamingActivityTypePropertiesDefines
fileLinkedService Linked service reference where the files are located. LinkedServiceReference
filePaths Paths to streaming job files. Can be directories. any[] (required)
getDebugInfo Debug info option. 'Always'
'Failure'
'None'
input Input blob path. Type: string (or Expression with resultType string). any (required)
mapper Mapper executable name. Type: string (or Expression with resultType string). any (required)
output Output blob path. Type: string (or Expression with resultType string). any (required)
reducer Reducer executable name. Type: string (or Expression with resultType string). any (required)
storageLinkedServices Storage linked service references. LinkedServiceReference[]

HDInsightStreamingActivityTypePropertiesDefines

Name Description Value

HiveSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HiveSource' (required)

HttpReadSettings

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). any
requestBody The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). any
requestMethod The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). any
requestTimeout Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with resultType string). any
type The read setting type. 'HttpReadSettings' (required)

HttpSource

Name Description Value
httpRequestTimeout Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HttpSource' (required)

HubspotSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'HubspotSource' (required)

IcebergSink

Name Description Value
formatSettings Iceberg format settings. IcebergWriteSettings
storeSettings Iceberg store settings. StoreWriteSettings
type Copy sink type. 'IcebergSink' (required)

IcebergWriteSettings

Name Description Value
type The write setting type. string (required)

IfConditionActivity

Name Description Value
type Type of activity. 'IfCondition' (required)
typeProperties IfCondition activity properties. IfConditionActivityTypeProperties (required)

IfConditionActivityTypeProperties

Name Description Value
expression An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. Expression (required)
ifFalseActivities List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. Activity[]
ifTrueActivities List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. Activity[]

ImpalaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ImpalaSource' (required)

InformixSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'InformixSink' (required)

InformixSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'InformixSource' (required)

IntegrationRuntimeReference

Name Description Value
parameters Arguments for integration runtime. ParameterValueSpecification
referenceName Reference integration runtime name. string (required)
type Type of integration runtime. 'IntegrationRuntimeReference' (required)

JiraSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'JiraSource' (required)

JsonReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'JsonReadSettings' (required)

JsonReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

JsonSink

Name Description Value
formatSettings Json format settings. JsonWriteSettings
storeSettings Json store settings. StoreWriteSettings
type Copy sink type. 'JsonSink' (required)

JsonSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Json format settings. JsonReadSettings
storeSettings Json store settings. StoreReadSettings
type Copy source type. 'JsonSource' (required)

JsonWriteSettings

Name Description Value
filePattern File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. any
type The write setting type. string (required)

LakeHouseReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'LakeHouseReadSettings' (required)
wildcardFileName Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string (or Expression with resultType string). any

LakeHouseTableSink

Name Description Value
partitionNameList Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType array of objects). any
partitionOption Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will be a new partition. Possible values include: "None", "PartitionByKey". any
tableActionOption The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". any
type Copy sink type. 'LakeHouseTableSink' (required)

LakeHouseTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
timestampAsOf Query an older snapshot by timestamp. Type: string (or Expression with resultType string). any
type Copy source type. 'LakeHouseTableSource' (required)
versionAsOf Query an older snapshot by version. Type: integer (or Expression with resultType integer). any

LakeHouseWriteSettings

Name Description Value
type The write setting type. 'LakeHouseWriteSettings' (required)

LinkedServiceReference

Name Description Value
parameters Arguments for LinkedService. ParameterValueSpecification
referenceName Reference LinkedService name. string (required)
type Linked service reference type. 'LinkedServiceReference' (required)

LogLocationSettings

Name Description Value
linkedServiceName Log storage linked service reference. LinkedServiceReference (required)
path The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). any

LogSettings

Name Description Value
copyActivityLogSettings Specifies settings for copy activity log. CopyActivityLogSettings
enableCopyActivityLog Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). any
logLocationSettings Log location settings customer needs to provide when enabling log. LogLocationSettings (required)

LogStorageSettings

Name Description Value
enableReliableLogging Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). any
linkedServiceName Log storage linked service reference. LinkedServiceReference (required)
logLevel Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). any
path The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). any

LookupActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Lookup' (required)
typeProperties Lookup activity properties. LookupActivityTypeProperties (required)

LookupActivityTypeProperties

Name Description Value
dataset Lookup activity dataset reference. DatasetReference (required)
firstRowOnly Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). any
source Dataset-specific source properties, same as copy activity source. CopySource (required)

MagentoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MagentoSource' (required)

MariaDBSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MariaDBSource' (required)

MarketoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MarketoSource' (required)

MetadataItem

Name Description Value
name Metadata item key name. Type: string (or Expression with resultType string). any
value Metadata item value. Type: string (or Expression with resultType string). any

Microsoft.DataFactory/factories/pipelines

Name Description Value
apiVersion The api version '2018-06-01'
name The resource name string

Constraints:
Min length = 1
Max length = 260
Pattern = ^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$ (required)
properties Properties of the pipeline. Pipeline (required)
type The resource type 'Microsoft.DataFactory/factories/pipelines'

MicrosoftAccessSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'MicrosoftAccessSink' (required)

MicrosoftAccessSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
type Copy source type. 'MicrosoftAccessSource' (required)

MongoDbAtlasSink

Name Description Value
type Copy sink type. 'MongoDbAtlasSink' (required)
writeBehavior Specifies whether a document with the same key should be overwritten (upsert) rather than throwing an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

MongoDbAtlasSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from the MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MongoDbAtlasSource' (required)

MongoDbCursorMethodsProperties

Name Description Value
limit Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). any
project Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). any
skip Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). any
sort Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). any

MongoDbSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). any
type Copy source type. 'MongoDbSource' (required)

MongoDbV2Sink

Name Description Value
type Copy sink type. 'MongoDbV2Sink' (required)
writeBehavior Specifies whether a document with the same key should be overwritten (upsert) rather than throwing an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). any

MongoDbV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). any
cursorMethods Cursor methods for Mongodb query MongoDbCursorMethodsProperties
filter Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MongoDbV2Source' (required)

MySqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'MySqlSource' (required)

NetezzaPartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

NetezzaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
partitionOption The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". any
partitionSettings The settings that will be leveraged for Netezza source partitioning. NetezzaPartitionSettings
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'NetezzaSource' (required)

NotebookParameter

Name Description Value
type Notebook parameter type. 'bool'
'float'
'int'
'string'
value Notebook parameter value. Type: string (or Expression with resultType string). any

ODataSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
type Copy source type. 'ODataSource' (required)

OdbcSink

Name Description Value
preCopyScript A query to execute before starting the copy. Type: string (or Expression with resultType string). any
type Copy sink type. 'OdbcSink' (required)

OdbcSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OdbcSource' (required)

Office365Source

Name Description Value
allowedGroups The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). any
dateFilterColumn The Column to apply the <paramref name="StartTime"/> and <paramref name="EndTime"/>. Type: string (or Expression with resultType string). any
endTime End time of the requested range for this dataset. Type: string (or Expression with resultType string). any
outputColumns The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] any
startTime Start time of the requested range for this dataset. Type: string (or Expression with resultType string). any
type Copy source type. 'Office365Source' (required)
userScopeFilterUri The user scope uri. Type: string (or Expression with resultType string). any

OracleCloudStorageReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
prefix The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'OracleCloudStorageReadSettings' (required)
wildcardFileName Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). any

OraclePartitionSettings

Name Description Value
partitionColumnName The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionNames Names of the physical partitions of Oracle table. any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

OracleServiceCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OracleServiceCloudSource' (required)

OracleSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'OracleSink' (required)

OracleSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
oracleReaderQuery Oracle reader query. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Oracle source partitioning. OraclePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'OracleSource' (required)

OrcSink

Name Description Value
formatSettings ORC format settings. OrcWriteSettings
storeSettings ORC store settings. StoreWriteSettings
type Copy sink type. 'OrcSink' (required)

OrcSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
storeSettings ORC store settings. StoreReadSettings
type Copy source type. 'OrcSource' (required)

OrcWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
type The write setting type. string (required)

ParameterDefinitionSpecification

Name Description Value

ParameterSpecification

Name Description Value
defaultValue Default value of parameter. any
type Parameter type. 'Array'
'Bool'
'Float'
'Int'
'Object'
'SecureString'
'String' (required)

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParameterValueSpecification

Name Description Value

ParquetReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. 'ParquetReadSettings' (required)

ParquetReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
type The read setting type. string (required)

ParquetSink

Name Description Value
formatSettings Parquet format settings. ParquetWriteSettings
storeSettings Parquet store settings. StoreWriteSettings
type Copy sink type. 'ParquetSink' (required)

ParquetSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Parquet format settings. ParquetReadSettings
storeSettings Parquet store settings. StoreReadSettings
type Copy source type. 'ParquetSource' (required)

ParquetWriteSettings

Name Description Value
fileNamePrefix Specifies the file name pattern <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). any
maxRowsPerFile Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). any
type The write setting type. string (required)

PaypalSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PaypalSource' (required)

PhoenixSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PhoenixSource' (required)

Pipeline

Name Description Value
activities List of activities in pipeline. Activity[]
annotations List of tags that can be used for describing the Pipeline. any[]
concurrency The max number of concurrent runs for the pipeline. int

Constraints:
Min value = 1
description The description of the pipeline. string
folder The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. PipelineFolder
parameters List of parameters for pipeline. ParameterDefinitionSpecification
policy Pipeline Policy. PipelinePolicy
runDimensions Dimensions emitted by Pipeline. PipelineRunDimensions
variables List of variables for pipeline. VariableDefinitionSpecification

PipelineElapsedTimeMetricPolicy

Name Description Value
duration TimeSpan value, after which an Azure Monitoring Metric is fired. any

PipelineFolder

Name Description Value
name The name of the folder that this Pipeline is in. string

PipelinePolicy

Name Description Value
elapsedTimeMetric Pipeline ElapsedTime Metric Policy. PipelineElapsedTimeMetricPolicy

PipelineReference

Name Description Value
name Reference name. string
referenceName Reference pipeline name. string (required)
type Pipeline reference type. 'PipelineReference' (required)

PipelineRunDimensions

Name Description Value

PolybaseSettings

Name Description Value
rejectSampleValue Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. any
rejectType Reject type. 'percentage'
'value'
rejectValue Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. any
useTypeDefault Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). any

PostgreSqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PostgreSqlSource' (required)

PostgreSqlV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PostgreSqlV2Source' (required)

PowerQuerySink

Name Description Value
dataset Dataset reference. DatasetReference
description Transformation description. string
flowlet Flowlet Reference DataFlowReference
linkedService Linked service reference. LinkedServiceReference
name Transformation name. string (required)
rejectedDataLinkedService Rejected data linked service reference. LinkedServiceReference
schemaLinkedService Schema linked service reference. LinkedServiceReference
script sink script. string

PowerQuerySinkMapping

Name Description Value
dataflowSinks List of sinks mapped to Power Query mashup query. PowerQuerySink[]
queryName Name of the query in Power Query mashup document. string

PrestoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'PrestoSource' (required)

QuickBooksSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'QuickBooksSource' (required)

RedirectIncompatibleRowSettings

Name Description Value
linkedServiceName Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). any (required)
path The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). any

RedshiftUnloadSettings

Name Description Value
bucketName The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). any (required)
s3LinkedServiceName The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. LinkedServiceReference (required)

RelationalSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
type Copy source type. 'RelationalSource' (required)

ResponsysSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ResponsysSource' (required)

RestSink

Name Description Value
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string type). any
httpCompressionType Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string (or Expression with resultType string). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
requestInterval The time to await before sending next request, in milliseconds any
requestMethod The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). any
type Copy sink type. 'RestSink' (required)

RestSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). any
additionalHeaders The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
paginationRules The pagination rules to compose next page requests. Type: string (or Expression with resultType string). any
requestBody The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). any
requestInterval The time to await before sending next page request. any
requestMethod The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). any
type Copy source type. 'RestSource' (required)

SalesforceMarketingCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SalesforceMarketingCloudSource' (required)

SalesforceServiceCloudSink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceServiceCloudSink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceServiceCloudSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
readBehavior The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceServiceCloudSource' (required)

SalesforceServiceCloudV2Sink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceServiceCloudV2Sink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceServiceCloudV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
includeDeletedObjects This property controls whether the query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). any
query You can only use Salesforce Object Query Language (SOQL) query with limitations. For SOQL limitations, see this article: https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/queries.htm#SOQL%20Considerations. If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset will be retrieved. Type: string (or Expression with resultType string). any
SOQLQuery Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceServiceCloudV2Source' (required)

SalesforceSink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceSink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
readBehavior The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceSource' (required)

SalesforceV2Sink

Name Description Value
externalIdFieldName The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). any
ignoreNullValues The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). any
type Copy sink type. 'SalesforceV2Sink' (required)
writeBehavior The write behavior for the operation. Default is Insert. 'Insert'
'Upsert'

SalesforceV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
includeDeletedObjects This property control whether query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). any
pageSize Page size for each http request; too large a pageSize will cause a timeout, default 300,000. Type: integer (or Expression with resultType integer). any
query You can only use Salesforce Object Query Language (SOQL) query with limitations. For SOQL limitations, see this article: https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/queries.htm#SOQL%20Considerations. If query is not specified, all the data of the Salesforce object specified in ObjectApiName/reportId in dataset will be retrieved. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
SOQLQuery Deprecating, please use 'query' property instead. Type: string (or Expression with resultType string). any
type Copy source type. 'SalesforceV2Source' (required)

SapBwSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query MDX query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapBwSource' (required)

SapCloudForCustomerSink

Name Description Value
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy sink type. 'SapCloudForCustomerSink' (required)
writeBehavior The write behavior for the operation. Default is 'Insert'. 'Insert'
'Update'

SapCloudForCustomerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapCloudForCustomerSource' (required)

SapEccSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
httpRequestTimeout The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapEccSource' (required)

SapHanaPartitionSettings

Name Description Value
partitionColumnName The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

SapHanaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
packetSize The packet size of data read from SAP HANA. Type: integer (or Expression with resultType integer). any
partitionOption The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". any
partitionSettings The settings that will be leveraged for SAP HANA source partitioning. SapHanaPartitionSettings
query SAP HANA Sql query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SapHanaSource' (required)

SapOdpSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
extractionMode The extraction mode. Allowed value include: Full, Delta and Recovery. The default value is Full. Type: string (or Expression with resultType string). any
projection Specifies the columns to be selected from source data. Type: array of objects(projection) (or Expression with resultType array of objects). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
selection Specifies the selection conditions from source data. Type: array of objects(selection) (or Expression with resultType array of objects). any
subscriberProcess The subscriber process to manage the delta process. Type: string (or Expression with resultType string). any
type Copy source type. 'SapOdpSource' (required)

SapOpenHubSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
baseRequestId The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer). any
customRfcReadTableFunctionModule Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). any
excludeLastRequest Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sapDataColumnDelimiter The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). any
type Copy source type. 'SapOpenHubSource' (required)

SapTablePartitionSettings

Name Description Value
maxPartitionsNumber The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). any
partitionColumnName The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

SapTableSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
batchSize Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). any
customRfcReadTableFunctionModule Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". any
partitionSettings The settings that will be leveraged for SAP table source partitioning. SapTablePartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
rfcTableFields The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). any
rfcTableOptions The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). any
rowCount The number of rows to be retrieved. Type: integer (or Expression with resultType integer). any
rowSkips The number of rows that will be skipped. Type: integer (or Expression with resultType integer). any
sapDataColumnDelimiter The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). any
type Copy source type. 'SapTableSource' (required)

ScriptActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'Script' (required)
typeProperties Script activity properties. ScriptActivityTypeProperties (required)

ScriptActivityParameter

Name Description Value
direction The direction of the parameter. 'Input'
'InputOutput'
'Output'
name The name of the parameter. Type: string (or Expression with resultType string). any
size The size of the output direction parameter. int
type The type of the parameter. 'Boolean'
'DateTime'
'DateTimeOffset'
'Decimal'
'Double'
'Guid'
'Int16'
'Int32'
'Int64'
'Single'
'String'
'Timespan'
value The value of the parameter. Type: string (or Expression with resultType string). any

ScriptActivityScriptBlock

Name Description Value
parameters Array of script parameters. Type: array. ScriptActivityParameter[]
text The query text. Type: string (or Expression with resultType string). any (required)
type The type of the query. Please refer to the ScriptType for valid options. Type: string (or Expression with resultType string). any (required)

ScriptActivityTypeProperties

Name Description Value
logSettings Log settings of script activity. ScriptActivityTypePropertiesLogSettings
scriptBlockExecutionTimeout ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
scripts Array of script blocks. Type: array. ScriptActivityScriptBlock[]

ScriptActivityTypePropertiesLogSettings

Name Description Value
logDestination The destination of logs. Type: string. 'ActivityOutput'
'ExternalStore' (required)
logLocationSettings Log location settings customer needs to provide when enabling log. LogLocationSettings

SecretBase

Name Description Value
type Set to 'AzureKeyVaultSecret' for type AzureKeyVaultSecretReference. Set to 'SecureString' for type SecureString. 'AzureKeyVaultSecret'
'SecureString' (required)

SecureInputOutputPolicy

Name Description Value
secureInput When set to true, Input from activity is considered as secure and will not be logged to monitoring. bool
secureOutput When set to true, Output from activity is considered as secure and will not be logged to monitoring. bool

SecureString

Name Description Value
type Type of the secret. string (required)
value Value of secure string. string (required)

SecureString

Name Description Value
type Type of the secret. 'SecureString' (required)
value Value of secure string. string (required)

ServiceNowSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ServiceNowSource' (required)

ServiceNowV2Source

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
expression Expression to filter data from source. ExpressionV2
pageSize Page size of the result. Type: integer (or Expression with resultType integer). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ServiceNowV2Source' (required)

SetVariableActivity

Name Description Value
policy Activity policy. SecureInputOutputPolicy
type Type of activity. 'SetVariable' (required)
typeProperties Set Variable activity properties. SetVariableActivityTypeProperties (required)

SetVariableActivityTypeProperties

Name Description Value
setSystemVariable If set to true, it sets the pipeline run return value. bool
value Value to be set. Could be a static value or Expression. any
variableName Name of the variable whose value needs to be set. string

SftpReadSettings

Name Description Value
deleteFilesAfterCompletion Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). any
disableChunking If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). any
enablePartitionDiscovery Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). any
fileListPath Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). any
modifiedDatetimeEnd The end of file's modified datetime. Type: string (or Expression with resultType string). any
modifiedDatetimeStart The start of file's modified datetime. Type: string (or Expression with resultType string). any
partitionRootPath Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). any
recursive If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'SftpReadSettings' (required)
wildcardFileName Sftp wildcardFileName. Type: string (or Expression with resultType string). any
wildcardFolderPath Sftp wildcardFolderPath. Type: string (or Expression with resultType string). any

SftpWriteSettings

Name Description Value
operationTimeout Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). any
type The write setting type. 'SftpWriteSettings' (required)
useTempFileRename Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). any

SharePointOnlineListSource

Name Description Value
httpRequestTimeout The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
query The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). any
type Copy source type. 'SharePointOnlineListSource' (required)

ShopifySource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ShopifySource' (required)

SkipErrorFile

Name Description Value
dataInconsistency Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). any
fileMissing Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). any

SnowflakeExportCopyCommand

Name Description Value
additionalCopyOptions Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } SnowflakeExportCopyCommandAdditionalCopyOptions
additionalFormatOptions Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" } SnowflakeExportCopyCommandAdditionalFormatOptions
storageIntegration The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with resultType string). any
type The export setting type. string (required)

SnowflakeExportCopyCommandAdditionalCopyOptions

Name Description Value

SnowflakeExportCopyCommandAdditionalFormatOptions

Name Description Value

SnowflakeImportCopyCommand

Name Description Value
additionalCopyOptions Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } SnowflakeImportCopyCommandAdditionalCopyOptions
additionalFormatOptions Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" } SnowflakeImportCopyCommandAdditionalFormatOptions
storageIntegration The name of the snowflake storage integration to use for the copy operation. Type: string (or Expression with resultType string). any
type The import setting type. string (required)

SnowflakeImportCopyCommandAdditionalCopyOptions

Name Description Value

SnowflakeImportCopyCommandAdditionalFormatOptions

Name Description Value

SnowflakeSink

Name Description Value
importSettings Snowflake import settings. SnowflakeImportCopyCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'SnowflakeSink' (required)

SnowflakeSource

Name Description Value
exportSettings Snowflake export settings. SnowflakeExportCopyCommand (required)
query Snowflake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'SnowflakeSource' (required)

SnowflakeV2Sink

Name Description Value
importSettings Snowflake import settings. SnowflakeImportCopyCommand
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
type Copy sink type. 'SnowflakeV2Sink' (required)

SnowflakeV2Source

Name Description Value
exportSettings Snowflake export settings. SnowflakeExportCopyCommand (required)
query Snowflake Sql query. Type: string (or Expression with resultType string). any
type Copy source type. 'SnowflakeV2Source' (required)

SparkConfigurationParametrizationReference

Name Description Value
referenceName Reference spark configuration name. Type: string (or Expression with resultType string). any (required)
type Spark configuration reference type. 'SparkConfigurationReference' (required)

SparkSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SparkSource' (required)

SqlDWSink

Name Description Value
allowCopyCommand Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). any
allowPolyBase Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). any
copyCommandSettings Specifies Copy Command related settings when allowCopyCommand is true. DWCopyCommandSettings
polyBaseSettings Specifies PolyBase-related settings when allowPolyBase is true. PolybaseSettings
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlDWSink' (required)
upsertSettings SQL DW upsert settings. SqlDWUpsertSettings
writeBehavior Write behavior when copying data into azure SQL DW. Type: SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum) any

SqlDWSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL Data Warehouse reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. any
type Copy source type. 'SqlDWSource' (required)

SqlDWUpsertSettings

Name Description Value
interimSchemaName Schema name for interim table. Type: string (or Expression with resultType string). any
keys Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). any

SqlMISink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlMISink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into Azure SQL MI. Type: string (or Expression with resultType string) any

SqlMISource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlMISource' (required)

SqlPartitionSettings

Name Description Value
partitionColumnName The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). any

SqlServerSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlServerSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into sql server. Type: string (or Expression with resultType string). any

SqlServerSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
produceAdditionalTypes Which additional types to produce. any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlServerSource' (required)

SqlServerStoredProcedureActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SqlServerStoredProcedure' (required)
typeProperties SQL stored procedure activity properties. SqlServerStoredProcedureActivityTypeProperties (required)

SqlServerStoredProcedureActivityTypeProperties

Name Description Value
storedProcedureName Stored procedure name. Type: string (or Expression with resultType string). any (required)
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any

SqlSink

Name Description Value
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
sqlWriterStoredProcedureName SQL writer stored procedure name. Type: string (or Expression with resultType string). any
sqlWriterTableType SQL writer table type. Type: string (or Expression with resultType string). any
sqlWriterUseTableLock Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). any
storedProcedureParameters SQL stored procedure parameters. any
storedProcedureTableTypeParameterName The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'SqlSink' (required)
upsertSettings SQL upsert settings. SqlUpsertSettings
writeBehavior Write behavior when copying data into sql. Type: string (or Expression with resultType string). any

SqlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery SQL reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". any
type Copy source type. 'SqlSource' (required)

SqlUpsertSettings

Name Description Value
interimSchemaName Schema name for interim table. Type: string (or Expression with resultType string). any
keys Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). any
useTempDB Specifies whether to use temp db for upsert interim table. Type: boolean (or Expression with resultType boolean). any

SquareSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SquareSource' (required)

SsisAccessCredential

Name Description Value
domain Domain for windows authentication. Type: string (or Expression with resultType string). any (required)
password Password for windows authentication. SecretBase (required)
userName UserName for windows authentication. Type: string (or Expression with resultType string). any (required)

SsisChildPackage

Name Description Value
packageContent Content for embedded child package. Type: string (or Expression with resultType string). any (required)
packageLastModifiedDate Last modified date for embedded child package. string
packageName Name for embedded child package. string
packagePath Path for embedded child package. Type: string (or Expression with resultType string). any (required)

SsisConnectionManager

Name Description Value

SsisConnectionManager

Name Description Value

SsisExecutionCredential

Name Description Value
domain Domain for windows authentication. Type: string (or Expression with resultType string). any (required)
password Password for windows authentication. SecureString (required)
userName UserName for windows authentication. Type: string (or Expression with resultType string). any (required)

SsisExecutionParameter

Name Description Value
value SSIS package execution parameter value. Type: string (or Expression with resultType string). any (required)

SsisLogLocation

Name Description Value
logPath The SSIS package execution log path. Type: string (or Expression with resultType string). any (required)
type The type of SSIS log location. 'File' (required)
typeProperties SSIS package execution log location properties. SsisLogLocationTypeProperties (required)

SsisLogLocationTypeProperties

Name Description Value
accessCredential The package execution log access credential. SsisAccessCredential
logRefreshInterval Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

SsisPackageLocation

Name Description Value
packagePath The SSIS package path. Type: string (or Expression with resultType string). any
type The type of SSIS package location. 'File'
'InlinePackage'
'PackageStore'
'SSISDB'
typeProperties SSIS package location properties. SsisPackageLocationTypeProperties

SsisPackageLocationTypeProperties

Name Description Value
accessCredential The package access credential. SsisAccessCredential
childPackages The embedded child package list. SsisChildPackage[]
configurationAccessCredential The configuration file access credential. SsisAccessCredential
configurationPath The configuration file of the package execution. Type: string (or Expression with resultType string). any
packageContent The embedded package content. Type: string (or Expression with resultType string). any
packageLastModifiedDate The embedded package last modified date. string
packageName The package name. string
packagePassword Password of the package. SecretBase

SsisPropertyOverride

Name Description Value
isSensitive Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true bool
value SSIS package property override value. Type: string (or Expression with resultType string). any (required)

StagingSettings

Name Description Value
enableCompression Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). any
linkedServiceName Staging linked service reference. LinkedServiceReference (required)
path The path to storage for storing the interim data. Type: string (or Expression with resultType string). any

StoreReadSettings

Name Description Value
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
type Set to 'AmazonS3CompatibleReadSettings' for type AmazonS3CompatibleReadSettings. Set to 'AmazonS3ReadSettings' for type AmazonS3ReadSettings. Set to 'AzureBlobFSReadSettings' for type AzureBlobFSReadSettings. Set to 'AzureBlobStorageReadSettings' for type AzureBlobStorageReadSettings. Set to 'AzureDataLakeStoreReadSettings' for type AzureDataLakeStoreReadSettings. Set to 'AzureFileStorageReadSettings' for type AzureFileStorageReadSettings. Set to 'FileServerReadSettings' for type FileServerReadSettings. Set to 'FtpReadSettings' for type FtpReadSettings. Set to 'GoogleCloudStorageReadSettings' for type GoogleCloudStorageReadSettings. Set to 'HdfsReadSettings' for type HdfsReadSettings. Set to 'HttpReadSettings' for type HttpReadSettings. Set to 'LakeHouseReadSettings' for type LakeHouseReadSettings. Set to 'OracleCloudStorageReadSettings' for type OracleCloudStorageReadSettings. Set to 'SftpReadSettings' for type SftpReadSettings. 'AmazonS3CompatibleReadSettings'
'AmazonS3ReadSettings'
'AzureBlobFSReadSettings'
'AzureBlobStorageReadSettings'
'AzureDataLakeStoreReadSettings'
'AzureFileStorageReadSettings'
'FileServerReadSettings'
'FtpReadSettings'
'GoogleCloudStorageReadSettings'
'HdfsReadSettings'
'HttpReadSettings'
'LakeHouseReadSettings'
'OracleCloudStorageReadSettings'
'SftpReadSettings' (required)

StoreWriteSettings

Name Description Value
copyBehavior The type of copy behavior for copy sink. any
disableMetricsCollection If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). any
maxConcurrentConnections The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). any
metadata Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). MetadataItem[]
type Set to 'AzureBlobFSWriteSettings' for type AzureBlobFSWriteSettings. Set to 'AzureBlobStorageWriteSettings' for type AzureBlobStorageWriteSettings. Set to 'AzureDataLakeStoreWriteSettings' for type AzureDataLakeStoreWriteSettings. Set to 'AzureFileStorageWriteSettings' for type AzureFileStorageWriteSettings. Set to 'FileServerWriteSettings' for type FileServerWriteSettings. Set to 'LakeHouseWriteSettings' for type LakeHouseWriteSettings. Set to 'SftpWriteSettings' for type SftpWriteSettings. 'AzureBlobFSWriteSettings'
'AzureBlobStorageWriteSettings'
'AzureDataLakeStoreWriteSettings'
'AzureFileStorageWriteSettings'
'FileServerWriteSettings'
'LakeHouseWriteSettings'
'SftpWriteSettings' (required)

SwitchActivity

Name Description Value
type Type of activity. 'Switch' (required)
typeProperties Switch activity properties. SwitchActivityTypeProperties (required)

SwitchActivityTypeProperties

Name Description Value
cases List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. SwitchCase[]
defaultActivities List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. Activity[]
on An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. Expression (required)

SwitchCase

Name Description Value
activities List of activities to execute for satisfied case condition. Activity[]
value Expected value that satisfies the expression result of the 'on' property. string

SybaseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query Database query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'SybaseSource' (required)

SynapseNotebookActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SynapseNotebook' (required)
typeProperties Execute Synapse notebook activity properties. SynapseNotebookActivityTypeProperties (required)

SynapseNotebookActivityTypeProperties

Name Description Value
conf Spark configuration properties, which will override the 'conf' of the notebook you provide. any
configurationType The type of the spark config. 'Artifact'
'Customized'
'Default'
driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you provide. Type: string (or Expression with resultType string). any
executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the notebook you provide. Type: string (or Expression with resultType string). any
notebook Synapse notebook reference. SynapseNotebookReference (required)
numExecutors Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType integer). any
parameters Notebook parameters. SynapseNotebookActivityTypePropertiesParameters
sparkConfig Spark configuration property. SynapseNotebookActivityTypePropertiesSparkConfig
sparkPool The name of the big data pool which will be used to execute the notebook. BigDataPoolParametrizationReference
targetSparkConfiguration The spark configuration of the spark job. SparkConfigurationParametrizationReference

SynapseNotebookActivityTypePropertiesParameters

Name Description Value

SynapseNotebookActivityTypePropertiesSparkConfig

Name Description Value

SynapseNotebookReference

Name Description Value
referenceName Reference notebook name. Type: string (or Expression with resultType string). any (required)
type Synapse notebook reference type. 'NotebookReference' (required)

SynapseSparkJobActivityTypeProperties

Name Description Value
args User specified arguments to SynapseSparkJobDefinitionActivity. any[]
className The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string). any
conf Spark configuration properties, which will override the 'conf' of the spark job definition you provide. any
configurationType The type of the spark config. 'Artifact'
'Customized'
'Default'
driverSize Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). any
executorSize Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string). any
file The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string). any
files (Deprecated. Please use pythonCodeReference and filesV2) Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide. any[]
filesV2 Additional files used for reference in the main definition file, which will override the 'jars' and 'files' of the spark job definition you provide. any[]
numExecutors Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer). any
pythonCodeReference Additional python code files used for reference in the main definition file, which will override the 'pyFiles' of the spark job definition you provide. any[]
scanFolder Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folders name are case sensitive. Type: boolean (or Expression with resultType boolean). any
sparkConfig Spark configuration property. SynapseSparkJobActivityTypePropertiesSparkConfig
sparkJob Synapse spark job reference. SynapseSparkJobReference (required)
targetBigDataPool The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide. BigDataPoolParametrizationReference
targetSparkConfiguration The spark configuration of the spark job. SparkConfigurationParametrizationReference

SynapseSparkJobActivityTypePropertiesSparkConfig

Name Description Value

SynapseSparkJobDefinitionActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'SparkJob' (required)
typeProperties Execute spark job activity properties. SynapseSparkJobActivityTypeProperties (required)

SynapseSparkJobReference

Name Description Value
referenceName Reference spark job name. Expression with resultType string. any (required)
type Synapse spark job reference type. 'SparkJobDefinitionReference' (required)

TarGZipReadSettings

Name Description Value
preserveCompressionFileNameAsFolder Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'TarGZipReadSettings' (required)

TarReadSettings

Name Description Value
preserveCompressionFileNameAsFolder Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'TarReadSettings' (required)

TeradataPartitionSettings

Name Description Value
partitionColumnName The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). any
partitionLowerBound The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any
partitionUpperBound The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). any

TeradataSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
partitionOption The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". any
partitionSettings The settings that will be leveraged for teradata source partitioning. TeradataPartitionSettings
query Teradata query. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'TeradataSource' (required)

UntilActivity

Name Description Value
type Type of activity. 'Until' (required)
typeProperties Until activity properties. UntilActivityTypeProperties (required)

UntilActivityTypeProperties

Name Description Value
activities List of activities to execute. Activity[] (required)
expression An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. Expression (required)
timeout Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

UserProperty

Name Description Value
name User property name. string (required)
value User property value. Type: string (or Expression with resultType string). any (required)

ValidationActivity

Name Description Value
type Type of activity. 'Validation' (required)
typeProperties Validation activity properties. ValidationActivityTypeProperties (required)

ValidationActivityTypeProperties

Name Description Value
childItems Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). any
dataset Validation activity dataset reference. DatasetReference (required)
minimumSize Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). any
sleep A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). any
timeout Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any

VariableDefinitionSpecification

Name Description Value

VariableSpecification

Name Description Value
defaultValue Default value of variable. any
type Variable type. 'Array'
'Bool'
'String' (required)

VerticaSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'VerticaSource' (required)

WaitActivity

Name Description Value
type Type of activity. 'Wait' (required)
typeProperties Wait activity properties. WaitActivityTypeProperties (required)

WaitActivityTypeProperties

Name Description Value
waitTimeInSeconds Duration in seconds. Type: integer (or Expression with resultType integer). any (required)

WarehouseSink

Name Description Value
allowCopyCommand Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). any
copyCommandSettings Specifies Copy Command related settings when allowCopyCommand is true. DWCopyCommandSettings
preCopyScript SQL pre-copy script. Type: string (or Expression with resultType string). any
tableOption The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). any
type Copy sink type. 'WarehouseSink' (required)
writeBehavior Write behavior when copying data into azure Microsoft Fabric Data Warehouse. Type: DWWriteBehaviorEnum (or Expression with resultType DWWriteBehaviorEnum) any

WarehouseSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
isolationLevel Specifies the transaction locking behavior for the Microsoft Fabric Warehouse source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). any
partitionOption The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". any
partitionSettings The settings that will be leveraged for Sql source partitioning. SqlPartitionSettings
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
sqlReaderQuery Microsoft Fabric Warehouse reader query. Type: string (or Expression with resultType string). any
sqlReaderStoredProcedureName Name of the stored procedure for a Microsoft Fabric Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). any
storedProcedureParameters Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. any
type Copy source type. 'WarehouseSource' (required)

WebActivity

Name Description Value
linkedServiceName Linked service reference. LinkedServiceReference
policy Activity policy. ActivityPolicy
type Type of activity. 'WebActivity' (required)
typeProperties Web activity properties. WebActivityTypeProperties (required)

WebActivityAuthentication

Name Description Value
credential The credential reference containing authentication information. CredentialReference
password Password for the PFX file or basic authentication / Secret when used for ServicePrincipal SecretBase
pfx Base64-encoded contents of a PFX file or Certificate when used for ServicePrincipal SecretBase
resource Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). any
type Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal) string
username Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). any
userTenant TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). any

WebActivityTypeProperties

Name Description Value
authentication Authentication method used for calling the endpoint. WebActivityAuthentication
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). any
connectVia The integration runtime reference. IntegrationRuntimeReference
datasets List of datasets passed to web endpoint. DatasetReference[]
disableCertValidation When set to true, Certificate validation will be disabled. bool
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). WebActivityTypePropertiesHeaders
httpRequestTimeout Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes any
linkedServices List of linked services passed to web endpoint. LinkedServiceReference[]
method Rest API method for target endpoint. 'DELETE'
'GET'
'POST'
'PUT' (required)
turnOffAsync Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET call on location given in http response headers. bool
url Web activity target endpoint and path. Type: string (or Expression with resultType string). any (required)

WebActivityTypePropertiesHeaders

Name Description Value

WebHookActivity

Name Description Value
policy Activity policy. SecureInputOutputPolicy
type Type of activity. 'WebHook' (required)
typeProperties WebHook activity properties. WebHookActivityTypeProperties (required)

WebHookActivityTypeProperties

Name Description Value
authentication Authentication method used for calling the endpoint. WebActivityAuthentication
body Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). any
headers Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). WebHookActivityTypePropertiesHeaders
method Rest API method for target endpoint. 'POST' (required)
reportStatusOnCallBack When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). any
timeout The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). string
url WebHook activity target endpoint and path. Type: string (or Expression with resultType string). any (required)

WebHookActivityTypePropertiesHeaders

Name Description Value

WebSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
type Copy source type. 'WebSource' (required)

XeroSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'XeroSource' (required)

XmlReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
detectDataType Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
namespacePrefixes Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). any
namespaces Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
type The read setting type. 'XmlReadSettings' (required)
validationMode Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). any

XmlReadSettings

Name Description Value
compressionProperties Compression settings. CompressionReadSettings
detectDataType Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
namespacePrefixes Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). any
namespaces Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). any
type The read setting type. string (required)
validationMode Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). any

XmlSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
formatSettings Xml format settings. XmlReadSettings
storeSettings Xml store settings. StoreReadSettings
type Copy source type. 'XmlSource' (required)

ZipDeflateReadSettings

Name Description Value
preserveZipFileNameAsFolder Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). any
type The Compression setting type. 'ZipDeflateReadSettings' (required)

ZohoSource

Name Description Value
additionalColumns Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). any
query A query to retrieve data from source. Type: string (or Expression with resultType string). any
queryTimeout Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). any
type Copy source type. 'ZohoSource' (required)

Quickstart templates

The following quickstart templates deploy this resource type.

Template Description
Create a V2 data factory

Deploy to Azure
This template creates a V2 data factory that copies data from a folder in an Azure Blob Storage to another folder in the storage.
More is possible with Azure Data Factory - One click to try Azure Data Factory

Deploy to Azure
This template creates a data factory pipeline for a copy activity from one Azure Blob container into another Azure Blob container.

Terraform (AzAPI provider) resource definition

The factories/pipelines resource type can be deployed with operations that target:

  • Resource groups

For a list of changed properties in each API version, see change log.

Resource format

To create a Microsoft.DataFactory/factories/pipelines resource, add the following Terraform to your template.

resource "azapi_resource" "symbolicname" {
  type = "Microsoft.DataFactory/factories/pipelines@2018-06-01"
  name = "string"
  body = jsonencode({
    properties = {
      activities = [
        {
          dependsOn = [
            {
              activity = "string"
              dependencyConditions = [
                "string"
              ]
            }
          ]
          description = "string"
          name = "string"
          onInactiveMarkAs = "string"
          state = "string"
          userProperties = [
            {
              name = "string"
              value = ?
            }
          ]
          type = "string"
          // For remaining properties, see Activity objects
        }
      ]
      annotations = [
        ?
      ]
      concurrency = int
      description = "string"
      folder = {
        name = "string"
      }
      parameters = {
        {customized property} = {
          defaultValue = ?
          type = "string"
        }
      }
      policy = {
        elapsedTimeMetric = {
          duration = ?
        }
      }
      runDimensions = {
        {customized property} = ?
      }
      variables = {
        {customized property} = {
          defaultValue = ?
          type = "string"
        }
      }
    }
  })
}

StoreReadSettings objects

Set the type property to specify the type of object.

For AmazonS3CompatibleReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "AmazonS3CompatibleReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For AmazonS3ReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "AmazonS3ReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For AzureBlobFSReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "AzureBlobFSReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For AzureBlobStorageReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "AzureBlobStorageReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For AzureDataLakeStoreReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  listAfter = ?
  listBefore = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "AzureDataLakeStoreReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For AzureFileStorageReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "AzureFileStorageReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For FileServerReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileFilter = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "FileServerReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For FtpReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  disableChunking = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  partitionRootPath = ?
  recursive = ?
  type = "FtpReadSettings"
  useBinaryTransfer = ?
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For GoogleCloudStorageReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "GoogleCloudStorageReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For HdfsReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  distcpSettings = {
    distcpOptions = ?
    resourceManagerEndpoint = ?
    tempScriptPath = ?
  }
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "HdfsReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For HttpReadSettings, use:

{
  additionalColumns = ?
  additionalHeaders = ?
  requestBody = ?
  requestMethod = ?
  requestTimeout = ?
  type = "HttpReadSettings"
}

For LakeHouseReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "LakeHouseReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For OracleCloudStorageReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  prefix = ?
  recursive = ?
  type = "OracleCloudStorageReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

For SftpReadSettings, use:

{
  deleteFilesAfterCompletion = ?
  disableChunking = ?
  enablePartitionDiscovery = ?
  fileListPath = ?
  modifiedDatetimeEnd = ?
  modifiedDatetimeStart = ?
  partitionRootPath = ?
  recursive = ?
  type = "SftpReadSettings"
  wildcardFileName = ?
  wildcardFolderPath = ?
}

Activity objects

Set the type property to specify the type of object.

For AppendVariable, use:

{
  type = "AppendVariable"
  typeProperties = {
    value = ?
    variableName = "string"
  }
}

For AzureDataExplorerCommand, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "AzureDataExplorerCommand"
  typeProperties = {
    command = ?
    commandTimeout = ?
  }
}

For AzureFunctionActivity, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "AzureFunctionActivity"
  typeProperties = {
    body = ?
    functionName = ?
    headers = {
      {customized property} = ?
    }
    method = "string"
  }
}

For AzureMLBatchExecution, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "AzureMLBatchExecution"
  typeProperties = {
    globalParameters = {
      {customized property} = ?
    }
    webServiceInputs = {
      {customized property} = {
        filePath = ?
        linkedServiceName = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
      }
    }
    webServiceOutputs = {
      {customized property} = {
        filePath = ?
        linkedServiceName = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
      }
    }
  }
}

For AzureMLExecutePipeline, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "AzureMLExecutePipeline"
  typeProperties = {
    continueOnStepFailure = ?
    dataPathAssignments = ?
    experimentName = ?
    mlParentRunId = ?
    mlPipelineEndpointId = ?
    mlPipelineId = ?
    mlPipelineParameters = ?
    version = ?
  }
}

For AzureMLUpdateResource, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "AzureMLUpdateResource"
  typeProperties = {
    trainedModelFilePath = ?
    trainedModelLinkedServiceName = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    trainedModelName = ?
  }
}

For ExecutePipeline, use:

{
  policy = {
    secureInput = bool
  }
  type = "ExecutePipeline"
  typeProperties = {
    parameters = {
      {customized property} = ?
    }
    pipeline = {
      name = "string"
      referenceName = "string"
      type = "string"
    }
    waitOnCompletion = bool
  }
}

For Fail, use:

{
  type = "Fail"
  typeProperties = {
    errorCode = ?
    message = ?
  }
}

For Filter, use:

{
  type = "Filter"
  typeProperties = {
    condition = {
      type = "string"
      value = "string"
    }
    items = {
      type = "string"
      value = "string"
    }
  }
}

For ForEach, use:

{
  type = "ForEach"
  typeProperties = {
    activities = [
      {
        dependsOn = [
          {
            activity = "string"
            dependencyConditions = [
              "string"
            ]
          }
        ]
        description = "string"
        name = "string"
        onInactiveMarkAs = "string"
        state = "string"
        userProperties = [
          {
            name = "string"
            value = ?
          }
        ]
        type = "string"
        // For remaining properties, see Activity objects
      }
    ]
    batchCount = int
    isSequential = bool
    items = {
      type = "string"
      value = "string"
    }
  }
}

For IfCondition, use:

{
  type = "IfCondition"
  typeProperties = {
    expression = {
      type = "string"
      value = "string"
    }
    ifFalseActivities = [
      {
        dependsOn = [
          {
            activity = "string"
            dependencyConditions = [
              "string"
            ]
          }
        ]
        description = "string"
        name = "string"
        onInactiveMarkAs = "string"
        state = "string"
        userProperties = [
          {
            name = "string"
            value = ?
          }
        ]
        type = "string"
        // For remaining properties, see Activity objects
      }
    ]
    ifTrueActivities = [
      {
        dependsOn = [
          {
            activity = "string"
            dependencyConditions = [
              "string"
            ]
          }
        ]
        description = "string"
        name = "string"
        onInactiveMarkAs = "string"
        state = "string"
        userProperties = [
          {
            name = "string"
            value = ?
          }
        ]
        type = "string"
        // For remaining properties, see Activity objects
      }
    ]
  }
}

For SetVariable, use:

{
  policy = {
    secureInput = bool
    secureOutput = bool
  }
  type = "SetVariable"
  typeProperties = {
    setSystemVariable = bool
    value = ?
    variableName = "string"
  }
}

For Switch, use:

{
  type = "Switch"
  typeProperties = {
    cases = [
      {
        activities = [
          {
            dependsOn = [
              {
                activity = "string"
                dependencyConditions = [
                  "string"
                ]
              }
            ]
            description = "string"
            name = "string"
            onInactiveMarkAs = "string"
            state = "string"
            userProperties = [
              {
                name = "string"
                value = ?
              }
            ]
            type = "string"
            // For remaining properties, see Activity objects
          }
        ]
        value = "string"
      }
    ]
    defaultActivities = [
      {
        dependsOn = [
          {
            activity = "string"
            dependencyConditions = [
              "string"
            ]
          }
        ]
        description = "string"
        name = "string"
        onInactiveMarkAs = "string"
        state = "string"
        userProperties = [
          {
            name = "string"
            value = ?
          }
        ]
        type = "string"
        // For remaining properties, see Activity objects
      }
    ]
    on = {
      type = "string"
      value = "string"
    }
  }
}

For Until, use:

{
  type = "Until"
  typeProperties = {
    activities = [
      {
        dependsOn = [
          {
            activity = "string"
            dependencyConditions = [
              "string"
            ]
          }
        ]
        description = "string"
        name = "string"
        onInactiveMarkAs = "string"
        state = "string"
        userProperties = [
          {
            name = "string"
            value = ?
          }
        ]
        type = "string"
        // For remaining properties, see Activity objects
      }
    ]
    expression = {
      type = "string"
      value = "string"
    }
    timeout = ?
  }
}

For Validation, use:

{
  type = "Validation"
  typeProperties = {
    childItems = ?
    dataset = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    minimumSize = ?
    sleep = ?
    timeout = ?
  }
}

For Wait, use:

{
  type = "Wait"
  typeProperties = {
    waitTimeInSeconds = ?
  }
}

For WebHook, use:

{
  policy = {
    secureInput = bool
    secureOutput = bool
  }
  type = "WebHook"
  typeProperties = {
    authentication = {
      credential = {
        referenceName = "string"
        type = "string"
      }
      password = {
        type = "string"
        // For remaining properties, see SecretBase objects
      }
      pfx = {
        type = "string"
        // For remaining properties, see SecretBase objects
      }
      resource = ?
      type = "string"
      username = ?
      userTenant = ?
    }
    body = ?
    headers = {
      {customized property} = ?
    }
    method = "string"
    reportStatusOnCallBack = ?
    timeout = "string"
    url = ?
  }
}

For Copy, use:

{
  inputs = [
    {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
  ]
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  outputs = [
    {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
  ]
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "Copy"
  typeProperties = {
    dataIntegrationUnits = ?
    enableSkipIncompatibleRow = ?
    enableStaging = ?
    logSettings = {
      copyActivityLogSettings = {
        enableReliableLogging = ?
        logLevel = ?
      }
      enableCopyActivityLog = ?
      logLocationSettings = {
        linkedServiceName = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        path = ?
      }
    }
    logStorageSettings = {
      enableReliableLogging = ?
      linkedServiceName = {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
      logLevel = ?
      path = ?
    }
    parallelCopies = ?
    preserve = [
      ?
    ]
    preserveRules = [
      ?
    ]
    redirectIncompatibleRowSettings = {
      linkedServiceName = ?
      path = ?
    }
    sink = {
      disableMetricsCollection = ?
      maxConcurrentConnections = ?
      sinkRetryCount = ?
      sinkRetryWait = ?
      writeBatchSize = ?
      writeBatchTimeout = ?
      type = "string"
      // For remaining properties, see CopySink objects
    }
    skipErrorFile = {
      dataInconsistency = ?
      fileMissing = ?
    }
    source = {
      disableMetricsCollection = ?
      maxConcurrentConnections = ?
      sourceRetryCount = ?
      sourceRetryWait = ?
      type = "string"
      // For remaining properties, see CopySource objects
    }
    stagingSettings = {
      enableCompression = ?
      linkedServiceName = {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
      path = ?
    }
    translator = ?
    validateDataConsistency = ?
  }
}

For Custom, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "Custom"
  typeProperties = {
    autoUserSpecification = ?
    command = ?
    extendedProperties = {
      {customized property} = ?
    }
    folderPath = ?
    referenceObjects = {
      datasets = [
        {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
      ]
      linkedServices = [
        {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
      ]
    }
    resourceLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    retentionTimeInDays = ?
  }
}

For DataLakeAnalyticsU-SQL, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "DataLakeAnalyticsU-SQL"
  typeProperties = {
    compilationMode = ?
    degreeOfParallelism = ?
    parameters = {
      {customized property} = ?
    }
    priority = ?
    runtimeVersion = ?
    scriptLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    scriptPath = ?
  }
}

For DatabricksNotebook, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "DatabricksNotebook"
  typeProperties = {
    baseParameters = {
      {customized property} = ?
    }
    libraries = [
      {
        {customized property} = ?
      }
    ]
    notebookPath = ?
  }
}

For DatabricksSparkJar, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "DatabricksSparkJar"
  typeProperties = {
    libraries = [
      {
        {customized property} = ?
      }
    ]
    mainClassName = ?
    parameters = [
      ?
    ]
  }
}

For DatabricksSparkPython, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "DatabricksSparkPython"
  typeProperties = {
    libraries = [
      {
        {customized property} = ?
      }
    ]
    parameters = [
      ?
    ]
    pythonFile = ?
  }
}

For Delete, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "Delete"
  typeProperties = {
    dataset = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    enableLogging = ?
    logStorageSettings = {
      enableReliableLogging = ?
      linkedServiceName = {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
      logLevel = ?
      path = ?
    }
    maxConcurrentConnections = int
    recursive = ?
    storeSettings = {
      disableMetricsCollection = ?
      maxConcurrentConnections = ?
      type = "string"
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For ExecuteDataFlow, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "ExecuteDataFlow"
  typeProperties = {
    compute = {
      computeType = ?
      coreCount = ?
    }
    continuationSettings = {
      continuationTtlInMinutes = ?
      customizedCheckpointKey = ?
      idleCondition = ?
    }
    continueOnError = ?
    dataFlow = {
      datasetParameters = ?
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    integrationRuntime = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    runConcurrently = ?
    sourceStagingConcurrency = ?
    staging = {
      folderPath = ?
      linkedService = {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    }
    traceLevel = ?
  }
}

For ExecuteSSISPackage, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "ExecuteSSISPackage"
  typeProperties = {
    connectVia = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    environmentPath = ?
    executionCredential = {
      domain = ?
      password = {
        type = "string"
        value = "string"
      }
      userName = ?
    }
    loggingLevel = ?
    logLocation = {
      logPath = ?
      type = "string"
      typeProperties = {
        accessCredential = {
          domain = ?
          password = {
            type = "string"
            // For remaining properties, see SecretBase objects
          }
          userName = ?
        }
        logRefreshInterval = ?
      }
    }
    packageConnectionManagers = {
      {customized property} = {
        {customized property} = {
          value = ?
        }
      }
    }
    packageLocation = {
      packagePath = ?
      type = "string"
      typeProperties = {
        accessCredential = {
          domain = ?
          password = {
            type = "string"
            // For remaining properties, see SecretBase objects
          }
          userName = ?
        }
        childPackages = [
          {
            packageContent = ?
            packageLastModifiedDate = "string"
            packageName = "string"
            packagePath = ?
          }
        ]
        configurationAccessCredential = {
          domain = ?
          password = {
            type = "string"
            // For remaining properties, see SecretBase objects
          }
          userName = ?
        }
        configurationPath = ?
        packageContent = ?
        packageLastModifiedDate = "string"
        packageName = "string"
        packagePassword = {
          type = "string"
          // For remaining properties, see SecretBase objects
        }
      }
    }
    packageParameters = {
      {customized property} = {
        value = ?
      }
    }
    projectConnectionManagers = {
      {customized property} = {
        {customized property} = {
          value = ?
        }
      }
    }
    projectParameters = {
      {customized property} = {
        value = ?
      }
    }
    propertyOverrides = {
      {customized property} = {
        isSensitive = bool
        value = ?
      }
    }
    runtime = ?
  }
}

For ExecuteWranglingDataflow, use:

{
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "ExecuteWranglingDataflow"
  typeProperties = {
    compute = {
      computeType = ?
      coreCount = ?
    }
    continuationSettings = {
      continuationTtlInMinutes = ?
      customizedCheckpointKey = ?
      idleCondition = ?
    }
    continueOnError = ?
    dataFlow = {
      datasetParameters = ?
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    integrationRuntime = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    queries = [
      {
        dataflowSinks = [
          {
            dataset = {
              parameters = {
                {customized property} = ?
              }
              referenceName = "string"
              type = "string"
            }
            description = "string"
            flowlet = {
              datasetParameters = ?
              parameters = {
                {customized property} = ?
              }
              referenceName = "string"
              type = "string"
            }
            linkedService = {
              parameters = {
                {customized property} = ?
              }
              referenceName = "string"
              type = "string"
            }
            name = "string"
            rejectedDataLinkedService = {
              parameters = {
                {customized property} = ?
              }
              referenceName = "string"
              type = "string"
            }
            schemaLinkedService = {
              parameters = {
                {customized property} = ?
              }
              referenceName = "string"
              type = "string"
            }
            script = "string"
          }
        ]
        queryName = "string"
      }
    ]
    runConcurrently = ?
    sinks = {
      {customized property} = {
        dataset = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        description = "string"
        flowlet = {
          datasetParameters = ?
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        linkedService = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        name = "string"
        rejectedDataLinkedService = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        schemaLinkedService = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        script = "string"
      }
    }
    sourceStagingConcurrency = ?
    staging = {
      folderPath = ?
      linkedService = {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    }
    traceLevel = ?
  }
}

For GetMetadata, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "GetMetadata"
  typeProperties = {
    dataset = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    fieldList = [
      ?
    ]
    formatSettings = {
      type = "string"
      // For remaining properties, see FormatReadSettings objects
    }
    storeSettings = {
      disableMetricsCollection = ?
      maxConcurrentConnections = ?
      type = "string"
      // For remaining properties, see StoreReadSettings objects
    }
  }
}

For HDInsightHive, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "HDInsightHive"
  typeProperties = {
    arguments = [
      ?
    ]
    defines = {
      {customized property} = ?
    }
    getDebugInfo = "string"
    queryTimeout = int
    scriptLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    scriptPath = ?
    storageLinkedServices = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
    variables = {
      {customized property} = ?
    }
  }
}

For HDInsightMapReduce, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "HDInsightMapReduce"
  typeProperties = {
    arguments = [
      ?
    ]
    className = ?
    defines = {
      {customized property} = ?
    }
    getDebugInfo = "string"
    jarFilePath = ?
    jarLibs = [
      ?
    ]
    jarLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    storageLinkedServices = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
  }
}

For HDInsightPig, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "HDInsightPig"
  typeProperties = {
    arguments = ?
    defines = {
      {customized property} = ?
    }
    getDebugInfo = "string"
    scriptLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    scriptPath = ?
    storageLinkedServices = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
  }
}

For HDInsightSpark, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "HDInsightSpark"
  typeProperties = {
    arguments = [
      ?
    ]
    className = "string"
    entryFilePath = ?
    getDebugInfo = "string"
    proxyUser = ?
    rootPath = ?
    sparkConfig = {
      {customized property} = ?
    }
    sparkJobLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
  }
}

For HDInsightStreaming, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "HDInsightStreaming"
  typeProperties = {
    arguments = [
      ?
    ]
    combiner = ?
    commandEnvironment = [
      ?
    ]
    defines = {
      {customized property} = ?
    }
    fileLinkedService = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    filePaths = [
      ?
    ]
    getDebugInfo = "string"
    input = ?
    mapper = ?
    output = ?
    reducer = ?
    storageLinkedServices = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
  }
}

For Lookup, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "Lookup"
  typeProperties = {
    dataset = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    firstRowOnly = ?
    source = {
      disableMetricsCollection = ?
      maxConcurrentConnections = ?
      sourceRetryCount = ?
      sourceRetryWait = ?
      type = "string"
      // For remaining properties, see CopySource objects
    }
  }
}

For Script, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "Script"
  typeProperties = {
    logSettings = {
      logDestination = "string"
      logLocationSettings = {
        linkedServiceName = {
          parameters = {
            {customized property} = ?
          }
          referenceName = "string"
          type = "string"
        }
        path = ?
      }
    }
    scriptBlockExecutionTimeout = ?
    scripts = [
      {
        parameters = [
          {
            direction = "string"
            name = ?
            size = int
            type = "string"
            value = ?
          }
        ]
        text = ?
        type = ?
      }
    ]
  }
}

For SparkJob, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "SparkJob"
  typeProperties = {
    args = [
      ?
    ]
    className = ?
    conf = ?
    configurationType = "string"
    driverSize = ?
    executorSize = ?
    file = ?
    files = [
      ?
    ]
    filesV2 = [
      ?
    ]
    numExecutors = ?
    pythonCodeReference = [
      ?
    ]
    scanFolder = ?
    sparkConfig = {
      {customized property} = ?
    }
    sparkJob = {
      referenceName = ?
      type = "string"
    }
    targetBigDataPool = {
      referenceName = ?
      type = "string"
    }
    targetSparkConfiguration = {
      referenceName = ?
      type = "string"
    }
  }
}

For SqlServerStoredProcedure, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "SqlServerStoredProcedure"
  typeProperties = {
    storedProcedureName = ?
    storedProcedureParameters = ?
  }
}

For SynapseNotebook, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "SynapseNotebook"
  typeProperties = {
    conf = ?
    configurationType = "string"
    driverSize = ?
    executorSize = ?
    notebook = {
      referenceName = ?
      type = "string"
    }
    numExecutors = ?
    parameters = {
      {customized property} = {
        type = "string"
        value = ?
      }
    }
    sparkConfig = {
      {customized property} = ?
    }
    sparkPool = {
      referenceName = ?
      type = "string"
    }
    targetSparkConfiguration = {
      referenceName = ?
      type = "string"
    }
  }
}

For WebActivity, use:

{
  linkedServiceName = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  policy = {
    retry = ?
    retryIntervalInSeconds = int
    secureInput = bool
    secureOutput = bool
    timeout = ?
  }
  type = "WebActivity"
  typeProperties = {
    authentication = {
      credential = {
        referenceName = "string"
        type = "string"
      }
      password = {
        type = "string"
        // For remaining properties, see SecretBase objects
      }
      pfx = {
        type = "string"
        // For remaining properties, see SecretBase objects
      }
      resource = ?
      type = "string"
      username = ?
      userTenant = ?
    }
    body = ?
    connectVia = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
    datasets = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
    disableCertValidation = bool
    headers = {
      {customized property} = ?
    }
    httpRequestTimeout = ?
    linkedServices = [
      {
        parameters = {
          {customized property} = ?
        }
        referenceName = "string"
        type = "string"
      }
    ]
    method = "string"
    turnOffAsync = bool
    url = ?
  }
}

CompressionReadSettings objects

Set the type property to specify the type of object.

For TarGZipReadSettings, use:

{
  preserveCompressionFileNameAsFolder = ?
  type = "TarGZipReadSettings"
}

For TarReadSettings, use:

{
  preserveCompressionFileNameAsFolder = ?
  type = "TarReadSettings"
}

For ZipDeflateReadSettings, use:

{
  preserveZipFileNameAsFolder = ?
  type = "ZipDeflateReadSettings"
}

StoreWriteSettings objects

Set the type property to specify the type of object.

For AzureBlobFSWriteSettings, use:

{
  blockSizeInMB = ?
  type = "AzureBlobFSWriteSettings"
}

For AzureBlobStorageWriteSettings, use:

{
  blockSizeInMB = ?
  type = "AzureBlobStorageWriteSettings"
}

For AzureDataLakeStoreWriteSettings, use:

{
  expiryDateTime = ?
  type = "AzureDataLakeStoreWriteSettings"
}

For AzureFileStorageWriteSettings, use:

{
  type = "AzureFileStorageWriteSettings"
}

For FileServerWriteSettings, use:

{
  type = "FileServerWriteSettings"
}

For LakeHouseWriteSettings, use:

{
  type = "LakeHouseWriteSettings"
}

For SftpWriteSettings, use:

{
  operationTimeout = ?
  type = "SftpWriteSettings"
  useTempFileRename = ?
}

CopySink objects

Set the type property to specify the type of object.

For AvroSink, use:

{
  formatSettings = {
    fileNamePrefix = ?
    maxRowsPerFile = ?
    recordName = "string"
    recordNamespace = "string"
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "AvroSink"
}

For AzureBlobFSSink, use:

{
  copyBehavior = ?
  metadata = [
    {
      name = ?
      value = ?
    }
  ]
  type = "AzureBlobFSSink"
}

For AzureDataExplorerSink, use:

{
  flushImmediately = ?
  ingestionMappingAsJson = ?
  ingestionMappingName = ?
  type = "AzureDataExplorerSink"
}

For AzureDataLakeStoreSink, use:

{
  copyBehavior = ?
  enableAdlsSingleFileParallel = ?
  type = "AzureDataLakeStoreSink"
}

For AzureDatabricksDeltaLakeSink, use:

{
  importSettings = {
    dateFormat = ?
    timestampFormat = ?
    type = "string"
  }
  preCopyScript = ?
  type = "AzureDatabricksDeltaLakeSink"
}

For AzureMySqlSink, use:

{
  preCopyScript = ?
  type = "AzureMySqlSink"
}

For AzurePostgreSqlSink, use:

{
  preCopyScript = ?
  type = "AzurePostgreSqlSink"
}

For AzureQueueSink, use:

{
  type = "AzureQueueSink"
}

For AzureSearchIndexSink, use:

{
  type = "AzureSearchIndexSink"
  writeBehavior = "string"
}

For AzureSqlSink, use:

{
  preCopyScript = ?
  sqlWriterStoredProcedureName = ?
  sqlWriterTableType = ?
  sqlWriterUseTableLock = ?
  storedProcedureParameters = ?
  storedProcedureTableTypeParameterName = ?
  tableOption = ?
  type = "AzureSqlSink"
  upsertSettings = {
    interimSchemaName = ?
    keys = ?
    useTempDB = ?
  }
  writeBehavior = ?
}

For AzureTableSink, use:

{
  azureTableDefaultPartitionKeyValue = ?
  azureTableInsertType = ?
  azureTablePartitionKeyName = ?
  azureTableRowKeyName = ?
  type = "AzureTableSink"
}

For BinarySink, use:

{
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "BinarySink"
}

For BlobSink, use:

{
  blobWriterAddHeader = ?
  blobWriterDateTimeFormat = ?
  blobWriterOverwriteFiles = ?
  copyBehavior = ?
  metadata = [
    {
      name = ?
      value = ?
    }
  ]
  type = "BlobSink"
}

For CommonDataServiceForAppsSink, use:

{
  alternateKeyName = ?
  ignoreNullValues = ?
  type = "CommonDataServiceForAppsSink"
  writeBehavior = "string"
}

For CosmosDbMongoDbApiSink, use:

{
  type = "CosmosDbMongoDbApiSink"
  writeBehavior = ?
}

For CosmosDbSqlApiSink, use:

{
  type = "CosmosDbSqlApiSink"
  writeBehavior = ?
}

For DelimitedTextSink, use:

{
  formatSettings = {
    fileExtension = ?
    fileNamePrefix = ?
    maxRowsPerFile = ?
    quoteAllText = ?
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "DelimitedTextSink"
}

For DocumentDbCollectionSink, use:

{
  nestingSeparator = ?
  type = "DocumentDbCollectionSink"
  writeBehavior = ?
}

For DynamicsCrmSink, use:

{
  alternateKeyName = ?
  ignoreNullValues = ?
  type = "DynamicsCrmSink"
  writeBehavior = "string"
}

For DynamicsSink, use:

{
  alternateKeyName = ?
  ignoreNullValues = ?
  type = "DynamicsSink"
  writeBehavior = "string"
}

For FileSystemSink, use:

{
  copyBehavior = ?
  type = "FileSystemSink"
}

For IcebergSink, use:

{
  formatSettings = {
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "IcebergSink"
}

For InformixSink, use:

{
  preCopyScript = ?
  type = "InformixSink"
}

For JsonSink, use:

{
  formatSettings = {
    filePattern = ?
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "JsonSink"
}

For LakeHouseTableSink, use:

{
  partitionNameList = ?
  partitionOption = ?
  tableActionOption = ?
  type = "LakeHouseTableSink"
}

For MicrosoftAccessSink, use:

{
  preCopyScript = ?
  type = "MicrosoftAccessSink"
}

For MongoDbAtlasSink, use:

{
  type = "MongoDbAtlasSink"
  writeBehavior = ?
}

For MongoDbV2Sink, use:

{
  type = "MongoDbV2Sink"
  writeBehavior = ?
}

For OdbcSink, use:

{
  preCopyScript = ?
  type = "OdbcSink"
}

For OracleSink, use:

{
  preCopyScript = ?
  type = "OracleSink"
}

For OrcSink, use:

{
  formatSettings = {
    fileNamePrefix = ?
    maxRowsPerFile = ?
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "OrcSink"
}

For ParquetSink, use:

{
  formatSettings = {
    fileNamePrefix = ?
    maxRowsPerFile = ?
    type = "string"
  }
  storeSettings = {
    copyBehavior = ?
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    metadata = [
      {
        name = ?
        value = ?
      }
    ]
    type = "string"
    // For remaining properties, see StoreWriteSettings objects
  }
  type = "ParquetSink"
}

For RestSink, use:

{
  additionalHeaders = ?
  httpCompressionType = ?
  httpRequestTimeout = ?
  requestInterval = ?
  requestMethod = ?
  type = "RestSink"
}

For SalesforceServiceCloudSink, use:

{
  externalIdFieldName = ?
  ignoreNullValues = ?
  type = "SalesforceServiceCloudSink"
  writeBehavior = "string"
}

For SalesforceServiceCloudV2Sink, use:

{
  externalIdFieldName = ?
  ignoreNullValues = ?
  type = "SalesforceServiceCloudV2Sink"
  writeBehavior = "string"
}

For SalesforceSink, use:

{
  externalIdFieldName = ?
  ignoreNullValues = ?
  type = "SalesforceSink"
  writeBehavior = "string"
}

For SalesforceV2Sink, use:

{
  externalIdFieldName = ?
  ignoreNullValues = ?
  type = "SalesforceV2Sink"
  writeBehavior = "string"
}

For SapCloudForCustomerSink, use:

{
  httpRequestTimeout = ?
  type = "SapCloudForCustomerSink"
  writeBehavior = "string"
}

For SnowflakeSink, use:

{
  importSettings = {
    additionalCopyOptions = {
      {customized property} = ?
    }
    additionalFormatOptions = {
      {customized property} = ?
    }
    storageIntegration = ?
    type = "string"
  }
  preCopyScript = ?
  type = "SnowflakeSink"
}

For SnowflakeV2Sink, use:

{
  importSettings = {
    additionalCopyOptions = {
      {customized property} = ?
    }
    additionalFormatOptions = {
      {customized property} = ?
    }
    storageIntegration = ?
    type = "string"
  }
  preCopyScript = ?
  type = "SnowflakeV2Sink"
}

For SqlDWSink, use:

{
  allowCopyCommand = ?
  allowPolyBase = ?
  copyCommandSettings = {
    additionalOptions = {
      {customized property} = "string"
    }
    defaultValues = [
      {
        columnName = ?
        defaultValue = ?
      }
    ]
  }
  polyBaseSettings = {
    rejectSampleValue = ?
    rejectType = "string"
    rejectValue = ?
    useTypeDefault = ?
  }
  preCopyScript = ?
  sqlWriterUseTableLock = ?
  tableOption = ?
  type = "SqlDWSink"
  upsertSettings = {
    interimSchemaName = ?
    keys = ?
  }
  writeBehavior = ?
}

For SqlMISink, use:

{
  preCopyScript = ?
  sqlWriterStoredProcedureName = ?
  sqlWriterTableType = ?
  sqlWriterUseTableLock = ?
  storedProcedureParameters = ?
  storedProcedureTableTypeParameterName = ?
  tableOption = ?
  type = "SqlMISink"
  upsertSettings = {
    interimSchemaName = ?
    keys = ?
    useTempDB = ?
  }
  writeBehavior = ?
}

For SqlServerSink, use:

{
  preCopyScript = ?
  sqlWriterStoredProcedureName = ?
  sqlWriterTableType = ?
  sqlWriterUseTableLock = ?
  storedProcedureParameters = ?
  storedProcedureTableTypeParameterName = ?
  tableOption = ?
  type = "SqlServerSink"
  upsertSettings = {
    interimSchemaName = ?
    keys = ?
    useTempDB = ?
  }
  writeBehavior = ?
}

For SqlSink, use:

{
  preCopyScript = ?
  sqlWriterStoredProcedureName = ?
  sqlWriterTableType = ?
  sqlWriterUseTableLock = ?
  storedProcedureParameters = ?
  storedProcedureTableTypeParameterName = ?
  tableOption = ?
  type = "SqlSink"
  upsertSettings = {
    interimSchemaName = ?
    keys = ?
    useTempDB = ?
  }
  writeBehavior = ?
}

For WarehouseSink, use:

{
  allowCopyCommand = ?
  copyCommandSettings = {
    additionalOptions = {
      {customized property} = "string"
    }
    defaultValues = [
      {
        columnName = ?
        defaultValue = ?
      }
    ]
  }
  preCopyScript = ?
  tableOption = ?
  type = "WarehouseSink"
  writeBehavior = ?
}

FormatReadSettings objects

Set the type property to specify the type of object.

For BinaryReadSettings, use:

{
  compressionProperties = {
    type = "string"
    // For remaining properties, see CompressionReadSettings objects
  }
  type = "BinaryReadSettings"
}

For DelimitedTextReadSettings, use:

{
  compressionProperties = {
    type = "string"
    // For remaining properties, see CompressionReadSettings objects
  }
  skipLineCount = ?
  type = "DelimitedTextReadSettings"
}

For JsonReadSettings, use:

{
  compressionProperties = {
    type = "string"
    // For remaining properties, see CompressionReadSettings objects
  }
  type = "JsonReadSettings"
}

For ParquetReadSettings, use:

{
  compressionProperties = {
    type = "string"
    // For remaining properties, see CompressionReadSettings objects
  }
  type = "ParquetReadSettings"
}

For XmlReadSettings, use:

{
  compressionProperties = {
    type = "string"
    // For remaining properties, see CompressionReadSettings objects
  }
  detectDataType = ?
  namespacePrefixes = ?
  namespaces = ?
  type = "XmlReadSettings"
  validationMode = ?
}

CopySource objects

Set the type property to specify the type of object.

For AmazonMWSSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "AmazonMWSSource"
}

For AmazonRdsForOracleSource, use:

{
  additionalColumns = ?
  oracleReaderQuery = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionNames = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  type = "AmazonRdsForOracleSource"
}

For AmazonRdsForSqlServerSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  produceAdditionalTypes = ?
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "AmazonRdsForSqlServerSource"
}

For AmazonRedshiftSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  redshiftUnloadSettings = {
    bucketName = ?
    s3LinkedServiceName = {
      parameters = {
        {customized property} = ?
      }
      referenceName = "string"
      type = "string"
    }
  }
  type = "AmazonRedshiftSource"
}

For AvroSource, use:

{
  additionalColumns = ?
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "AvroSource"
}

For AzureBlobFSSource, use:

{
  recursive = ?
  skipHeaderLineCount = ?
  treatEmptyAsNull = ?
  type = "AzureBlobFSSource"
}

For AzureDataExplorerSource, use:

{
  additionalColumns = ?
  noTruncation = ?
  query = ?
  queryTimeout = ?
  type = "AzureDataExplorerSource"
}

For AzureDataLakeStoreSource, use:

{
  recursive = ?
  type = "AzureDataLakeStoreSource"
}

For AzureDatabricksDeltaLakeSource, use:

{
  exportSettings = {
    dateFormat = ?
    timestampFormat = ?
    type = "string"
  }
  query = ?
  type = "AzureDatabricksDeltaLakeSource"
}

For AzureMariaDBSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "AzureMariaDBSource"
}

For AzureMySqlSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "AzureMySqlSource"
}

For AzurePostgreSqlSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "AzurePostgreSqlSource"
}

For AzureSqlSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  produceAdditionalTypes = ?
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "AzureSqlSource"
}

For AzureTableSource, use:

{
  additionalColumns = ?
  azureTableSourceIgnoreTableNotFound = ?
  azureTableSourceQuery = ?
  queryTimeout = ?
  type = "AzureTableSource"
}

For BinarySource, use:

{
  formatSettings = {
    compressionProperties = {
      type = "string"
      // For remaining properties, see CompressionReadSettings objects
    }
    type = "string"
  }
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "BinarySource"
}

For BlobSource, use:

{
  recursive = ?
  skipHeaderLineCount = ?
  treatEmptyAsNull = ?
  type = "BlobSource"
}

For CassandraSource, use:

{
  additionalColumns = ?
  consistencyLevel = "string"
  query = ?
  queryTimeout = ?
  type = "CassandraSource"
}

For CommonDataServiceForAppsSource, use:

{
  additionalColumns = ?
  query = ?
  type = "CommonDataServiceForAppsSource"
}

For ConcurSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ConcurSource"
}

For CosmosDbMongoDbApiSource, use:

{
  additionalColumns = ?
  batchSize = ?
  cursorMethods = {
    limit = ?
    project = ?
    skip = ?
    sort = ?
  }
  filter = ?
  queryTimeout = ?
  type = "CosmosDbMongoDbApiSource"
}

For CosmosDbSqlApiSource, use:

{
  additionalColumns = ?
  detectDatetime = ?
  pageSize = ?
  preferredRegions = ?
  query = ?
  type = "CosmosDbSqlApiSource"
}

For CouchbaseSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "CouchbaseSource"
}

For Db2Source, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "Db2Source"
}

For DelimitedTextSource, use:

{
  additionalColumns = ?
  formatSettings = {
    compressionProperties = {
      type = "string"
      // For remaining properties, see CompressionReadSettings objects
    }
    skipLineCount = ?
    type = "string"
  }
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "DelimitedTextSource"
}

For DocumentDbCollectionSource, use:

{
  additionalColumns = ?
  nestingSeparator = ?
  query = ?
  queryTimeout = ?
  type = "DocumentDbCollectionSource"
}

For DrillSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "DrillSource"
}

For DynamicsAXSource, use:

{
  additionalColumns = ?
  httpRequestTimeout = ?
  query = ?
  queryTimeout = ?
  type = "DynamicsAXSource"
}

For DynamicsCrmSource, use:

{
  additionalColumns = ?
  query = ?
  type = "DynamicsCrmSource"
}

For DynamicsSource, use:

{
  additionalColumns = ?
  query = ?
  type = "DynamicsSource"
}

For EloquaSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "EloquaSource"
}

For ExcelSource, use:

{
  additionalColumns = ?
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "ExcelSource"
}

For FileSystemSource, use:

{
  additionalColumns = ?
  recursive = ?
  type = "FileSystemSource"
}

For GoogleAdWordsSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "GoogleAdWordsSource"
}

For GoogleBigQuerySource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "GoogleBigQuerySource"
}

For GoogleBigQueryV2Source, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "GoogleBigQueryV2Source"
}

For GreenplumSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "GreenplumSource"
}

For HBaseSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "HBaseSource"
}

For HdfsSource, use:

{
  distcpSettings = {
    distcpOptions = ?
    resourceManagerEndpoint = ?
    tempScriptPath = ?
  }
  recursive = ?
  type = "HdfsSource"
}

For HiveSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "HiveSource"
}

For HttpSource, use:

{
  httpRequestTimeout = ?
  type = "HttpSource"
}

For HubspotSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "HubspotSource"
}

For ImpalaSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ImpalaSource"
}

For InformixSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "InformixSource"
}

For JiraSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "JiraSource"
}

For JsonSource, use:

{
  additionalColumns = ?
  formatSettings = {
    compressionProperties = {
      type = "string"
      // For remaining properties, see CompressionReadSettings objects
    }
    type = "string"
  }
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "JsonSource"
}

For LakeHouseTableSource, use:

{
  additionalColumns = ?
  timestampAsOf = ?
  type = "LakeHouseTableSource"
  versionAsOf = ?
}

For MagentoSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "MagentoSource"
}

For MariaDBSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "MariaDBSource"
}

For MarketoSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "MarketoSource"
}

For MicrosoftAccessSource, use:

{
  additionalColumns = ?
  query = ?
  type = "MicrosoftAccessSource"
}

For MongoDbAtlasSource, use:

{
  additionalColumns = ?
  batchSize = ?
  cursorMethods = {
    limit = ?
    project = ?
    skip = ?
    sort = ?
  }
  filter = ?
  queryTimeout = ?
  type = "MongoDbAtlasSource"
}

For MongoDbSource, use:

{
  additionalColumns = ?
  query = ?
  type = "MongoDbSource"
}

For MongoDbV2Source, use:

{
  additionalColumns = ?
  batchSize = ?
  cursorMethods = {
    limit = ?
    project = ?
    skip = ?
    sort = ?
  }
  filter = ?
  queryTimeout = ?
  type = "MongoDbV2Source"
}

For MySqlSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "MySqlSource"
}

For NetezzaSource, use:

{
  additionalColumns = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  query = ?
  queryTimeout = ?
  type = "NetezzaSource"
}

For ODataSource, use:

{
  additionalColumns = ?
  httpRequestTimeout = ?
  query = ?
  type = "ODataSource"
}

For OdbcSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "OdbcSource"
}

For Office365Source, use:

{
  allowedGroups = ?
  dateFilterColumn = ?
  endTime = ?
  outputColumns = ?
  startTime = ?
  type = "Office365Source"
  userScopeFilterUri = ?
}

For OracleServiceCloudSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "OracleServiceCloudSource"
}

For OracleSource, use:

{
  additionalColumns = ?
  oracleReaderQuery = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionNames = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  type = "OracleSource"
}

For OrcSource, use:

{
  additionalColumns = ?
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "OrcSource"
}

For ParquetSource, use:

{
  additionalColumns = ?
  formatSettings = {
    compressionProperties = {
      type = "string"
      // For remaining properties, see CompressionReadSettings objects
    }
    type = "string"
  }
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "ParquetSource"
}

For PaypalSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "PaypalSource"
}

For PhoenixSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "PhoenixSource"
}

For PostgreSqlSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "PostgreSqlSource"
}

For PostgreSqlV2Source, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "PostgreSqlV2Source"
}

For PrestoSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "PrestoSource"
}

For QuickBooksSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "QuickBooksSource"
}

For RelationalSource, use:

{
  additionalColumns = ?
  query = ?
  type = "RelationalSource"
}

For ResponsysSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ResponsysSource"
}

For RestSource, use:

{
  additionalColumns = ?
  additionalHeaders = ?
  httpRequestTimeout = ?
  paginationRules = ?
  requestBody = ?
  requestInterval = ?
  requestMethod = ?
  type = "RestSource"
}

For SalesforceMarketingCloudSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "SalesforceMarketingCloudSource"
}

For SalesforceServiceCloudSource, use:

{
  additionalColumns = ?
  query = ?
  readBehavior = ?
  type = "SalesforceServiceCloudSource"
}

For SalesforceServiceCloudV2Source, use:

{
  additionalColumns = ?
  includeDeletedObjects = ?
  query = ?
  SOQLQuery = ?
  type = "SalesforceServiceCloudV2Source"
}

For SalesforceSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  readBehavior = ?
  type = "SalesforceSource"
}

For SalesforceV2Source, use:

{
  additionalColumns = ?
  includeDeletedObjects = ?
  pageSize = ?
  query = ?
  queryTimeout = ?
  SOQLQuery = ?
  type = "SalesforceV2Source"
}

For SapBwSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "SapBwSource"
}

For SapCloudForCustomerSource, use:

{
  additionalColumns = ?
  httpRequestTimeout = ?
  query = ?
  queryTimeout = ?
  type = "SapCloudForCustomerSource"
}

For SapEccSource, use:

{
  additionalColumns = ?
  httpRequestTimeout = ?
  query = ?
  queryTimeout = ?
  type = "SapEccSource"
}

For SapHanaSource, use:

{
  additionalColumns = ?
  packetSize = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
  }
  query = ?
  queryTimeout = ?
  type = "SapHanaSource"
}

For SapOdpSource, use:

{
  additionalColumns = ?
  extractionMode = ?
  projection = ?
  queryTimeout = ?
  selection = ?
  subscriberProcess = ?
  type = "SapOdpSource"
}

For SapOpenHubSource, use:

{
  additionalColumns = ?
  baseRequestId = ?
  customRfcReadTableFunctionModule = ?
  excludeLastRequest = ?
  queryTimeout = ?
  sapDataColumnDelimiter = ?
  type = "SapOpenHubSource"
}

For SapTableSource, use:

{
  additionalColumns = ?
  batchSize = ?
  customRfcReadTableFunctionModule = ?
  partitionOption = ?
  partitionSettings = {
    maxPartitionsNumber = ?
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  rfcTableFields = ?
  rfcTableOptions = ?
  rowCount = ?
  rowSkips = ?
  sapDataColumnDelimiter = ?
  type = "SapTableSource"
}

For ServiceNowSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ServiceNowSource"
}

For ServiceNowV2Source, use:

{
  additionalColumns = ?
  expression = {
    operands = [
      ...
    ]
    operators = [
      "string"
    ]
    type = "string"
    value = "string"
  }
  pageSize = ?
  queryTimeout = ?
  type = "ServiceNowV2Source"
}

For SharePointOnlineListSource, use:

{
  httpRequestTimeout = ?
  query = ?
  type = "SharePointOnlineListSource"
}

For ShopifySource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ShopifySource"
}

For SnowflakeSource, use:

{
  exportSettings = {
    additionalCopyOptions = {
      {customized property} = ?
    }
    additionalFormatOptions = {
      {customized property} = ?
    }
    storageIntegration = ?
    type = "string"
  }
  query = ?
  type = "SnowflakeSource"
}

For SnowflakeV2Source, use:

{
  exportSettings = {
    additionalCopyOptions = {
      {customized property} = ?
    }
    additionalFormatOptions = {
      {customized property} = ?
    }
    storageIntegration = ?
    type = "string"
  }
  query = ?
  type = "SnowflakeV2Source"
}

For SparkSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "SparkSource"
}

For SqlDWSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "SqlDWSource"
}

For SqlMISource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  produceAdditionalTypes = ?
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "SqlMISource"
}

For SqlServerSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  produceAdditionalTypes = ?
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "SqlServerSource"
}

For SqlSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "SqlSource"
}

For SquareSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "SquareSource"
}

For SybaseSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "SybaseSource"
}

For TeradataSource, use:

{
  additionalColumns = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  query = ?
  queryTimeout = ?
  type = "TeradataSource"
}

For VerticaSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "VerticaSource"
}

For WarehouseSource, use:

{
  additionalColumns = ?
  isolationLevel = ?
  partitionOption = ?
  partitionSettings = {
    partitionColumnName = ?
    partitionLowerBound = ?
    partitionUpperBound = ?
  }
  queryTimeout = ?
  sqlReaderQuery = ?
  sqlReaderStoredProcedureName = ?
  storedProcedureParameters = ?
  type = "WarehouseSource"
}

For XeroSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "XeroSource"
}

For ZohoSource, use:

{
  additionalColumns = ?
  query = ?
  queryTimeout = ?
  type = "ZohoSource"
}

For WebSource, use:

{
  additionalColumns = ?
  type = "WebSource"
}

For XmlSource, use:

{
  additionalColumns = ?
  formatSettings = {
    compressionProperties = {
      type = "string"
      // For remaining properties, see CompressionReadSettings objects
    }
    detectDataType = ?
    namespacePrefixes = ?
    namespaces = ?
    type = "string"
    validationMode = ?
  }
  storeSettings = {
    disableMetricsCollection = ?
    maxConcurrentConnections = ?
    type = "string"
    // For remaining properties, see StoreReadSettings objects
  }
  type = "XmlSource"
}

SecretBase objects

Set the type property to specify the type of object.

For AzureKeyVaultSecret, use:

{
  secretName = ?
  secretVersion = ?
  store = {
    parameters = {
      {customized property} = ?
    }
    referenceName = "string"
    type = "string"
  }
  type = "AzureKeyVaultSecret"
}

For SecureString, use:

{
  type = "SecureString"
  value = "string"
}