Note
Access to this page requires authorization. You can try signing in or changing directories.
Bicep resource definition
The workspaces/bigDataPools resource type can be deployed with operations that target:
- Resource groups - See resource group deployment commands
For a list of changed properties in each API version, see change log.
Resource format
To create a Microsoft.Synapse/workspaces/bigDataPools resource, add the following Bicep to your template.
resource symbolicname 'Microsoft.Synapse/workspaces/bigDataPools@2021-06-01' = {
parent: resourceSymbolicName
location: 'string'
name: 'string'
properties: {
autoPause: {
delayInMinutes: int
enabled: bool
}
autoScale: {
enabled: bool
maxNodeCount: int
minNodeCount: int
}
cacheSize: int
customLibraries: [
{
containerName: 'string'
name: 'string'
path: 'string'
type: 'string'
}
]
defaultSparkLogFolder: 'string'
dynamicExecutorAllocation: {
enabled: bool
maxExecutors: int
minExecutors: int
}
isAutotuneEnabled: bool
isComputeIsolationEnabled: bool
libraryRequirements: {
content: 'string'
filename: 'string'
}
nodeCount: int
nodeSize: 'string'
nodeSizeFamily: 'string'
provisioningState: 'string'
sessionLevelPackagesEnabled: bool
sparkConfigProperties: {
configurationType: 'string'
content: 'string'
filename: 'string'
}
sparkEventsFolder: 'string'
sparkVersion: 'string'
}
tags: {
{customized property}: 'string'
}
}
Property Values
Microsoft.Synapse/workspaces/bigDataPools
| Name | Description | Value |
|---|---|---|
| location | The geo-location where the resource lives | string (required) |
| name | The resource name | string (required) |
| parent | In Bicep, you can specify the parent resource for a child resource. You only need to add this property when the child resource is declared outside of the parent resource. For more information, see Child resource outside parent resource. | Symbolic name for resource of type: workspaces |
| properties | Big Data pool properties | BigDataPoolResourceProperties |
| tags | Resource tags | Dictionary of tag names and values. See Tags in templates |
AutoPauseProperties
| Name | Description | Value |
|---|---|---|
| delayInMinutes | Number of minutes of idle time before the Big Data pool is automatically paused. | int |
| enabled | Whether auto-pausing is enabled for the Big Data pool. | bool |
AutoScaleProperties
| Name | Description | Value |
|---|---|---|
| enabled | Whether automatic scaling is enabled for the Big Data pool. | bool |
| maxNodeCount | The maximum number of nodes the Big Data pool can support. | int |
| minNodeCount | The minimum number of nodes the Big Data pool can support. | int |
BigDataPoolResourceProperties
| Name | Description | Value |
|---|---|---|
| autoPause | Auto-pausing properties | AutoPauseProperties |
| autoScale | Auto-scaling properties | AutoScaleProperties |
| cacheSize | The cache size | int |
| customLibraries | List of custom libraries/packages associated with the spark pool. | LibraryInfo[] |
| defaultSparkLogFolder | The default folder where Spark logs will be written. | string |
| dynamicExecutorAllocation | Dynamic Executor Allocation | DynamicExecutorAllocation |
| isAutotuneEnabled | Whether autotune is required or not. | bool |
| isComputeIsolationEnabled | Whether compute isolation is required or not. | bool |
| libraryRequirements | Library version requirements | LibraryRequirements |
| nodeCount | The number of nodes in the Big Data pool. | int |
| nodeSize | The level of compute power that each node in the Big Data pool has. | 'Large' 'Medium' 'None' 'Small' 'XLarge' 'XXLarge' 'XXXLarge' |
| nodeSizeFamily | The kind of nodes that the Big Data pool provides. | 'HardwareAcceleratedFPGA' 'HardwareAcceleratedGPU' 'MemoryOptimized' 'None' |
| provisioningState | The state of the Big Data pool. | string |
| sessionLevelPackagesEnabled | Whether session level packages enabled. | bool |
| sparkConfigProperties | Spark configuration file to specify additional properties | SparkConfigProperties |
| sparkEventsFolder | The Spark events folder | string |
| sparkVersion | The Apache Spark version. | string |
DynamicExecutorAllocation
| Name | Description | Value |
|---|---|---|
| enabled | Indicates whether Dynamic Executor Allocation is enabled or not. | bool |
| maxExecutors | The maximum number of executors alloted | int |
| minExecutors | The minimum number of executors alloted | int |
LibraryInfo
| Name | Description | Value |
|---|---|---|
| containerName | Storage blob container name. | string |
| name | Name of the library. | string |
| path | Storage blob path of library. | string |
| type | Type of the library. | string |
LibraryRequirements
| Name | Description | Value |
|---|---|---|
| content | The library requirements. | string |
| filename | The filename of the library requirements file. | string |
SparkConfigProperties
| Name | Description | Value |
|---|---|---|
| configurationType | The type of the spark config properties file. | 'Artifact' 'File' |
| content | The spark config properties. | string |
| filename | The filename of the spark config properties file. | string |
TrackedResourceTags
| Name | Description | Value |
|---|---|---|
Usage Examples
Bicep Samples
A basic example of deploying Synapse Spark Pool.
// Base name shared by every resource in this sample deployment.
param resourceName string = 'acctest0001'
// Azure region in which all resources are created.
param location string = 'westeurope'
@description('The SQL administrator login for the Synapse workspace')
param sqlAdministratorLogin string
@secure()
@description('The SQL administrator login password for the Synapse workspace')
param sqlAdministratorLoginPassword string
// Existing default blob service of the storage account declared below.
// Bicep resolves declaration order automatically, so the forward reference is valid.
resource blobService 'Microsoft.Storage/storageAccounts/blobServices@2022-09-01' existing = {
parent: storageAccount
name: 'default'
}
// StorageV2 account that backs the Synapse workspace's default data lake storage.
resource storageAccount 'Microsoft.Storage/storageAccounts@2021-09-01' = {
name: resourceName
location: location
kind: 'StorageV2'
properties: {}
sku: {
name: 'Standard_LRS'
}
}
// Synapse workspace that hosts the Spark (big data) pool declared below.
resource workspace 'Microsoft.Synapse/workspaces@2021-06-01' = {
name: resourceName
location: location
properties: {
defaultDataLakeStorage: {
// DFS (Data Lake Gen2) endpoint of the storage account above.
accountUrl: storageAccount.properties.primaryEndpoints.dfs
filesystem: container.name
}
// NOTE(review): an empty string appears to opt out of a managed virtual network — confirm.
managedVirtualNetwork: ''
publicNetworkAccess: 'Enabled'
sqlAdministratorLogin: sqlAdministratorLogin
sqlAdministratorLoginPassword: sqlAdministratorLoginPassword
}
}
// Synapse Spark (big data) pool attached to the workspace above.
// Fixed: the original sample referenced API version 2021-06-01-preview,
// which is inconsistent with the 2021-06-01 version documented on this page.
resource bigDataPool 'Microsoft.Synapse/workspaces/bigDataPools@2021-06-01' = {
parent: workspace
name: resourceName
location: location
properties: {
autoPause: {
enabled: false
}
autoScale: {
enabled: false
}
cacheSize: 0
defaultSparkLogFolder: '/logs'
dynamicExecutorAllocation: {
enabled: false
maxExecutors: 0
minExecutors: 0
}
isComputeIsolationEnabled: false
// Fixed node count; it is used because autoScale is disabled above.
nodeCount: 3
nodeSize: 'Small'
nodeSizeFamily: 'MemoryOptimized'
sessionLevelPackagesEnabled: false
sparkEventsFolder: '/events'
sparkVersion: '2.4'
}
}
// Blob container used as the default filesystem of the workspace's data lake storage.
resource container 'Microsoft.Storage/storageAccounts/blobServices/containers@2022-09-01' = {
parent: blobService
name: resourceName
properties: {
metadata: {
key: 'value'
}
}
}
ARM template resource definition
The workspaces/bigDataPools resource type can be deployed with operations that target:
- Resource groups - See resource group deployment commands
For a list of changed properties in each API version, see change log.
Resource format
To create a Microsoft.Synapse/workspaces/bigDataPools resource, add the following JSON to your template.
{
"type": "Microsoft.Synapse/workspaces/bigDataPools",
"apiVersion": "2021-06-01",
"name": "string",
"location": "string",
"properties": {
"autoPause": {
"delayInMinutes": "int",
"enabled": "bool"
},
"autoScale": {
"enabled": "bool",
"maxNodeCount": "int",
"minNodeCount": "int"
},
"cacheSize": "int",
"customLibraries": [
{
"containerName": "string",
"name": "string",
"path": "string",
"type": "string"
}
],
"defaultSparkLogFolder": "string",
"dynamicExecutorAllocation": {
"enabled": "bool",
"maxExecutors": "int",
"minExecutors": "int"
},
"isAutotuneEnabled": "bool",
"isComputeIsolationEnabled": "bool",
"libraryRequirements": {
"content": "string",
"filename": "string"
},
"nodeCount": "int",
"nodeSize": "string",
"nodeSizeFamily": "string",
"provisioningState": "string",
"sessionLevelPackagesEnabled": "bool",
"sparkConfigProperties": {
"configurationType": "string",
"content": "string",
"filename": "string"
},
"sparkEventsFolder": "string",
"sparkVersion": "string"
},
"tags": {
"{customized property}": "string"
}
}
Property Values
Microsoft.Synapse/workspaces/bigDataPools
| Name | Description | Value |
|---|---|---|
| apiVersion | The api version | '2021-06-01' |
| location | The geo-location where the resource lives | string (required) |
| name | The resource name | string (required) |
| properties | Big Data pool properties | BigDataPoolResourceProperties |
| tags | Resource tags | Dictionary of tag names and values. See Tags in templates |
| type | The resource type | 'Microsoft.Synapse/workspaces/bigDataPools' |
AutoPauseProperties
| Name | Description | Value |
|---|---|---|
| delayInMinutes | Number of minutes of idle time before the Big Data pool is automatically paused. | int |
| enabled | Whether auto-pausing is enabled for the Big Data pool. | bool |
AutoScaleProperties
| Name | Description | Value |
|---|---|---|
| enabled | Whether automatic scaling is enabled for the Big Data pool. | bool |
| maxNodeCount | The maximum number of nodes the Big Data pool can support. | int |
| minNodeCount | The minimum number of nodes the Big Data pool can support. | int |
BigDataPoolResourceProperties
| Name | Description | Value |
|---|---|---|
| autoPause | Auto-pausing properties | AutoPauseProperties |
| autoScale | Auto-scaling properties | AutoScaleProperties |
| cacheSize | The cache size | int |
| customLibraries | List of custom libraries/packages associated with the spark pool. | LibraryInfo[] |
| defaultSparkLogFolder | The default folder where Spark logs will be written. | string |
| dynamicExecutorAllocation | Dynamic Executor Allocation | DynamicExecutorAllocation |
| isAutotuneEnabled | Whether autotune is required or not. | bool |
| isComputeIsolationEnabled | Whether compute isolation is required or not. | bool |
| libraryRequirements | Library version requirements | LibraryRequirements |
| nodeCount | The number of nodes in the Big Data pool. | int |
| nodeSize | The level of compute power that each node in the Big Data pool has. | 'Large' 'Medium' 'None' 'Small' 'XLarge' 'XXLarge' 'XXXLarge' |
| nodeSizeFamily | The kind of nodes that the Big Data pool provides. | 'HardwareAcceleratedFPGA' 'HardwareAcceleratedGPU' 'MemoryOptimized' 'None' |
| provisioningState | The state of the Big Data pool. | string |
| sessionLevelPackagesEnabled | Whether session level packages enabled. | bool |
| sparkConfigProperties | Spark configuration file to specify additional properties | SparkConfigProperties |
| sparkEventsFolder | The Spark events folder | string |
| sparkVersion | The Apache Spark version. | string |
DynamicExecutorAllocation
| Name | Description | Value |
|---|---|---|
| enabled | Indicates whether Dynamic Executor Allocation is enabled or not. | bool |
| maxExecutors | The maximum number of executors alloted | int |
| minExecutors | The minimum number of executors alloted | int |
LibraryInfo
| Name | Description | Value |
|---|---|---|
| containerName | Storage blob container name. | string |
| name | Name of the library. | string |
| path | Storage blob path of library. | string |
| type | Type of the library. | string |
LibraryRequirements
| Name | Description | Value |
|---|---|---|
| content | The library requirements. | string |
| filename | The filename of the library requirements file. | string |
SparkConfigProperties
| Name | Description | Value |
|---|---|---|
| configurationType | The type of the spark config properties file. | 'Artifact' 'File' |
| content | The spark config properties. | string |
| filename | The filename of the spark config properties file. | string |
TrackedResourceTags
| Name | Description | Value |
|---|---|---|
Usage Examples
Azure Quickstart Templates
The following Azure Quickstart templates deploy this resource type.
| Template | Description |
|---|---|
| Azure Synapse Proof-of-Concept | This template creates a proof of concept environment for Azure Synapse, including SQL Pools and optional Apache Spark Pools |
Terraform (AzAPI provider) resource definition
The workspaces/bigDataPools resource type can be deployed with operations that target:
- Resource groups - See resource group deployment commands
For a list of changed properties in each API version, see change log.
Resource format
To create a Microsoft.Synapse/workspaces/bigDataPools resource, add the following Terraform to your template.
resource "azapi_resource" "symbolicname" {
type = "Microsoft.Synapse/workspaces/bigDataPools@2021-06-01"
name = "string"
parent_id = "string"
location = "string"
tags = {
{customized property} = "string"
}
body = {
properties = {
autoPause = {
delayInMinutes = int
enabled = bool
}
autoScale = {
enabled = bool
maxNodeCount = int
minNodeCount = int
}
cacheSize = int
customLibraries = [
{
containerName = "string"
name = "string"
path = "string"
type = "string"
}
]
defaultSparkLogFolder = "string"
dynamicExecutorAllocation = {
enabled = bool
maxExecutors = int
minExecutors = int
}
isAutotuneEnabled = bool
isComputeIsolationEnabled = bool
libraryRequirements = {
content = "string"
filename = "string"
}
nodeCount = int
nodeSize = "string"
nodeSizeFamily = "string"
provisioningState = "string"
sessionLevelPackagesEnabled = bool
sparkConfigProperties = {
configurationType = "string"
content = "string"
filename = "string"
}
sparkEventsFolder = "string"
sparkVersion = "string"
}
}
}
Property Values
Microsoft.Synapse/workspaces/bigDataPools
| Name | Description | Value |
|---|---|---|
| location | The geo-location where the resource lives | string (required) |
| name | The resource name | string (required) |
| parent_id | The ID of the resource that is the parent for this resource. | ID for resource of type: workspaces |
| properties | Big Data pool properties | BigDataPoolResourceProperties |
| tags | Resource tags | Dictionary of tag names and values. |
| type | The resource type | "Microsoft.Synapse/workspaces/bigDataPools@2021-06-01" |
AutoPauseProperties
| Name | Description | Value |
|---|---|---|
| delayInMinutes | Number of minutes of idle time before the Big Data pool is automatically paused. | int |
| enabled | Whether auto-pausing is enabled for the Big Data pool. | bool |
AutoScaleProperties
| Name | Description | Value |
|---|---|---|
| enabled | Whether automatic scaling is enabled for the Big Data pool. | bool |
| maxNodeCount | The maximum number of nodes the Big Data pool can support. | int |
| minNodeCount | The minimum number of nodes the Big Data pool can support. | int |
BigDataPoolResourceProperties
| Name | Description | Value |
|---|---|---|
| autoPause | Auto-pausing properties | AutoPauseProperties |
| autoScale | Auto-scaling properties | AutoScaleProperties |
| cacheSize | The cache size | int |
| customLibraries | List of custom libraries/packages associated with the spark pool. | LibraryInfo[] |
| defaultSparkLogFolder | The default folder where Spark logs will be written. | string |
| dynamicExecutorAllocation | Dynamic Executor Allocation | DynamicExecutorAllocation |
| isAutotuneEnabled | Whether autotune is required or not. | bool |
| isComputeIsolationEnabled | Whether compute isolation is required or not. | bool |
| libraryRequirements | Library version requirements | LibraryRequirements |
| nodeCount | The number of nodes in the Big Data pool. | int |
| nodeSize | The level of compute power that each node in the Big Data pool has. | 'Large' 'Medium' 'None' 'Small' 'XLarge' 'XXLarge' 'XXXLarge' |
| nodeSizeFamily | The kind of nodes that the Big Data pool provides. | 'HardwareAcceleratedFPGA' 'HardwareAcceleratedGPU' 'MemoryOptimized' 'None' |
| provisioningState | The state of the Big Data pool. | string |
| sessionLevelPackagesEnabled | Whether session level packages enabled. | bool |
| sparkConfigProperties | Spark configuration file to specify additional properties | SparkConfigProperties |
| sparkEventsFolder | The Spark events folder | string |
| sparkVersion | The Apache Spark version. | string |
DynamicExecutorAllocation
| Name | Description | Value |
|---|---|---|
| enabled | Indicates whether Dynamic Executor Allocation is enabled or not. | bool |
| maxExecutors | The maximum number of executors alloted | int |
| minExecutors | The minimum number of executors alloted | int |
LibraryInfo
| Name | Description | Value |
|---|---|---|
| containerName | Storage blob container name. | string |
| name | Name of the library. | string |
| path | Storage blob path of library. | string |
| type | Type of the library. | string |
LibraryRequirements
| Name | Description | Value |
|---|---|---|
| content | The library requirements. | string |
| filename | The filename of the library requirements file. | string |
SparkConfigProperties
| Name | Description | Value |
|---|---|---|
| configurationType | The type of the spark config properties file. | 'Artifact' 'File' |
| content | The spark config properties. | string |
| filename | The filename of the spark config properties file. | string |
TrackedResourceTags
| Name | Description | Value |
|---|---|---|
Usage Examples
Terraform Samples
A basic example of deploying Synapse Spark Pool.
# Require the AzAPI provider, the only provider used by this sample.
terraform {
required_providers {
azapi = {
source = "Azure/azapi"
}
}
}
provider "azapi" {
skip_provider_registration = false
}
# Base name shared by every resource in this sample deployment.
variable "resource_name" {
type    = string
default = "acctest0001"
}
# Azure region in which all resources are created.
variable "location" {
type    = string
default = "westeurope"
}
variable "sql_administrator_login" {
type        = string
description = "The SQL administrator login for the Synapse workspace"
}
variable "sql_administrator_login_password" {
type        = string
description = "The SQL administrator login password for the Synapse workspace"
sensitive   = true
}
# Resource group that contains every other resource in this sample.
resource "azapi_resource" "resourceGroup" {
type     = "Microsoft.Resources/resourceGroups@2020-06-01"
name     = var.resource_name
location = var.location
}
# StorageV2 account that backs the Synapse workspace's default data lake storage.
resource "azapi_resource" "storageAccount" {
type      = "Microsoft.Storage/storageAccounts@2021-09-01"
parent_id = azapi_resource.resourceGroup.id
name      = var.resource_name
location  = var.location
body = {
kind = "StorageV2"
properties = {
}
sku = {
name = "Standard_LRS"
}
}
schema_validation_enabled = false
# Export the full response so the workspace can read primaryEndpoints.dfs below.
response_export_values    = ["*"]
}
# Default blob service of the storage account; parent of the container below.
data "azapi_resource" "blobService" {
type      = "Microsoft.Storage/storageAccounts/blobServices@2022-09-01"
parent_id = azapi_resource.storageAccount.id
name      = "default"
}
# Blob container used as the default filesystem of the workspace's data lake storage.
resource "azapi_resource" "container" {
type      = "Microsoft.Storage/storageAccounts/blobServices/containers@2022-09-01"
name      = var.resource_name
parent_id = data.azapi_resource.blobService.id
body = {
properties = {
metadata = {
key = "value"
}
}
}
response_export_values = ["*"]
}
# Synapse workspace that hosts the Spark (big data) pool declared below.
resource "azapi_resource" "workspace" {
type      = "Microsoft.Synapse/workspaces@2021-06-01"
parent_id = azapi_resource.resourceGroup.id
name      = var.resource_name
location  = var.location
identity {
type         = "SystemAssigned"
identity_ids = []
}
body = {
properties = {
defaultDataLakeStorage = {
# DFS (Data Lake Gen2) endpoint exported by the storage account resource.
accountUrl = azapi_resource.storageAccount.output.properties.primaryEndpoints.dfs
filesystem = azapi_resource.container.name
}
# NOTE(review): an empty string appears to opt out of a managed virtual network — confirm.
managedVirtualNetwork         = ""
publicNetworkAccess           = "Enabled"
sqlAdministratorLogin         = var.sql_administrator_login
sqlAdministratorLoginPassword = var.sql_administrator_login_password
}
}
schema_validation_enabled = false
response_export_values    = ["*"]
}
# Synapse Spark (big data) pool attached to the workspace above.
# Fixed: the original sample referenced API version 2021-06-01-preview,
# which is inconsistent with the 2021-06-01 version documented on this page.
resource "azapi_resource" "bigDataPool" {
type      = "Microsoft.Synapse/workspaces/bigDataPools@2021-06-01"
parent_id = azapi_resource.workspace.id
name      = var.resource_name
location  = var.location
body = {
properties = {
autoPause = {
enabled = false
}
autoScale = {
enabled = false
}
cacheSize             = 0
defaultSparkLogFolder = "/logs"
dynamicExecutorAllocation = {
enabled      = false
maxExecutors = 0
minExecutors = 0
}
isComputeIsolationEnabled = false
# Fixed node count; it is used because autoScale is disabled above.
nodeCount                   = 3
nodeSize                    = "Small"
nodeSizeFamily              = "MemoryOptimized"
sessionLevelPackagesEnabled = false
sparkEventsFolder           = "/events"
sparkVersion                = "2.4"
}
}
schema_validation_enabled = false
response_export_values    = ["*"]
}