We recommend using Azure Native.

Azure v6.22.0 published on Tuesday, Apr 1, 2025 by Pulumi

azure.synapse.SparkPool

Manages a Synapse Spark Pool.

Example Usage

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
const exampleAccount = new azure.storage.Account("example", {
    name: "examplestorageacc",
    resourceGroupName: example.name,
    location: example.location,
    accountTier: "Standard",
    accountReplicationType: "LRS",
    accountKind: "StorageV2",
    isHnsEnabled: true,
});
const exampleDataLakeGen2Filesystem = new azure.storage.DataLakeGen2Filesystem("example", {
    name: "example",
    storageAccountId: exampleAccount.id,
});
const exampleWorkspace = new azure.synapse.Workspace("example", {
    name: "example",
    resourceGroupName: example.name,
    location: example.location,
    storageDataLakeGen2FilesystemId: exampleDataLakeGen2Filesystem.id,
    sqlAdministratorLogin: "sqladminuser",
    sqlAdministratorLoginPassword: "H@Sh1CoR3!",
    identity: {
        type: "SystemAssigned",
    },
});
const exampleSparkPool = new azure.synapse.SparkPool("example", {
    name: "example",
    synapseWorkspaceId: exampleWorkspace.id,
    nodeSizeFamily: "MemoryOptimized",
    nodeSize: "Small",
    cacheSize: 100,
    autoScale: {
        maxNodeCount: 50,
        minNodeCount: 3,
    },
    autoPause: {
        delayInMinutes: 15,
    },
    libraryRequirement: {
        content: `appnope==0.1.0
beautifulsoup4==4.6.3
`,
        filename: "requirements.txt",
    },
    sparkConfig: {
        content: "spark.shuffle.spill                true\n",
        filename: "config.txt",
    },
    sparkVersion: "3.2",
    tags: {
        ENV: "Production",
    },
});
import pulumi
import pulumi_azure as azure

example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
example_account = azure.storage.Account("example",
    name="examplestorageacc",
    resource_group_name=example.name,
    location=example.location,
    account_tier="Standard",
    account_replication_type="LRS",
    account_kind="StorageV2",
    is_hns_enabled=True)
example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("example",
    name="example",
    storage_account_id=example_account.id)
example_workspace = azure.synapse.Workspace("example",
    name="example",
    resource_group_name=example.name,
    location=example.location,
    storage_data_lake_gen2_filesystem_id=example_data_lake_gen2_filesystem.id,
    sql_administrator_login="sqladminuser",
    sql_administrator_login_password="H@Sh1CoR3!",
    identity={
        "type": "SystemAssigned",
    })
example_spark_pool = azure.synapse.SparkPool("example",
    name="example",
    synapse_workspace_id=example_workspace.id,
    node_size_family="MemoryOptimized",
    node_size="Small",
    cache_size=100,
    auto_scale={
        "max_node_count": 50,
        "min_node_count": 3,
    },
    auto_pause={
        "delay_in_minutes": 15,
    },
    library_requirement={
        "content": """appnope==0.1.0
beautifulsoup4==4.6.3
""",
        "filename": "requirements.txt",
    },
    spark_config={
        "content": "spark.shuffle.spill                true\n",
        "filename": "config.txt",
    },
    spark_version="3.2",
    tags={
        "ENV": "Production",
    })
package main

import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/storage"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/synapse"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
			Name:                   pulumi.String("examplestorageacc"),
			ResourceGroupName:      example.Name,
			Location:               example.Location,
			AccountTier:            pulumi.String("Standard"),
			AccountReplicationType: pulumi.String("LRS"),
			AccountKind:            pulumi.String("StorageV2"),
			IsHnsEnabled:           pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		exampleDataLakeGen2Filesystem, err := storage.NewDataLakeGen2Filesystem(ctx, "example", &storage.DataLakeGen2FilesystemArgs{
			Name:             pulumi.String("example"),
			StorageAccountId: exampleAccount.ID(),
		})
		if err != nil {
			return err
		}
		exampleWorkspace, err := synapse.NewWorkspace(ctx, "example", &synapse.WorkspaceArgs{
			Name:                            pulumi.String("example"),
			ResourceGroupName:               example.Name,
			Location:                        example.Location,
			StorageDataLakeGen2FilesystemId: exampleDataLakeGen2Filesystem.ID(),
			SqlAdministratorLogin:           pulumi.String("sqladminuser"),
			SqlAdministratorLoginPassword:   pulumi.String("H@Sh1CoR3!"),
			Identity: &synapse.WorkspaceIdentityArgs{
				Type: pulumi.String("SystemAssigned"),
			},
		})
		if err != nil {
			return err
		}
		_, err = synapse.NewSparkPool(ctx, "example", &synapse.SparkPoolArgs{
			Name:               pulumi.String("example"),
			SynapseWorkspaceId: exampleWorkspace.ID(),
			NodeSizeFamily:     pulumi.String("MemoryOptimized"),
			NodeSize:           pulumi.String("Small"),
			CacheSize:          pulumi.Int(100),
			AutoScale: &synapse.SparkPoolAutoScaleArgs{
				MaxNodeCount: pulumi.Int(50),
				MinNodeCount: pulumi.Int(3),
			},
			AutoPause: &synapse.SparkPoolAutoPauseArgs{
				DelayInMinutes: pulumi.Int(15),
			},
			LibraryRequirement: &synapse.SparkPoolLibraryRequirementArgs{
				Content:  pulumi.String("appnope==0.1.0\nbeautifulsoup4==4.6.3\n"),
				Filename: pulumi.String("requirements.txt"),
			},
			SparkConfig: &synapse.SparkPoolSparkConfigArgs{
				Content:  pulumi.String("spark.shuffle.spill                true\n"),
				Filename: pulumi.String("config.txt"),
			},
			SparkVersion: pulumi.String("3.2"),
			Tags: pulumi.StringMap{
				"ENV": pulumi.String("Production"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() => 
{
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });

    var exampleAccount = new Azure.Storage.Account("example", new()
    {
        Name = "examplestorageacc",
        ResourceGroupName = example.Name,
        Location = example.Location,
        AccountTier = "Standard",
        AccountReplicationType = "LRS",
        AccountKind = "StorageV2",
        IsHnsEnabled = true,
    });

    var exampleDataLakeGen2Filesystem = new Azure.Storage.DataLakeGen2Filesystem("example", new()
    {
        Name = "example",
        StorageAccountId = exampleAccount.Id,
    });

    var exampleWorkspace = new Azure.Synapse.Workspace("example", new()
    {
        Name = "example",
        ResourceGroupName = example.Name,
        Location = example.Location,
        StorageDataLakeGen2FilesystemId = exampleDataLakeGen2Filesystem.Id,
        SqlAdministratorLogin = "sqladminuser",
        SqlAdministratorLoginPassword = "H@Sh1CoR3!",
        Identity = new Azure.Synapse.Inputs.WorkspaceIdentityArgs
        {
            Type = "SystemAssigned",
        },
    });

    var exampleSparkPool = new Azure.Synapse.SparkPool("example", new()
    {
        Name = "example",
        SynapseWorkspaceId = exampleWorkspace.Id,
        NodeSizeFamily = "MemoryOptimized",
        NodeSize = "Small",
        CacheSize = 100,
        AutoScale = new Azure.Synapse.Inputs.SparkPoolAutoScaleArgs
        {
            MaxNodeCount = 50,
            MinNodeCount = 3,
        },
        AutoPause = new Azure.Synapse.Inputs.SparkPoolAutoPauseArgs
        {
            DelayInMinutes = 15,
        },
        LibraryRequirement = new Azure.Synapse.Inputs.SparkPoolLibraryRequirementArgs
        {
            Content = @"appnope==0.1.0
beautifulsoup4==4.6.3
",
            Filename = "requirements.txt",
        },
        SparkConfig = new Azure.Synapse.Inputs.SparkPoolSparkConfigArgs
        {
            Content = @"spark.shuffle.spill                true
",
            Filename = "config.txt",
        },
        SparkVersion = "3.2",
        Tags = 
        {
            { "ENV", "Production" },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.DataLakeGen2Filesystem;
import com.pulumi.azure.storage.DataLakeGen2FilesystemArgs;
import com.pulumi.azure.synapse.Workspace;
import com.pulumi.azure.synapse.WorkspaceArgs;
import com.pulumi.azure.synapse.inputs.WorkspaceIdentityArgs;
import com.pulumi.azure.synapse.SparkPool;
import com.pulumi.azure.synapse.SparkPoolArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolAutoScaleArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolAutoPauseArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolLibraryRequirementArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolSparkConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());

        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
            .name("examplestorageacc")
            .resourceGroupName(example.name())
            .location(example.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .accountKind("StorageV2")
            .isHnsEnabled("true")
            .build());

        var exampleDataLakeGen2Filesystem = new DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", DataLakeGen2FilesystemArgs.builder()
            .name("example")
            .storageAccountId(exampleAccount.id())
            .build());

        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
            .name("example")
            .resourceGroupName(example.name())
            .location(example.location())
            .storageDataLakeGen2FilesystemId(exampleDataLakeGen2Filesystem.id())
            .sqlAdministratorLogin("sqladminuser")
            .sqlAdministratorLoginPassword("H@Sh1CoR3!")
            .identity(WorkspaceIdentityArgs.builder()
                .type("SystemAssigned")
                .build())
            .build());

        var exampleSparkPool = new SparkPool("exampleSparkPool", SparkPoolArgs.builder()
            .name("example")
            .synapseWorkspaceId(exampleWorkspace.id())
            .nodeSizeFamily("MemoryOptimized")
            .nodeSize("Small")
            .cacheSize(100)
            .autoScale(SparkPoolAutoScaleArgs.builder()
                .maxNodeCount(50)
                .minNodeCount(3)
                .build())
            .autoPause(SparkPoolAutoPauseArgs.builder()
                .delayInMinutes(15)
                .build())
            .libraryRequirement(SparkPoolLibraryRequirementArgs.builder()
                .content("""
appnope==0.1.0
beautifulsoup4==4.6.3
                """)
                .filename("requirements.txt")
                .build())
            .sparkConfig(SparkPoolSparkConfigArgs.builder()
                .content("""
spark.shuffle.spill                true
                """)
                .filename("config.txt")
                .build())
            .sparkVersion("3.2")
            .tags(Map.of("ENV", "Production"))
            .build());

    }
}
resources:
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  exampleAccount:
    type: azure:storage:Account
    name: example
    properties:
      name: examplestorageacc
      resourceGroupName: ${example.name}
      location: ${example.location}
      accountTier: Standard
      accountReplicationType: LRS
      accountKind: StorageV2
      isHnsEnabled: true
  exampleDataLakeGen2Filesystem:
    type: azure:storage:DataLakeGen2Filesystem
    name: example
    properties:
      name: example
      storageAccountId: ${exampleAccount.id}
  exampleWorkspace:
    type: azure:synapse:Workspace
    name: example
    properties:
      name: example
      resourceGroupName: ${example.name}
      location: ${example.location}
      storageDataLakeGen2FilesystemId: ${exampleDataLakeGen2Filesystem.id}
      sqlAdministratorLogin: sqladminuser
      sqlAdministratorLoginPassword: H@Sh1CoR3!
      identity:
        type: SystemAssigned
  exampleSparkPool:
    type: azure:synapse:SparkPool
    name: example
    properties:
      name: example
      synapseWorkspaceId: ${exampleWorkspace.id}
      nodeSizeFamily: MemoryOptimized
      nodeSize: Small
      cacheSize: 100
      autoScale:
        maxNodeCount: 50
        minNodeCount: 3
      autoPause:
        delayInMinutes: 15
      libraryRequirement:
        content: |
          appnope==0.1.0
          beautifulsoup4==4.6.3          
        filename: requirements.txt
      sparkConfig:
        content: |
          spark.shuffle.spill                true          
        filename: config.txt
      sparkVersion: "3.2"
      tags:
        ENV: Production
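
The examples above size the pool with an auto_scale block. For a fixed-size pool, node_count can be set instead, since exactly one of node_count or auto_scale must be specified. A minimal Python sketch, reusing the example_workspace resource defined in the example above:

import pulumi_azure as azure

# Fixed-size variant: node_count replaces the auto_scale block.
# Exactly one of node_count or auto_scale may be specified.
fixed_pool = azure.synapse.SparkPool("fixedExample",
    name="fixedexample",
    synapse_workspace_id=example_workspace.id,
    node_size_family="MemoryOptimized",
    node_size="Small",
    node_count=3,
    spark_version="3.4")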

Create SparkPool Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new SparkPool(name: string, args: SparkPoolArgs, opts?: CustomResourceOptions);
@overload
def SparkPool(resource_name: str,
              args: SparkPoolArgs,
              opts: Optional[ResourceOptions] = None)

@overload
def SparkPool(resource_name: str,
              opts: Optional[ResourceOptions] = None,
              node_size: Optional[str] = None,
              synapse_workspace_id: Optional[str] = None,
              spark_version: Optional[str] = None,
              node_size_family: Optional[str] = None,
              name: Optional[str] = None,
              compute_isolation_enabled: Optional[bool] = None,
              max_executors: Optional[int] = None,
              min_executors: Optional[int] = None,
              auto_pause: Optional[SparkPoolAutoPauseArgs] = None,
              node_count: Optional[int] = None,
              dynamic_executor_allocation_enabled: Optional[bool] = None,
              library_requirement: Optional[SparkPoolLibraryRequirementArgs] = None,
              session_level_packages_enabled: Optional[bool] = None,
              spark_config: Optional[SparkPoolSparkConfigArgs] = None,
              spark_events_folder: Optional[str] = None,
              spark_log_folder: Optional[str] = None,
              cache_size: Optional[int] = None,
              auto_scale: Optional[SparkPoolAutoScaleArgs] = None,
              tags: Optional[Mapping[str, str]] = None)
func NewSparkPool(ctx *Context, name string, args SparkPoolArgs, opts ...ResourceOption) (*SparkPool, error)
public SparkPool(string name, SparkPoolArgs args, CustomResourceOptions? opts = null)
public SparkPool(String name, SparkPoolArgs args)
public SparkPool(String name, SparkPoolArgs args, CustomResourceOptions options)
type: azure:synapse:SparkPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. SparkPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. SparkPoolArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. SparkPoolArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. SparkPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. SparkPoolArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var sparkPoolResource = new Azure.Synapse.SparkPool("sparkPoolResource", new()
{
    NodeSize = "string",
    SynapseWorkspaceId = "string",
    SparkVersion = "string",
    NodeSizeFamily = "string",
    Name = "string",
    ComputeIsolationEnabled = false,
    MaxExecutors = 0,
    MinExecutors = 0,
    AutoPause = new Azure.Synapse.Inputs.SparkPoolAutoPauseArgs
    {
        DelayInMinutes = 0,
    },
    NodeCount = 0,
    DynamicExecutorAllocationEnabled = false,
    LibraryRequirement = new Azure.Synapse.Inputs.SparkPoolLibraryRequirementArgs
    {
        Content = "string",
        Filename = "string",
    },
    SessionLevelPackagesEnabled = false,
    SparkConfig = new Azure.Synapse.Inputs.SparkPoolSparkConfigArgs
    {
        Content = "string",
        Filename = "string",
    },
    SparkEventsFolder = "string",
    SparkLogFolder = "string",
    CacheSize = 0,
    AutoScale = new Azure.Synapse.Inputs.SparkPoolAutoScaleArgs
    {
        MaxNodeCount = 0,
        MinNodeCount = 0,
    },
    Tags = 
    {
        { "string", "string" },
    },
});
example, err := synapse.NewSparkPool(ctx, "sparkPoolResource", &synapse.SparkPoolArgs{
	NodeSize:                pulumi.String("string"),
	SynapseWorkspaceId:      pulumi.String("string"),
	SparkVersion:            pulumi.String("string"),
	NodeSizeFamily:          pulumi.String("string"),
	Name:                    pulumi.String("string"),
	ComputeIsolationEnabled: pulumi.Bool(false),
	MaxExecutors:            pulumi.Int(0),
	MinExecutors:            pulumi.Int(0),
	AutoPause: &synapse.SparkPoolAutoPauseArgs{
		DelayInMinutes: pulumi.Int(0),
	},
	NodeCount:                        pulumi.Int(0),
	DynamicExecutorAllocationEnabled: pulumi.Bool(false),
	LibraryRequirement: &synapse.SparkPoolLibraryRequirementArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	SessionLevelPackagesEnabled: pulumi.Bool(false),
	SparkConfig: &synapse.SparkPoolSparkConfigArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	SparkEventsFolder: pulumi.String("string"),
	SparkLogFolder:    pulumi.String("string"),
	CacheSize:         pulumi.Int(0),
	AutoScale: &synapse.SparkPoolAutoScaleArgs{
		MaxNodeCount: pulumi.Int(0),
		MinNodeCount: pulumi.Int(0),
	},
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
})
var sparkPoolResource = new SparkPool("sparkPoolResource", SparkPoolArgs.builder()
    .nodeSize("string")
    .synapseWorkspaceId("string")
    .sparkVersion("string")
    .nodeSizeFamily("string")
    .name("string")
    .computeIsolationEnabled(false)
    .maxExecutors(0)
    .minExecutors(0)
    .autoPause(SparkPoolAutoPauseArgs.builder()
        .delayInMinutes(0)
        .build())
    .nodeCount(0)
    .dynamicExecutorAllocationEnabled(false)
    .libraryRequirement(SparkPoolLibraryRequirementArgs.builder()
        .content("string")
        .filename("string")
        .build())
    .sessionLevelPackagesEnabled(false)
    .sparkConfig(SparkPoolSparkConfigArgs.builder()
        .content("string")
        .filename("string")
        .build())
    .sparkEventsFolder("string")
    .sparkLogFolder("string")
    .cacheSize(0)
    .autoScale(SparkPoolAutoScaleArgs.builder()
        .maxNodeCount(0)
        .minNodeCount(0)
        .build())
    .tags(Map.of("string", "string"))
    .build());
spark_pool_resource = azure.synapse.SparkPool("sparkPoolResource",
    node_size="string",
    synapse_workspace_id="string",
    spark_version="string",
    node_size_family="string",
    name="string",
    compute_isolation_enabled=False,
    max_executors=0,
    min_executors=0,
    auto_pause={
        "delay_in_minutes": 0,
    },
    node_count=0,
    dynamic_executor_allocation_enabled=False,
    library_requirement={
        "content": "string",
        "filename": "string",
    },
    session_level_packages_enabled=False,
    spark_config={
        "content": "string",
        "filename": "string",
    },
    spark_events_folder="string",
    spark_log_folder="string",
    cache_size=0,
    auto_scale={
        "max_node_count": 0,
        "min_node_count": 0,
    },
    tags={
        "string": "string",
    })
const sparkPoolResource = new azure.synapse.SparkPool("sparkPoolResource", {
    nodeSize: "string",
    synapseWorkspaceId: "string",
    sparkVersion: "string",
    nodeSizeFamily: "string",
    name: "string",
    computeIsolationEnabled: false,
    maxExecutors: 0,
    minExecutors: 0,
    autoPause: {
        delayInMinutes: 0,
    },
    nodeCount: 0,
    dynamicExecutorAllocationEnabled: false,
    libraryRequirement: {
        content: "string",
        filename: "string",
    },
    sessionLevelPackagesEnabled: false,
    sparkConfig: {
        content: "string",
        filename: "string",
    },
    sparkEventsFolder: "string",
    sparkLogFolder: "string",
    cacheSize: 0,
    autoScale: {
        maxNodeCount: 0,
        minNodeCount: 0,
    },
    tags: {
        string: "string",
    },
});
type: azure:synapse:SparkPool
properties:
    autoPause:
        delayInMinutes: 0
    autoScale:
        maxNodeCount: 0
        minNodeCount: 0
    cacheSize: 0
    computeIsolationEnabled: false
    dynamicExecutorAllocationEnabled: false
    libraryRequirement:
        content: string
        filename: string
    maxExecutors: 0
    minExecutors: 0
    name: string
    nodeCount: 0
    nodeSize: string
    nodeSizeFamily: string
    sessionLevelPackagesEnabled: false
    sparkConfig:
        content: string
        filename: string
    sparkEventsFolder: string
    sparkLogFolder: string
    sparkVersion: string
    synapseWorkspaceId: string
    tags:
        string: string

SparkPool Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
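
For example, the auto_scale input can be given either way. A minimal sketch, assuming workspace_id holds the ID of an existing Synapse workspace:

import pulumi_azure as azure

# Dictionary literal form:
pool_a = azure.synapse.SparkPool("poolA",
    synapse_workspace_id=workspace_id,  # assumed to be defined elsewhere
    node_size_family="MemoryOptimized",
    node_size="Small",
    spark_version="3.4",
    auto_scale={
        "max_node_count": 50,
        "min_node_count": 3,
    })

# Equivalent argument-class form:
pool_b = azure.synapse.SparkPool("poolB",
    synapse_workspace_id=workspace_id,
    node_size_family="MemoryOptimized",
    node_size="Small",
    spark_version="3.4",
    auto_scale=azure.synapse.SparkPoolAutoScaleArgs(
        max_node_count=50,
        min_node_count=3,
    ))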

The SparkPool resource accepts the following input properties:

NodeSize This property is required. string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
NodeSizeFamily This property is required. string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
SparkVersion This property is required. string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
SynapseWorkspaceId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
AutoPause SparkPoolAutoPause
An auto_pause block as defined below.
AutoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
CacheSize int
The cache size in the Spark Pool.
ComputeIsolationEnabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
DynamicExecutorAllocationEnabled bool
LibraryRequirement SparkPoolLibraryRequirement
MaxExecutors int
MinExecutors int
Name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
NodeCount int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
SessionLevelPackagesEnabled bool
SparkConfig SparkPoolSparkConfig
SparkEventsFolder string
SparkLogFolder string
Tags Dictionary<string, string>
NodeSize This property is required. string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
NodeSizeFamily This property is required. string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
SparkVersion This property is required. string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
SynapseWorkspaceId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
AutoPause SparkPoolAutoPauseArgs
An auto_pause block as defined below.
AutoScale SparkPoolAutoScaleArgs
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
CacheSize int
The cache size in the Spark Pool.
ComputeIsolationEnabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
DynamicExecutorAllocationEnabled bool
LibraryRequirement SparkPoolLibraryRequirementArgs
MaxExecutors int
MinExecutors int
Name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
NodeCount int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
SessionLevelPackagesEnabled bool
SparkConfig SparkPoolSparkConfigArgs
SparkEventsFolder string
SparkLogFolder string
Tags map[string]string
nodeSize This property is required. String
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily This property is required. String
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sparkVersion This property is required. String
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
autoPause SparkPoolAutoPause
An auto_pause block as defined below.
autoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize Integer
The cache size in the Spark Pool.
computeIsolationEnabled Boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled Boolean
libraryRequirement SparkPoolLibraryRequirement
maxExecutors Integer
minExecutors Integer
name Changes to this property will trigger replacement. String
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount Integer
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
sessionLevelPackagesEnabled Boolean
sparkConfig SparkPoolSparkConfig
sparkEventsFolder String
sparkLogFolder String
tags Map<String,String>
nodeSize This property is required. string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily This property is required. string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sparkVersion This property is required. string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
autoPause SparkPoolAutoPause
An auto_pause block as defined below.
autoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize number
The cache size in the Spark Pool.
computeIsolationEnabled boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled boolean
libraryRequirement SparkPoolLibraryRequirement
maxExecutors number
minExecutors number
name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount number
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
sessionLevelPackagesEnabled boolean
sparkConfig SparkPoolSparkConfig
sparkEventsFolder string
sparkLogFolder string
tags {[key: string]: string}
node_size This property is required. str
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
node_size_family This property is required. str
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
spark_version This property is required. str
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapse_workspace_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
auto_pause SparkPoolAutoPauseArgs
An auto_pause block as defined below.
auto_scale SparkPoolAutoScaleArgs
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cache_size int
The cache size in the Spark Pool.
compute_isolation_enabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamic_executor_allocation_enabled bool
library_requirement SparkPoolLibraryRequirementArgs
max_executors int
min_executors int
name Changes to this property will trigger replacement. str
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
node_count int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
session_level_packages_enabled bool
spark_config SparkPoolSparkConfigArgs
spark_events_folder str
spark_log_folder str
tags Mapping[str, str]
nodeSize This property is required. String
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily This property is required. String
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sparkVersion This property is required. String
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
autoPause Property Map
An auto_pause block as defined below.
autoScale Property Map
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize Number
The cache size in the Spark Pool.
computeIsolationEnabled Boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled Boolean
libraryRequirement Property Map
maxExecutors Number
minExecutors Number
name Changes to this property will trigger replacement. String
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount Number
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
sessionLevelPackagesEnabled Boolean
sparkConfig Property Map
sparkEventsFolder String
sparkLogFolder String
tags Map<String>

Outputs

All input properties are implicitly available as output properties. Additionally, the SparkPool resource produces the following output properties (a short export sketch follows the list):

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
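
These outputs can be exported as stack outputs. A one-line sketch, assuming the example_spark_pool resource from the usage example above:

import pulumi

# Export the provider-assigned ID as a stack output.
pulumi.export("spark_pool_id", example_spark_pool.id)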

Look up Existing SparkPool Resource

Get an existing SparkPool resource’s state with the given name, ID, and optional extra properties used to qualify the lookup. A short lookup sketch follows the parameters below.

public static get(name: string, id: Input<ID>, state?: SparkPoolState, opts?: CustomResourceOptions): SparkPool
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        auto_pause: Optional[SparkPoolAutoPauseArgs] = None,
        auto_scale: Optional[SparkPoolAutoScaleArgs] = None,
        cache_size: Optional[int] = None,
        compute_isolation_enabled: Optional[bool] = None,
        dynamic_executor_allocation_enabled: Optional[bool] = None,
        library_requirement: Optional[SparkPoolLibraryRequirementArgs] = None,
        max_executors: Optional[int] = None,
        min_executors: Optional[int] = None,
        name: Optional[str] = None,
        node_count: Optional[int] = None,
        node_size: Optional[str] = None,
        node_size_family: Optional[str] = None,
        session_level_packages_enabled: Optional[bool] = None,
        spark_config: Optional[SparkPoolSparkConfigArgs] = None,
        spark_events_folder: Optional[str] = None,
        spark_log_folder: Optional[str] = None,
        spark_version: Optional[str] = None,
        synapse_workspace_id: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None) -> SparkPool
func GetSparkPool(ctx *Context, name string, id IDInput, state *SparkPoolState, opts ...ResourceOption) (*SparkPool, error)
public static SparkPool Get(string name, Input<string> id, SparkPoolState? state, CustomResourceOptions? opts = null)
public static SparkPool get(String name, Output<String> id, SparkPoolState state, CustomResourceOptions options)
resources:
  _:
    type: azure:synapse:SparkPool
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
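
A minimal lookup sketch in Python; the resource ID below is a placeholder in the format shown under Import:

import pulumi_azure as azure

# Look up an existing Spark Pool by its Azure resource ID.
existing = azure.synapse.SparkPool.get("existing",
    id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1/bigDataPools/sparkPool1")
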
The following state arguments are supported:
AutoPause SparkPoolAutoPause
An auto_pause block as defined below.
AutoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
CacheSize int
The cache size in the Spark Pool.
ComputeIsolationEnabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
DynamicExecutorAllocationEnabled bool
LibraryRequirement SparkPoolLibraryRequirement
MaxExecutors int
MinExecutors int
Name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
NodeCount int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
NodeSize string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
NodeSizeFamily string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
SessionLevelPackagesEnabled bool
SparkConfig SparkPoolSparkConfig
SparkEventsFolder string
SparkLogFolder string
SparkVersion string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
SynapseWorkspaceId Changes to this property will trigger replacement. string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
Tags Dictionary<string, string>
AutoPause SparkPoolAutoPauseArgs
An auto_pause block as defined below.
AutoScale SparkPoolAutoScaleArgs
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
CacheSize int
The cache size in the Spark Pool.
ComputeIsolationEnabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
DynamicExecutorAllocationEnabled bool
LibraryRequirement SparkPoolLibraryRequirementArgs
MaxExecutors int
MinExecutors int
Name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
NodeCount int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
NodeSize string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
NodeSizeFamily string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
SessionLevelPackagesEnabled bool
SparkConfig SparkPoolSparkConfigArgs
SparkEventsFolder string
SparkLogFolder string
SparkVersion string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
SynapseWorkspaceId Changes to this property will trigger replacement. string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
Tags map[string]string
autoPause SparkPoolAutoPause
An auto_pause block as defined below.
autoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize Integer
The cache size in the Spark Pool.
computeIsolationEnabled Boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled Boolean
libraryRequirement SparkPoolLibraryRequirement
maxExecutors Integer
minExecutors Integer
name Changes to this property will trigger replacement. String
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount Integer
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
nodeSize String
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily String
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sessionLevelPackagesEnabled Boolean
sparkConfig SparkPoolSparkConfig
sparkEventsFolder String
sparkLogFolder String
sparkVersion String
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId Changes to this property will trigger replacement. String
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
tags Map<String,String>
autoPause SparkPoolAutoPause
An auto_pause block as defined below.
autoScale SparkPoolAutoScale
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize number
The cache size in the Spark Pool.
computeIsolationEnabled boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled boolean
libraryRequirement SparkPoolLibraryRequirement
maxExecutors number
minExecutors number
name Changes to this property will trigger replacement. string
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount number
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
nodeSize string
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily string
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sessionLevelPackagesEnabled boolean
sparkConfig SparkPoolSparkConfig
sparkEventsFolder string
sparkLogFolder string
sparkVersion string
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId Changes to this property will trigger replacement. string
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
tags {[key: string]: string}
auto_pause SparkPoolAutoPauseArgs
An auto_pause block as defined below.
auto_scale SparkPoolAutoScaleArgs
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cache_size int
The cache size in the Spark Pool.
compute_isolation_enabled bool
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamic_executor_allocation_enabled bool
library_requirement SparkPoolLibraryRequirementArgs
max_executors int
min_executors int
name Changes to this property will trigger replacement. str
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
node_count int
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
node_size str
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
node_size_family str
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
session_level_packages_enabled bool
spark_config SparkPoolSparkConfigArgs
spark_events_folder str
spark_log_folder str
spark_version str
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapse_workspace_id Changes to this property will trigger replacement. str
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
tags Mapping[str, str]
autoPause Property Map
An auto_pause block as defined below.
autoScale Property Map
An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
cacheSize Number
The cache size in the Spark Pool.
computeIsolationEnabled Boolean
Indicates whether compute isolation is enabled or not. Defaults to false.
dynamicExecutorAllocationEnabled Boolean
libraryRequirement Property Map
maxExecutors Number
minExecutors Number
name Changes to this property will trigger replacement. String
The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
nodeCount Number
The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
nodeSize String
The level of node in the Spark Pool. Possible values are Small, Medium, Large, None, XLarge, XXLarge and XXXLarge.
nodeSizeFamily String
The kind of nodes that the Spark Pool provides. Possible values are HardwareAcceleratedFPGA, HardwareAcceleratedGPU, MemoryOptimized, and None.
sessionLevelPackagesEnabled Boolean
sparkConfig Property Map
sparkEventsFolder String
sparkLogFolder String
sparkVersion String
The Apache Spark version. Possible values are 3.2, 3.3, and 3.4.
synapseWorkspaceId Changes to this property will trigger replacement. String
The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
tags Map<String>

Supporting Types

SparkPoolAutoPause
, SparkPoolAutoPauseArgs

DelayInMinutes This property is required. int
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
DelayInMinutes This property is required. int
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
delayInMinutes This property is required. Integer
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
delayInMinutes This property is required. number
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
delay_in_minutes This property is required. int
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
delayInMinutes This property is required. Number
Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.

SparkPoolAutoScale
, SparkPoolAutoScaleArgs

MaxNodeCount This property is required. int
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
MinNodeCount This property is required. int
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
MaxNodeCount This property is required. int
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
MinNodeCount This property is required. int
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
maxNodeCount This property is required. Integer
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
minNodeCount This property is required. Integer
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
maxNodeCount This property is required. number
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
minNodeCount This property is required. number
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
max_node_count This property is required. int
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
min_node_count This property is required. int
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
maxNodeCount This property is required. Number
The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
minNodeCount This property is required. Number
The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.

SparkPoolLibraryRequirement
, SparkPoolLibraryRequirementArgs

Content This property is required. string
The content of library requirements.
Filename This property is required. string
The name of the library requirements file.
Content This property is required. string
The content of library requirements.
Filename This property is required. string
The name of the library requirements file.
content This property is required. String
The content of library requirements.
filename This property is required. String
The name of the library requirements file.
content This property is required. string
The content of library requirements.
filename This property is required. string
The name of the library requirements file.
content This property is required. str
The content of library requirements.
filename This property is required. str
The name of the library requirements file.
content This property is required. String
The content of library requirements.
filename This property is required. String
The name of the library requirements file.

SparkPoolSparkConfig
, SparkPoolSparkConfigArgs

Content This property is required. string
The contents of a spark configuration.
Filename This property is required. string
The name of the file where the spark configuration content will be stored.
Content This property is required. string
The contents of a spark configuration.
Filename This property is required. string
The name of the file where the spark configuration content will be stored.
content This property is required. String
The contents of a spark configuration.
filename This property is required. String
The name of the file where the spark configuration content will be stored.
content This property is required. string
The contents of a spark configuration.
filename This property is required. string
The name of the file where the spark configuration content will be stored.
content This property is required. str
The contents of a spark configuration.
filename This property is required. str
The name of the file where the spark configuration content will be stored.
content This property is required. String
The contents of a spark configuration.
filename This property is required. String
The name of the file where the spark configuration content will be stored.

Import

Synapse Spark Pool can be imported using the resource id, e.g.

$ pulumi import azure:synapse/sparkPool:SparkPool example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1/bigDataPools/sparkPool1

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Azure Classic pulumi/pulumi-azure
License
Apache-2.0
Notes
This Pulumi package is based on the azurerm Terraform Provider.