Google Cloud v8.26.0 published on Thursday, Apr 10, 2025 by Pulumi

gcp.dataplex.Task

A Dataplex task represents the work that you want Dataplex to do on a schedule. It encapsulates code, parameters, and the schedule.

To get more information about Task, see:

* API documentation: https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.lakes.tasks
* How-to Guides: Official Documentation: https://cloud.google.com/dataplex/docs

Example Usage

Dataplex Task Basic

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const project = gcp.organizations.getProject({});
const example = new gcp.dataplex.Lake("example", {
    name: "tf-test-lake_72490",
    location: "us-central1",
    project: "my-project-name",
});
const exampleTask = new gcp.dataplex.Task("example", {
    taskId: "tf-test-task_89605",
    location: "us-central1",
    lake: example.name,
    description: "Test Task Basic",
    displayName: "task-basic",
    labels: {
        count: "3",
    },
    triggerSpec: {
        type: "RECURRING",
        disabled: false,
        maxRetries: 3,
        startTime: "2023-10-02T15:01:23Z",
        schedule: "1 * * * *",
    },
    executionSpec: {
        serviceAccount: project.then(project => `${project.number}-compute@developer.gserviceaccount.com`),
        project: "my-project-name",
        maxJobExecutionLifetime: "100s",
        kmsKey: "234jn2kjn42k3n423",
    },
    spark: {
        pythonScriptFile: "gs://dataproc-examples/pyspark/hello-world/hello-world.py",
    },
    project: "my-project-name",
});

Python

import pulumi
import pulumi_gcp as gcp

project = gcp.organizations.get_project()
example = gcp.dataplex.Lake("example",
    name="tf-test-lake_72490",
    location="us-central1",
    project="my-project-name")
example_task = gcp.dataplex.Task("example",
    task_id="tf-test-task_89605",
    location="us-central1",
    lake=example.name,
    description="Test Task Basic",
    display_name="task-basic",
    labels={
        "count": "3",
    },
    trigger_spec={
        "type": "RECURRING",
        "disabled": False,
        "max_retries": 3,
        "start_time": "2023-10-02T15:01:23Z",
        "schedule": "1 * * * *",
    },
    execution_spec={
        "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
        "project": "my-project-name",
        "max_job_execution_lifetime": "100s",
        "kms_key": "234jn2kjn42k3n423",
    },
    spark={
        "python_script_file": "gs://dataproc-examples/pyspark/hello-world/hello-world.py",
    },
    project="my-project-name")

Go

package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		example, err := dataplex.NewLake(ctx, "example", &dataplex.LakeArgs{
			Name:     pulumi.String("tf-test-lake_72490"),
			Location: pulumi.String("us-central1"),
			Project:  pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		_, err = dataplex.NewTask(ctx, "example", &dataplex.TaskArgs{
			TaskId:      pulumi.String("tf-test-task_89605"),
			Location:    pulumi.String("us-central1"),
			Lake:        example.Name,
			Description: pulumi.String("Test Task Basic"),
			DisplayName: pulumi.String("task-basic"),
			Labels: pulumi.StringMap{
				"count": pulumi.String("3"),
			},
			TriggerSpec: &dataplex.TaskTriggerSpecArgs{
				Type:       pulumi.String("RECURRING"),
				Disabled:   pulumi.Bool(false),
				MaxRetries: pulumi.Int(3),
				StartTime:  pulumi.String("2023-10-02T15:01:23Z"),
				Schedule:   pulumi.String("1 * * * *"),
			},
			ExecutionSpec: &dataplex.TaskExecutionSpecArgs{
				ServiceAccount:          pulumi.Sprintf("%v-compute@developer.gserviceaccount.com", project.Number),
				Project:                 pulumi.String("my-project-name"),
				MaxJobExecutionLifetime: pulumi.String("100s"),
				KmsKey:                  pulumi.String("234jn2kjn42k3n423"),
			},
			Spark: &dataplex.TaskSparkArgs{
				PythonScriptFile: pulumi.String("gs://dataproc-examples/pyspark/hello-world/hello-world.py"),
			},
			Project: pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var project = Gcp.Organizations.GetProject.Invoke();

    var example = new Gcp.DataPlex.Lake("example", new()
    {
        Name = "tf-test-lake_72490",
        Location = "us-central1",
        Project = "my-project-name",
    });

    var exampleTask = new Gcp.DataPlex.Task("example", new()
    {
        TaskId = "tf-test-task_89605",
        Location = "us-central1",
        Lake = example.Name,
        Description = "Test Task Basic",
        DisplayName = "task-basic",
        Labels = 
        {
            { "count", "3" },
        },
        TriggerSpec = new Gcp.DataPlex.Inputs.TaskTriggerSpecArgs
        {
            Type = "RECURRING",
            Disabled = false,
            MaxRetries = 3,
            StartTime = "2023-10-02T15:01:23Z",
            Schedule = "1 * * * *",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.TaskExecutionSpecArgs
        {
            ServiceAccount = project.Apply(getProjectResult => $"{getProjectResult.Number}-compute@developer.gserviceaccount.com"),
            Project = "my-project-name",
            MaxJobExecutionLifetime = "100s",
            KmsKey = "234jn2kjn42k3n423",
        },
        Spark = new Gcp.DataPlex.Inputs.TaskSparkArgs
        {
            PythonScriptFile = "gs://dataproc-examples/pyspark/hello-world/hello-world.py",
        },
        Project = "my-project-name",
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.dataplex.Lake;
import com.pulumi.gcp.dataplex.LakeArgs;
import com.pulumi.gcp.dataplex.Task;
import com.pulumi.gcp.dataplex.TaskArgs;
import com.pulumi.gcp.dataplex.inputs.TaskTriggerSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
            .build());

        var example = new Lake("example", LakeArgs.builder()
            .name("tf-test-lake_72490")
            .location("us-central1")
            .project("my-project-name")
            .build());

        var exampleTask = new Task("exampleTask", TaskArgs.builder()
            .taskId("tf-test-task_89605")
            .location("us-central1")
            .lake(example.name())
            .description("Test Task Basic")
            .displayName("task-basic")
            .labels(Map.of("count", "3"))
            .triggerSpec(TaskTriggerSpecArgs.builder()
                .type("RECURRING")
                .disabled(false)
                .maxRetries(3)
                .startTime("2023-10-02T15:01:23Z")
                .schedule("1 * * * *")
                .build())
            .executionSpec(TaskExecutionSpecArgs.builder()
                .serviceAccount(project.applyValue(getProjectResult -> String.format("%s-compute@developer.gserviceaccount.com", getProjectResult.number())))
                .project("my-project-name")
                .maxJobExecutionLifetime("100s")
                .kmsKey("234jn2kjn42k3n423")
                .build())
            .spark(TaskSparkArgs.builder()
                .pythonScriptFile("gs://dataproc-examples/pyspark/hello-world/hello-world.py")
                .build())
            .project("my-project-name")
            .build());

    }
}

YAML

resources:
  example:
    type: gcp:dataplex:Lake
    properties:
      name: tf-test-lake_72490
      location: us-central1
      project: my-project-name
  exampleTask:
    type: gcp:dataplex:Task
    name: example
    properties:
      taskId: tf-test-task_89605
      location: us-central1
      lake: ${example.name}
      description: Test Task Basic
      displayName: task-basic
      labels:
        count: '3'
      triggerSpec:
        type: RECURRING
        disabled: false
        maxRetries: 3
        startTime: 2023-10-02T15:01:23Z
        schedule: 1 * * * *
      executionSpec:
        serviceAccount: ${project.number}-compute@developer.gserviceaccount.com
        project: my-project-name
        maxJobExecutionLifetime: 100s
        kmsKey: 234jn2kjn42k3n423
      spark:
        pythonScriptFile: gs://dataproc-examples/pyspark/hello-world/hello-world.py
      project: my-project-name
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Dataplex Task Spark

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// VPC network
const _default = new gcp.compute.Network("default", {
    name: "tf-test-workstation-cluster_56730",
    autoCreateSubnetworks: true,
});
const project = gcp.organizations.getProject({});
const exampleSpark = new gcp.dataplex.Lake("example_spark", {
    name: "tf-test-lake_95154",
    location: "us-central1",
    project: "my-project-name",
});
const exampleSparkTask = new gcp.dataplex.Task("example_spark", {
    taskId: "tf-test-task_64336",
    location: "us-central1",
    lake: exampleSpark.name,
    triggerSpec: {
        type: "ON_DEMAND",
    },
    description: "task-spark-terraform",
    executionSpec: {
        serviceAccount: project.then(project => `${project.number}-compute@developer.gserviceaccount.com`),
        args: {
            TASK_ARGS: "--output_location,gs://spark-job/task-result, --output_format, json",
        },
    },
    spark: {
        infrastructureSpec: {
            batch: {
                executorsCount: 2,
                maxExecutorsCount: 100,
            },
            containerImage: {
                image: "test-image",
                javaJars: ["test-java-jars.jar"],
                pythonPackages: ["gs://bucket-name/my/path/to/lib.tar.gz"],
                properties: {
                    name: "wrench",
                    mass: "1.3kg",
                    count: "3",
                },
            },
            vpcNetwork: {
                networkTags: ["test-network-tag"],
                subNetwork: _default.id,
            },
        },
        fileUris: ["gs://terrafrom-test/test.csv"],
        archiveUris: ["gs://terraform-test/test.csv"],
        sqlScript: "show databases",
    },
    project: "my-project-name",
});

Python

import pulumi
import pulumi_gcp as gcp

# VPC network
default = gcp.compute.Network("default",
    name="tf-test-workstation-cluster_56730",
    auto_create_subnetworks=True)
project = gcp.organizations.get_project()
example_spark = gcp.dataplex.Lake("example_spark",
    name="tf-test-lake_95154",
    location="us-central1",
    project="my-project-name")
example_spark_task = gcp.dataplex.Task("example_spark",
    task_id="tf-test-task_64336",
    location="us-central1",
    lake=example_spark.name,
    trigger_spec={
        "type": "ON_DEMAND",
    },
    description="task-spark-terraform",
    execution_spec={
        "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
        "args": {
            "TASK_ARGS": "--output_location,gs://spark-job/task-result, --output_format, json",
        },
    },
    spark={
        "infrastructure_spec": {
            "batch": {
                "executors_count": 2,
                "max_executors_count": 100,
            },
            "container_image": {
                "image": "test-image",
                "java_jars": ["test-java-jars.jar"],
                "python_packages": ["gs://bucket-name/my/path/to/lib.tar.gz"],
                "properties": {
                    "name": "wrench",
                    "mass": "1.3kg",
                    "count": "3",
                },
            },
            "vpc_network": {
                "network_tags": ["test-network-tag"],
                "sub_network": default.id,
            },
        },
        "file_uris": ["gs://terrafrom-test/test.csv"],
        "archive_uris": ["gs://terraform-test/test.csv"],
        "sql_script": "show databases",
    },
    project="my-project-name")

Go

package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/compute"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// VPC network
		_default, err := compute.NewNetwork(ctx, "default", &compute.NetworkArgs{
			Name:                  pulumi.String("tf-test-workstation-cluster_56730"),
			AutoCreateSubnetworks: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		exampleSpark, err := dataplex.NewLake(ctx, "example_spark", &dataplex.LakeArgs{
			Name:     pulumi.String("tf-test-lake_95154"),
			Location: pulumi.String("us-central1"),
			Project:  pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		_, err = dataplex.NewTask(ctx, "example_spark", &dataplex.TaskArgs{
			TaskId:   pulumi.String("tf-test-task_64336"),
			Location: pulumi.String("us-central1"),
			Lake:     exampleSpark.Name,
			TriggerSpec: &dataplex.TaskTriggerSpecArgs{
				Type: pulumi.String("ON_DEMAND"),
			},
			Description: pulumi.String("task-spark-terraform"),
			ExecutionSpec: &dataplex.TaskExecutionSpecArgs{
				ServiceAccount: pulumi.Sprintf("%v-compute@developer.gserviceaccount.com", project.Number),
				Args: pulumi.StringMap{
					"TASK_ARGS": pulumi.String("--output_location,gs://spark-job/task-result, --output_format, json"),
				},
			},
			Spark: &dataplex.TaskSparkArgs{
				InfrastructureSpec: &dataplex.TaskSparkInfrastructureSpecArgs{
					Batch: &dataplex.TaskSparkInfrastructureSpecBatchArgs{
						ExecutorsCount:    pulumi.Int(2),
						MaxExecutorsCount: pulumi.Int(100),
					},
					ContainerImage: &dataplex.TaskSparkInfrastructureSpecContainerImageArgs{
						Image: pulumi.String("test-image"),
						JavaJars: pulumi.StringArray{
							pulumi.String("test-java-jars.jar"),
						},
						PythonPackages: pulumi.StringArray{
							pulumi.String("gs://bucket-name/my/path/to/lib.tar.gz"),
						},
						Properties: pulumi.StringMap{
							"name":  pulumi.String("wrench"),
							"mass":  pulumi.String("1.3kg"),
							"count": pulumi.String("3"),
						},
					},
					VpcNetwork: &dataplex.TaskSparkInfrastructureSpecVpcNetworkArgs{
						NetworkTags: pulumi.StringArray{
							pulumi.String("test-network-tag"),
						},
						SubNetwork: _default.ID(),
					},
				},
				FileUris: pulumi.StringArray{
					pulumi.String("gs://terrafrom-test/test.csv"),
				},
				ArchiveUris: pulumi.StringArray{
					pulumi.String("gs://terraform-test/test.csv"),
				},
				SqlScript: pulumi.String("show databases"),
			},
			Project: pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // VPC network
    var @default = new Gcp.Compute.Network("default", new()
    {
        Name = "tf-test-workstation-cluster_56730",
        AutoCreateSubnetworks = true,
    });

    var project = Gcp.Organizations.GetProject.Invoke();

    var exampleSpark = new Gcp.DataPlex.Lake("example_spark", new()
    {
        Name = "tf-test-lake_95154",
        Location = "us-central1",
        Project = "my-project-name",
    });

    var exampleSparkTask = new Gcp.DataPlex.Task("example_spark", new()
    {
        TaskId = "tf-test-task_64336",
        Location = "us-central1",
        Lake = exampleSpark.Name,
        TriggerSpec = new Gcp.DataPlex.Inputs.TaskTriggerSpecArgs
        {
            Type = "ON_DEMAND",
        },
        Description = "task-spark-terraform",
        ExecutionSpec = new Gcp.DataPlex.Inputs.TaskExecutionSpecArgs
        {
            ServiceAccount = project.Apply(getProjectResult => $"{getProjectResult.Number}-compute@developer.gserviceaccount.com"),
            Args = 
            {
                { "TASK_ARGS", "--output_location,gs://spark-job/task-result, --output_format, json" },
            },
        },
        Spark = new Gcp.DataPlex.Inputs.TaskSparkArgs
        {
            InfrastructureSpec = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecArgs
            {
                Batch = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecBatchArgs
                {
                    ExecutorsCount = 2,
                    MaxExecutorsCount = 100,
                },
                ContainerImage = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecContainerImageArgs
                {
                    Image = "test-image",
                    JavaJars = new[]
                    {
                        "test-java-jars.jar",
                    },
                    PythonPackages = new[]
                    {
                        "gs://bucket-name/my/path/to/lib.tar.gz",
                    },
                    Properties = 
                    {
                        { "name", "wrench" },
                        { "mass", "1.3kg" },
                        { "count", "3" },
                    },
                },
                VpcNetwork = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecVpcNetworkArgs
                {
                    NetworkTags = new[]
                    {
                        "test-network-tag",
                    },
                    SubNetwork = @default.Id,
                },
            },
            FileUris = new[]
            {
                "gs://terrafrom-test/test.csv",
            },
            ArchiveUris = new[]
            {
                "gs://terraform-test/test.csv",
            },
            SqlScript = "show databases",
        },
        Project = "my-project-name",
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.compute.Network;
import com.pulumi.gcp.compute.NetworkArgs;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.dataplex.Lake;
import com.pulumi.gcp.dataplex.LakeArgs;
import com.pulumi.gcp.dataplex.Task;
import com.pulumi.gcp.dataplex.TaskArgs;
import com.pulumi.gcp.dataplex.inputs.TaskTriggerSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkInfrastructureSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkInfrastructureSpecBatchArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkInfrastructureSpecContainerImageArgs;
import com.pulumi.gcp.dataplex.inputs.TaskSparkInfrastructureSpecVpcNetworkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // VPC network
        var default_ = new Network("default", NetworkArgs.builder()
            .name("tf-test-workstation-cluster_56730")
            .autoCreateSubnetworks(true)
            .build());

        final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
            .build());

        var exampleSpark = new Lake("exampleSpark", LakeArgs.builder()
            .name("tf-test-lake_95154")
            .location("us-central1")
            .project("my-project-name")
            .build());

        var exampleSparkTask = new Task("exampleSparkTask", TaskArgs.builder()
            .taskId("tf-test-task_64336")
            .location("us-central1")
            .lake(exampleSpark.name())
            .triggerSpec(TaskTriggerSpecArgs.builder()
                .type("ON_DEMAND")
                .build())
            .description("task-spark-terraform")
            .executionSpec(TaskExecutionSpecArgs.builder()
                .serviceAccount(project.applyValue(getProjectResult -> String.format("%s-compute@developer.gserviceaccount.com", getProjectResult.number())))
                .args(Map.of("TASK_ARGS", "--output_location,gs://spark-job/task-result, --output_format, json"))
                .build())
            .spark(TaskSparkArgs.builder()
                .infrastructureSpec(TaskSparkInfrastructureSpecArgs.builder()
                    .batch(TaskSparkInfrastructureSpecBatchArgs.builder()
                        .executorsCount(2)
                        .maxExecutorsCount(100)
                        .build())
                    .containerImage(TaskSparkInfrastructureSpecContainerImageArgs.builder()
                        .image("test-image")
                        .javaJars("test-java-jars.jar")
                        .pythonPackages("gs://bucket-name/my/path/to/lib.tar.gz")
                        .properties(Map.ofEntries(
                            Map.entry("name", "wrench"),
                            Map.entry("mass", "1.3kg"),
                            Map.entry("count", "3")
                        ))
                        .build())
                    .vpcNetwork(TaskSparkInfrastructureSpecVpcNetworkArgs.builder()
                        .networkTags("test-network-tag")
                        .subNetwork(default_.id())
                        .build())
                    .build())
                .fileUris("gs://terrafrom-test/test.csv")
                .archiveUris("gs://terraform-test/test.csv")
                .sqlScript("show databases")
                .build())
            .project("my-project-name")
            .build());

    }
}

YAML

resources:
  # VPC network
  default:
    type: gcp:compute:Network
    properties:
      name: tf-test-workstation-cluster_56730
      autoCreateSubnetworks: true
  exampleSpark:
    type: gcp:dataplex:Lake
    name: example_spark
    properties:
      name: tf-test-lake_95154
      location: us-central1
      project: my-project-name
  exampleSparkTask:
    type: gcp:dataplex:Task
    name: example_spark
    properties:
      taskId: tf-test-task_64336
      location: us-central1
      lake: ${exampleSpark.name}
      triggerSpec:
        type: ON_DEMAND
      description: task-spark-terraform
      executionSpec:
        serviceAccount: ${project.number}-compute@developer.gserviceaccount.com
        args:
          TASK_ARGS: --output_location,gs://spark-job/task-result, --output_format, json
      spark:
        infrastructureSpec:
          batch:
            executorsCount: 2
            maxExecutorsCount: 100
          containerImage:
            image: test-image
            javaJars:
              - test-java-jars.jar
            pythonPackages:
              - gs://bucket-name/my/path/to/lib.tar.gz
            properties:
              name: wrench
              mass: 1.3kg
              count: '3'
          vpcNetwork:
            networkTags:
              - test-network-tag
            subNetwork: ${default.id}
        fileUris:
          - gs://terraform-test/test.csv
        archiveUris:
          - gs://terraform-test/test.csv
        sqlScript: show databases
      project: my-project-name
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Dataplex Task Notebook

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// VPC network
const _default = new gcp.compute.Network("default", {
    name: "tf-test-workstation-cluster_34962",
    autoCreateSubnetworks: true,
});
const project = gcp.organizations.getProject({});
const exampleNotebook = new gcp.dataplex.Lake("example_notebook", {
    name: "tf-test-lake_74000",
    location: "us-central1",
    project: "my-project-name",
});
const exampleNotebookTask = new gcp.dataplex.Task("example_notebook", {
    taskId: "tf-test-task_75125",
    location: "us-central1",
    lake: exampleNotebook.name,
    triggerSpec: {
        type: "RECURRING",
        schedule: "1 * * * *",
    },
    executionSpec: {
        serviceAccount: project.then(project => `${project.number}-compute@developer.gserviceaccount.com`),
        args: {
            TASK_ARGS: "--output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json",
        },
    },
    notebook: {
        notebook: "gs://terraform-test/test-notebook.ipynb",
        infrastructureSpec: {
            batch: {
                executorsCount: 2,
                maxExecutorsCount: 100,
            },
            containerImage: {
                image: "test-image",
                javaJars: ["test-java-jars.jar"],
                pythonPackages: ["gs://bucket-name/my/path/to/lib.tar.gz"],
                properties: {
                    name: "wrench",
                    mass: "1.3kg",
                    count: "3",
                },
            },
            vpcNetwork: {
                networkTags: ["test-network-tag"],
                network: _default.id,
            },
        },
        fileUris: ["gs://terraform-test/test.csv"],
        archiveUris: ["gs://terraform-test/test.csv"],
    },
    project: "my-project-name",
});

Python

import pulumi
import pulumi_gcp as gcp

# VPC network
default = gcp.compute.Network("default",
    name="tf-test-workstation-cluster_34962",
    auto_create_subnetworks=True)
project = gcp.organizations.get_project()
example_notebook = gcp.dataplex.Lake("example_notebook",
    name="tf-test-lake_74000",
    location="us-central1",
    project="my-project-name")
example_notebook_task = gcp.dataplex.Task("example_notebook",
    task_id="tf-test-task_75125",
    location="us-central1",
    lake=example_notebook.name,
    trigger_spec={
        "type": "RECURRING",
        "schedule": "1 * * * *",
    },
    execution_spec={
        "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
        "args": {
            "TASK_ARGS": "--output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json",
        },
    },
    notebook={
        "notebook": "gs://terraform-test/test-notebook.ipynb",
        "infrastructure_spec": {
            "batch": {
                "executors_count": 2,
                "max_executors_count": 100,
            },
            "container_image": {
                "image": "test-image",
                "java_jars": ["test-java-jars.jar"],
                "python_packages": ["gs://bucket-name/my/path/to/lib.tar.gz"],
                "properties": {
                    "name": "wrench",
                    "mass": "1.3kg",
                    "count": "3",
                },
            },
            "vpc_network": {
                "network_tags": ["test-network-tag"],
                "network": default.id,
            },
        },
        "file_uris": ["gs://terraform-test/test.csv"],
        "archive_uris": ["gs://terraform-test/test.csv"],
    },
    project="my-project-name")

Go

package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/compute"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// VPC network
		_default, err := compute.NewNetwork(ctx, "default", &compute.NetworkArgs{
			Name:                  pulumi.String("tf-test-workstation-cluster_34962"),
			AutoCreateSubnetworks: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
		if err != nil {
			return err
		}
		exampleNotebook, err := dataplex.NewLake(ctx, "example_notebook", &dataplex.LakeArgs{
			Name:     pulumi.String("tf-test-lake_74000"),
			Location: pulumi.String("us-central1"),
			Project:  pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		_, err = dataplex.NewTask(ctx, "example_notebook", &dataplex.TaskArgs{
			TaskId:   pulumi.String("tf-test-task_75125"),
			Location: pulumi.String("us-central1"),
			Lake:     exampleNotebook.Name,
			TriggerSpec: &dataplex.TaskTriggerSpecArgs{
				Type:     pulumi.String("RECURRING"),
				Schedule: pulumi.String("1 * * * *"),
			},
			ExecutionSpec: &dataplex.TaskExecutionSpecArgs{
				ServiceAccount: pulumi.Sprintf("%v-compute@developer.gserviceaccount.com", project.Number),
				Args: pulumi.StringMap{
					"TASK_ARGS": pulumi.String("--output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json"),
				},
			},
			Notebook: &dataplex.TaskNotebookArgs{
				Notebook: pulumi.String("gs://terraform-test/test-notebook.ipynb"),
				InfrastructureSpec: &dataplex.TaskNotebookInfrastructureSpecArgs{
					Batch: &dataplex.TaskNotebookInfrastructureSpecBatchArgs{
						ExecutorsCount:    pulumi.Int(2),
						MaxExecutorsCount: pulumi.Int(100),
					},
					ContainerImage: &dataplex.TaskNotebookInfrastructureSpecContainerImageArgs{
						Image: pulumi.String("test-image"),
						JavaJars: pulumi.StringArray{
							pulumi.String("test-java-jars.jar"),
						},
						PythonPackages: pulumi.StringArray{
							pulumi.String("gs://bucket-name/my/path/to/lib.tar.gz"),
						},
						Properties: pulumi.StringMap{
							"name":  pulumi.String("wrench"),
							"mass":  pulumi.String("1.3kg"),
							"count": pulumi.String("3"),
						},
					},
					VpcNetwork: &dataplex.TaskNotebookInfrastructureSpecVpcNetworkArgs{
						NetworkTags: pulumi.StringArray{
							pulumi.String("test-network-tag"),
						},
						Network: _default.ID(),
					},
				},
				FileUris: pulumi.StringArray{
					pulumi.String("gs://terraform-test/test.csv"),
				},
				ArchiveUris: pulumi.StringArray{
					pulumi.String("gs://terraform-test/test.csv"),
				},
			},
			Project: pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // VPC network
    var @default = new Gcp.Compute.Network("default", new()
    {
        Name = "tf-test-workstation-cluster_34962",
        AutoCreateSubnetworks = true,
    });

    var project = Gcp.Organizations.GetProject.Invoke();

    var exampleNotebook = new Gcp.DataPlex.Lake("example_notebook", new()
    {
        Name = "tf-test-lake_74000",
        Location = "us-central1",
        Project = "my-project-name",
    });

    var exampleNotebookTask = new Gcp.DataPlex.Task("example_notebook", new()
    {
        TaskId = "tf-test-task_75125",
        Location = "us-central1",
        Lake = exampleNotebook.Name,
        TriggerSpec = new Gcp.DataPlex.Inputs.TaskTriggerSpecArgs
        {
            Type = "RECURRING",
            Schedule = "1 * * * *",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.TaskExecutionSpecArgs
        {
            ServiceAccount = project.Apply(getProjectResult => $"{getProjectResult.Number}-compute@developer.gserviceaccount.com"),
            Args = 
            {
                { "TASK_ARGS", "--output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json" },
            },
        },
        Notebook = new Gcp.DataPlex.Inputs.TaskNotebookArgs
        {
            Notebook = "gs://terraform-test/test-notebook.ipynb",
            InfrastructureSpec = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecArgs
            {
                Batch = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecBatchArgs
                {
                    ExecutorsCount = 2,
                    MaxExecutorsCount = 100,
                },
                ContainerImage = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecContainerImageArgs
                {
                    Image = "test-image",
                    JavaJars = new[]
                    {
                        "test-java-jars.jar",
                    },
                    PythonPackages = new[]
                    {
                        "gs://bucket-name/my/path/to/lib.tar.gz",
                    },
                    Properties = 
                    {
                        { "name", "wrench" },
                        { "mass", "1.3kg" },
                        { "count", "3" },
                    },
                },
                VpcNetwork = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecVpcNetworkArgs
                {
                    NetworkTags = new[]
                    {
                        "test-network-tag",
                    },
                    Network = @default.Id,
                },
            },
            FileUris = new[]
            {
                "gs://terraform-test/test.csv",
            },
            ArchiveUris = new[]
            {
                "gs://terraform-test/test.csv",
            },
        },
        Project = "my-project-name",
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.compute.Network;
import com.pulumi.gcp.compute.NetworkArgs;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.dataplex.Lake;
import com.pulumi.gcp.dataplex.LakeArgs;
import com.pulumi.gcp.dataplex.Task;
import com.pulumi.gcp.dataplex.TaskArgs;
import com.pulumi.gcp.dataplex.inputs.TaskTriggerSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskNotebookArgs;
import com.pulumi.gcp.dataplex.inputs.TaskNotebookInfrastructureSpecArgs;
import com.pulumi.gcp.dataplex.inputs.TaskNotebookInfrastructureSpecBatchArgs;
import com.pulumi.gcp.dataplex.inputs.TaskNotebookInfrastructureSpecContainerImageArgs;
import com.pulumi.gcp.dataplex.inputs.TaskNotebookInfrastructureSpecVpcNetworkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // VPC network
        var default_ = new Network("default", NetworkArgs.builder()
            .name("tf-test-workstation-cluster_34962")
            .autoCreateSubnetworks(true)
            .build());

        final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
            .build());

        var exampleNotebook = new Lake("exampleNotebook", LakeArgs.builder()
            .name("tf-test-lake_74000")
            .location("us-central1")
            .project("my-project-name")
            .build());

        var exampleNotebookTask = new Task("exampleNotebookTask", TaskArgs.builder()
            .taskId("tf-test-task_75125")
            .location("us-central1")
            .lake(exampleNotebook.name())
            .triggerSpec(TaskTriggerSpecArgs.builder()
                .type("RECURRING")
                .schedule("1 * * * *")
                .build())
            .executionSpec(TaskExecutionSpecArgs.builder()
                .serviceAccount(project.applyValue(getProjectResult -> String.format("%s-compute@developer.gserviceaccount.com", getProjectResult.number())))
                .args(Map.of("TASK_ARGS", "--output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json"))
                .build())
            .notebook(TaskNotebookArgs.builder()
                .notebook("gs://terraform-test/test-notebook.ipynb")
                .infrastructureSpec(TaskNotebookInfrastructureSpecArgs.builder()
                    .batch(TaskNotebookInfrastructureSpecBatchArgs.builder()
                        .executorsCount(2)
                        .maxExecutorsCount(100)
                        .build())
                    .containerImage(TaskNotebookInfrastructureSpecContainerImageArgs.builder()
                        .image("test-image")
                        .javaJars("test-java-jars.jar")
                        .pythonPackages("gs://bucket-name/my/path/to/lib.tar.gz")
                        .properties(Map.ofEntries(
                            Map.entry("name", "wrench"),
                            Map.entry("mass", "1.3kg"),
                            Map.entry("count", "3")
                        ))
                        .build())
                    .vpcNetwork(TaskNotebookInfrastructureSpecVpcNetworkArgs.builder()
                        .networkTags("test-network-tag")
                        .network(default_.id())
                        .build())
                    .build())
                .fileUris("gs://terraform-test/test.csv")
                .archiveUris("gs://terraform-test/test.csv")
                .build())
            .project("my-project-name")
            .build());

    }
}

YAML

resources:
  # VPC network
  default:
    type: gcp:compute:Network
    properties:
      name: tf-test-workstation-cluster_34962
      autoCreateSubnetworks: true
  exampleNotebook:
    type: gcp:dataplex:Lake
    name: example_notebook
    properties:
      name: tf-test-lake_74000
      location: us-central1
      project: my-project-name
  exampleNotebookTask:
    type: gcp:dataplex:Task
    name: example_notebook
    properties:
      taskId: tf-test-task_75125
      location: us-central1
      lake: ${exampleNotebook.name}
      triggerSpec:
        type: RECURRING
        schedule: 1 * * * *
      executionSpec:
        serviceAccount: ${project.number}-compute@developer.gserviceaccount.com
        args:
          TASK_ARGS: --output_location,gs://spark-job-jars-anrajitha/task-result, --output_format, json
      notebook:
        notebook: gs://terraform-test/test-notebook.ipynb
        infrastructureSpec:
          batch:
            executorsCount: 2
            maxExecutorsCount: 100
          containerImage:
            image: test-image
            javaJars:
              - test-java-jars.jar
            pythonPackages:
              - gs://bucket-name/my/path/to/lib.tar.gz
            properties:
              name: wrench
              mass: 1.3kg
              count: '3'
          vpcNetwork:
            networkTags:
              - test-network-tag
            network: ${default.id}
        fileUris:
          - gs://terraform-test/test.csv
        archiveUris:
          - gs://terraform-test/test.csv
      project: my-project-name
variables:
  project:
    fn::invoke:
      function: gcp:organizations:getProject
      arguments: {}

Create Task Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Task(name: string, args: TaskArgs, opts?: CustomResourceOptions);
@overload
def Task(resource_name: str,
         args: TaskArgs,
         opts: Optional[ResourceOptions] = None)

@overload
def Task(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         execution_spec: Optional[TaskExecutionSpecArgs] = None,
         trigger_spec: Optional[TaskTriggerSpecArgs] = None,
         description: Optional[str] = None,
         display_name: Optional[str] = None,
         labels: Optional[Mapping[str, str]] = None,
         lake: Optional[str] = None,
         location: Optional[str] = None,
         notebook: Optional[TaskNotebookArgs] = None,
         project: Optional[str] = None,
         spark: Optional[TaskSparkArgs] = None,
         task_id: Optional[str] = None)
func NewTask(ctx *Context, name string, args TaskArgs, opts ...ResourceOption) (*Task, error)
public Task(string name, TaskArgs args, CustomResourceOptions? opts = null)
public Task(String name, TaskArgs args)
public Task(String name, TaskArgs args, CustomResourceOptions options)
type: gcp:dataplex:Task
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

The parameters are the same in every language; only the spellings differ (the name is resource_name in Python; the options are ResourceOptions in Python, ResourceOption values in Go, and CustomResourceOptions in the other languages; the Go constructor additionally takes a ctx Context object for the current deployment).

name This property is required. string
The unique name of the resource.
args This property is required. TaskArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
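
As an illustration of the opts parameter, here is a minimal Python sketch that passes resource options alongside the required arguments; all argument values below are placeholders:

import pulumi
import pulumi_gcp as gcp

# Placeholder values throughout; ResourceOptions is the "bag of options"
# described above.
task = gcp.dataplex.Task("taskResource",
    task_id="my-task",
    location="us-central1",
    lake="my-lake",
    execution_spec=gcp.dataplex.TaskExecutionSpecArgs(
        service_account="sa@my-project.iam.gserviceaccount.com",
    ),
    trigger_spec=gcp.dataplex.TaskTriggerSpecArgs(type="ON_DEMAND"),
    spark=gcp.dataplex.TaskSparkArgs(sql_script="show databases"),
    opts=pulumi.ResourceOptions(protect=True))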

Constructor example

The following reference example uses placeholder values for all input properties.

C#

var taskResource = new Gcp.DataPlex.Task("taskResource", new()
{
    ExecutionSpec = new Gcp.DataPlex.Inputs.TaskExecutionSpecArgs
    {
        ServiceAccount = "string",
        Args = 
        {
            { "string", "string" },
        },
        KmsKey = "string",
        MaxJobExecutionLifetime = "string",
        Project = "string",
    },
    TriggerSpec = new Gcp.DataPlex.Inputs.TaskTriggerSpecArgs
    {
        Type = "string",
        Disabled = false,
        MaxRetries = 0,
        Schedule = "string",
        StartTime = "string",
    },
    Description = "string",
    DisplayName = "string",
    Labels = 
    {
        { "string", "string" },
    },
    Lake = "string",
    Location = "string",
    Notebook = new Gcp.DataPlex.Inputs.TaskNotebookArgs
    {
        Notebook = "string",
        ArchiveUris = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        InfrastructureSpec = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecArgs
        {
            Batch = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecBatchArgs
            {
                ExecutorsCount = 0,
                MaxExecutorsCount = 0,
            },
            ContainerImage = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecContainerImageArgs
            {
                Image = "string",
                JavaJars = new[]
                {
                    "string",
                },
                Properties = 
                {
                    { "string", "string" },
                },
                PythonPackages = new[]
                {
                    "string",
                },
            },
            VpcNetwork = new Gcp.DataPlex.Inputs.TaskNotebookInfrastructureSpecVpcNetworkArgs
            {
                Network = "string",
                NetworkTags = new[]
                {
                    "string",
                },
                SubNetwork = "string",
            },
        },
    },
    Project = "string",
    Spark = new Gcp.DataPlex.Inputs.TaskSparkArgs
    {
        ArchiveUris = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        InfrastructureSpec = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecArgs
        {
            Batch = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecBatchArgs
            {
                ExecutorsCount = 0,
                MaxExecutorsCount = 0,
            },
            ContainerImage = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecContainerImageArgs
            {
                Image = "string",
                JavaJars = new[]
                {
                    "string",
                },
                Properties = 
                {
                    { "string", "string" },
                },
                PythonPackages = new[]
                {
                    "string",
                },
            },
            VpcNetwork = new Gcp.DataPlex.Inputs.TaskSparkInfrastructureSpecVpcNetworkArgs
            {
                Network = "string",
                NetworkTags = new[]
                {
                    "string",
                },
                SubNetwork = "string",
            },
        },
        MainClass = "string",
        MainJarFileUri = "string",
        PythonScriptFile = "string",
        SqlScript = "string",
        SqlScriptFile = "string",
    },
    TaskId = "string",
});

Go

example, err := dataplex.NewTask(ctx, "taskResource", &dataplex.TaskArgs{
	ExecutionSpec: &dataplex.TaskExecutionSpecArgs{
		ServiceAccount: pulumi.String("string"),
		Args: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		KmsKey:                  pulumi.String("string"),
		MaxJobExecutionLifetime: pulumi.String("string"),
		Project:                 pulumi.String("string"),
	},
	TriggerSpec: &dataplex.TaskTriggerSpecArgs{
		Type:       pulumi.String("string"),
		Disabled:   pulumi.Bool(false),
		MaxRetries: pulumi.Int(0),
		Schedule:   pulumi.String("string"),
		StartTime:  pulumi.String("string"),
	},
	Description: pulumi.String("string"),
	DisplayName: pulumi.String("string"),
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Lake:     pulumi.String("string"),
	Location: pulumi.String("string"),
	Notebook: &dataplex.TaskNotebookArgs{
		Notebook: pulumi.String("string"),
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		InfrastructureSpec: &dataplex.TaskNotebookInfrastructureSpecArgs{
			Batch: &dataplex.TaskNotebookInfrastructureSpecBatchArgs{
				ExecutorsCount:    pulumi.Int(0),
				MaxExecutorsCount: pulumi.Int(0),
			},
			ContainerImage: &dataplex.TaskNotebookInfrastructureSpecContainerImageArgs{
				Image: pulumi.String("string"),
				JavaJars: pulumi.StringArray{
					pulumi.String("string"),
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				PythonPackages: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
			VpcNetwork: &dataplex.TaskNotebookInfrastructureSpecVpcNetworkArgs{
				Network: pulumi.String("string"),
				NetworkTags: pulumi.StringArray{
					pulumi.String("string"),
				},
				SubNetwork: pulumi.String("string"),
			},
		},
	},
	Project: pulumi.String("string"),
	Spark: &dataplex.TaskSparkArgs{
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		InfrastructureSpec: &dataplex.TaskSparkInfrastructureSpecArgs{
			Batch: &dataplex.TaskSparkInfrastructureSpecBatchArgs{
				ExecutorsCount:    pulumi.Int(0),
				MaxExecutorsCount: pulumi.Int(0),
			},
			ContainerImage: &dataplex.TaskSparkInfrastructureSpecContainerImageArgs{
				Image: pulumi.String("string"),
				JavaJars: pulumi.StringArray{
					pulumi.String("string"),
				},
				Properties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				PythonPackages: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
			VpcNetwork: &dataplex.TaskSparkInfrastructureSpecVpcNetworkArgs{
				Network: pulumi.String("string"),
				NetworkTags: pulumi.StringArray{
					pulumi.String("string"),
				},
				SubNetwork: pulumi.String("string"),
			},
		},
		MainClass:        pulumi.String("string"),
		MainJarFileUri:   pulumi.String("string"),
		PythonScriptFile: pulumi.String("string"),
		SqlScript:        pulumi.String("string"),
		SqlScriptFile:    pulumi.String("string"),
	},
	TaskId: pulumi.String("string"),
})

Java

var taskResource = new Task("taskResource", TaskArgs.builder()
    .executionSpec(TaskExecutionSpecArgs.builder()
        .serviceAccount("string")
        .args(Map.of("string", "string"))
        .kmsKey("string")
        .maxJobExecutionLifetime("string")
        .project("string")
        .build())
    .triggerSpec(TaskTriggerSpecArgs.builder()
        .type("string")
        .disabled(false)
        .maxRetries(0)
        .schedule("string")
        .startTime("string")
        .build())
    .description("string")
    .displayName("string")
    .labels(Map.of("string", "string"))
    .lake("string")
    .location("string")
    .notebook(TaskNotebookArgs.builder()
        .notebook("string")
        .archiveUris("string")
        .fileUris("string")
        .infrastructureSpec(TaskNotebookInfrastructureSpecArgs.builder()
            .batch(TaskNotebookInfrastructureSpecBatchArgs.builder()
                .executorsCount(0)
                .maxExecutorsCount(0)
                .build())
            .containerImage(TaskNotebookInfrastructureSpecContainerImageArgs.builder()
                .image("string")
                .javaJars("string")
                .properties(Map.of("string", "string"))
                .pythonPackages("string")
                .build())
            .vpcNetwork(TaskNotebookInfrastructureSpecVpcNetworkArgs.builder()
                .network("string")
                .networkTags("string")
                .subNetwork("string")
                .build())
            .build())
        .build())
    .project("string")
    .spark(TaskSparkArgs.builder()
        .archiveUris("string")
        .fileUris("string")
        .infrastructureSpec(TaskSparkInfrastructureSpecArgs.builder()
            .batch(TaskSparkInfrastructureSpecBatchArgs.builder()
                .executorsCount(0)
                .maxExecutorsCount(0)
                .build())
            .containerImage(TaskSparkInfrastructureSpecContainerImageArgs.builder()
                .image("string")
                .javaJars("string")
                .properties(Map.of("string", "string"))
                .pythonPackages("string")
                .build())
            .vpcNetwork(TaskSparkInfrastructureSpecVpcNetworkArgs.builder()
                .network("string")
                .networkTags("string")
                .subNetwork("string")
                .build())
            .build())
        .mainClass("string")
        .mainJarFileUri("string")
        .pythonScriptFile("string")
        .sqlScript("string")
        .sqlScriptFile("string")
        .build())
    .taskId("string")
    .build());

Python

task_resource = gcp.dataplex.Task("taskResource",
    execution_spec={
        "service_account": "string",
        "args": {
            "string": "string",
        },
        "kms_key": "string",
        "max_job_execution_lifetime": "string",
        "project": "string",
    },
    trigger_spec={
        "type": "string",
        "disabled": False,
        "max_retries": 0,
        "schedule": "string",
        "start_time": "string",
    },
    description="string",
    display_name="string",
    labels={
        "string": "string",
    },
    lake="string",
    location="string",
    notebook={
        "notebook": "string",
        "archive_uris": ["string"],
        "file_uris": ["string"],
        "infrastructure_spec": {
            "batch": {
                "executors_count": 0,
                "max_executors_count": 0,
            },
            "container_image": {
                "image": "string",
                "java_jars": ["string"],
                "properties": {
                    "string": "string",
                },
                "python_packages": ["string"],
            },
            "vpc_network": {
                "network": "string",
                "network_tags": ["string"],
                "sub_network": "string",
            },
        },
    },
    project="string",
    spark={
        "archive_uris": ["string"],
        "file_uris": ["string"],
        "infrastructure_spec": {
            "batch": {
                "executors_count": 0,
                "max_executors_count": 0,
            },
            "container_image": {
                "image": "string",
                "java_jars": ["string"],
                "properties": {
                    "string": "string",
                },
                "python_packages": ["string"],
            },
            "vpc_network": {
                "network": "string",
                "network_tags": ["string"],
                "sub_network": "string",
            },
        },
        "main_class": "string",
        "main_jar_file_uri": "string",
        "python_script_file": "string",
        "sql_script": "string",
        "sql_script_file": "string",
    },
    task_id="string")

TypeScript

const taskResource = new gcp.dataplex.Task("taskResource", {
    executionSpec: {
        serviceAccount: "string",
        args: {
            string: "string",
        },
        kmsKey: "string",
        maxJobExecutionLifetime: "string",
        project: "string",
    },
    triggerSpec: {
        type: "string",
        disabled: false,
        maxRetries: 0,
        schedule: "string",
        startTime: "string",
    },
    description: "string",
    displayName: "string",
    labels: {
        string: "string",
    },
    lake: "string",
    location: "string",
    notebook: {
        notebook: "string",
        archiveUris: ["string"],
        fileUris: ["string"],
        infrastructureSpec: {
            batch: {
                executorsCount: 0,
                maxExecutorsCount: 0,
            },
            containerImage: {
                image: "string",
                javaJars: ["string"],
                properties: {
                    string: "string",
                },
                pythonPackages: ["string"],
            },
            vpcNetwork: {
                network: "string",
                networkTags: ["string"],
                subNetwork: "string",
            },
        },
    },
    project: "string",
    spark: {
        archiveUris: ["string"],
        fileUris: ["string"],
        infrastructureSpec: {
            batch: {
                executorsCount: 0,
                maxExecutorsCount: 0,
            },
            containerImage: {
                image: "string",
                javaJars: ["string"],
                properties: {
                    string: "string",
                },
                pythonPackages: ["string"],
            },
            vpcNetwork: {
                network: "string",
                networkTags: ["string"],
                subNetwork: "string",
            },
        },
        mainClass: "string",
        mainJarFileUri: "string",
        pythonScriptFile: "string",
        sqlScript: "string",
        sqlScriptFile: "string",
    },
    taskId: "string",
});

YAML

type: gcp:dataplex:Task
properties:
    description: string
    displayName: string
    executionSpec:
        args:
            string: string
        kmsKey: string
        maxJobExecutionLifetime: string
        project: string
        serviceAccount: string
    labels:
        string: string
    lake: string
    location: string
    notebook:
        archiveUris:
            - string
        fileUris:
            - string
        infrastructureSpec:
            batch:
                executorsCount: 0
                maxExecutorsCount: 0
            containerImage:
                image: string
                javaJars:
                    - string
                properties:
                    string: string
                pythonPackages:
                    - string
            vpcNetwork:
                network: string
                networkTags:
                    - string
                subNetwork: string
        notebook: string
    project: string
    spark:
        archiveUris:
            - string
        fileUris:
            - string
        infrastructureSpec:
            batch:
                executorsCount: 0
                maxExecutorsCount: 0
            containerImage:
                image: string
                javaJars:
                    - string
                properties:
                    string: string
                pythonPackages:
                    - string
            vpcNetwork:
                network: string
                networkTags:
                    - string
                subNetwork: string
        mainClass: string
        mainJarFileUri: string
        pythonScriptFile: string
        sqlScript: string
        sqlScriptFile: string
    taskId: string
    triggerSpec:
        disabled: false
        maxRetries: 0
        schedule: string
        startTime: string
        type: string

Task Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The Task resource accepts the following input properties:

ExecutionSpec This property is required. TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
TriggerSpec This property is required. TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
Description string
User-provided description of the task.
DisplayName string
User-friendly display name.
Labels Dictionary<string, string>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
Location Changes to this property will trigger replacement. string
The location in which the task will be created.
Notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
TaskId Changes to this property will trigger replacement. string
The ID of the task.
ExecutionSpec This property is required. TaskExecutionSpecArgs
Configuration for how the task is executed. Structure is documented below.
TriggerSpec This property is required. TaskTriggerSpecArgs
Configuration for when and how often the task is triggered. Structure is documented below.
Description string
User-provided description of the task.
DisplayName string
User-friendly display name.
Labels map[string]string
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
Location Changes to this property will trigger replacement. string
The location in which the task will be created.
Notebook TaskNotebookArgs
Configuration for running a scheduled notebook task. Structure is documented below.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark TaskSparkArgs
Configuration for running a Spark task. Structure is documented below.
TaskId Changes to this property will trigger replacement. string
The ID of the task.
executionSpec This property is required. TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
triggerSpec This property is required. TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
description String
User-provided description of the task.
displayName String
User-friendly display name.
labels Map<String,String>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. String
The lake in which the task will be created.
location Changes to this property will trigger replacement. String
The location in which the task will be created.
notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
taskId Changes to this property will trigger replacement. String
The ID of the task.
executionSpec This property is required. TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
triggerSpec This property is required. TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
description string
User-provided description of the task.
displayName string
User-friendly display name.
labels {[key: string]: string}
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
location Changes to this property will trigger replacement. string
The location in which the task will be created.
notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
taskId Changes to this property will trigger replacement. string
The ID of the task.
execution_spec This property is required. TaskExecutionSpecArgs
Configuration for how the task is executed. Structure is documented below.
trigger_spec This property is required. TaskTriggerSpecArgs
Configuration for when and how often the task is triggered. Structure is documented below.
description str
User-provided description of the task.
display_name str
User-friendly display name.
labels Mapping[str, str]
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. str
The lake in which the task will be created.
location Changes to this property will trigger replacement. str
The location in which the task will be created.
notebook TaskNotebookArgs
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark TaskSparkArgs
Configuration for running a Spark task. Structure is documented below.
task_id Changes to this property will trigger replacement. str
The ID of the task.
executionSpec This property is required. Property Map
Configuration for how the task is executed. Structure is documented below.
triggerSpec This property is required. Property Map
Configuration for when and how often the task is triggered. Structure is documented below.
description String
User-provided description of the task.
displayName String
User-friendly display name.
labels Map<String>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. String
The lake in which the task will be created.
location Changes to this property will trigger replacement. String
The location in which the task will be created.
notebook Property Map
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark Property Map
Configuration for running a Spark task. Structure is documented below.
taskId Changes to this property will trigger replacement. String
The ID of the task.
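
For example, a minimal task that supplies the two required inputs plus a Spark driver might look like the following TypeScript sketch; the lake, bucket, service account, and task names are all placeholders.

import * as gcp from "@pulumi/gcp";

// Minimal sketch: executionSpec and triggerSpec are the only required inputs,
// but a task also needs a driver section (spark or notebook) to run anything.
// All names below are placeholders.
const minimal = new gcp.dataplex.Task("minimal", {
    taskId: "example-task",
    location: "us-central1",
    lake: "example-lake",
    executionSpec: {
        serviceAccount: "task-runner@my-project-name.iam.gserviceaccount.com",
    },
    triggerSpec: {
        type: "ON_DEMAND",
    },
    spark: {
        pythonScriptFile: "gs://example-bucket/scripts/job.py",
    },
});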

Outputs

All input properties are implicitly available as output properties. Additionally, the Task resource produces the following output properties:

CreateTime string
The time when the task was created.
EffectiveLabels Dictionary<string, string>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
ExecutionStatuses List<TaskExecutionStatus>
Status of the task execution. Structure is documented below.
Id string
The provider-assigned unique ID for this managed resource.
Name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
(Output) Current state of the task.
Uid string
(Output) System generated globally unique ID for the task.
UpdateTime string
(Output) The time when the task was last updated.
CreateTime string
The time when the task was created.
EffectiveLabels map[string]string
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
ExecutionStatuses []TaskExecutionStatus
Status of the task execution. Structure is documented below.
Id string
The provider-assigned unique ID for this managed resource.
Name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
(Output) Current state of the task.
Uid string
(Output) System generated globally unique ID for the task.
UpdateTime string
(Output) The time when the task was last updated.
createTime String
The time when the task was created.
effectiveLabels Map<String,String>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionStatuses List<TaskExecutionStatus>
Status of the task execution. Structure is documented below.
id String
The provider-assigned unique ID for this managed resource.
name String
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
(Output) Current state of the task.
uid String
(Output) System generated globally unique ID for the task.
updateTime String
(Output) The time when the task was last updated.
createTime string
The time when the task was created.
effectiveLabels {[key: string]: string}
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionStatuses TaskExecutionStatus[]
Status of the task execution. Structure is documented below.
id string
The provider-assigned unique ID for this managed resource.
name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
state string
(Output) Current state of the task.
uid string
(Output) System generated globally unique ID for the task.
updateTime string
(Output) The time when the task was last updated.
create_time str
The time when the task was created.
effective_labels Mapping[str, str]
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
execution_statuses Sequence[TaskExecutionStatus]
Status of the task execution. Structure is documented below.
id str
The provider-assigned unique ID for this managed resource.
name str
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
state str
(Output) Current state of the task.
uid str
(Output) System generated globally unique ID for the task.
update_time str
(Output) The time when the task was last updated.
createTime String
The time when the task was created.
effectiveLabels Map<String>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionStatuses List<Property Map>
Status of the task execution. Structure is documented below.
id String
The provider-assigned unique ID for this managed resource.
name String
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
(Output) Current state of the task.
uid String
(Output) System generated globally unique ID for the task.
updateTime String
(Output) The time when the task was last updated.
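
As a sketch of consuming these outputs in TypeScript, assuming a task resource created as in the examples above:

import * as gcp from "@pulumi/gcp";

// Assume `task` is a Task created elsewhere in the program.
declare const task: gcp.dataplex.Task;

// Output properties are pulumi.Output values and can be exported directly.
export const taskName = task.name;          // relative resource name
export const taskState = task.state;        // current state of the task
export const taskCreated = task.createTime; // creation timestamp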

Look up Existing Task Resource

Get an existing Task resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: TaskState, opts?: CustomResourceOptions): Task
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        create_time: Optional[str] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        execution_spec: Optional[TaskExecutionSpecArgs] = None,
        execution_statuses: Optional[Sequence[TaskExecutionStatusArgs]] = None,
        labels: Optional[Mapping[str, str]] = None,
        lake: Optional[str] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        notebook: Optional[TaskNotebookArgs] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        spark: Optional[TaskSparkArgs] = None,
        state: Optional[str] = None,
        task_id: Optional[str] = None,
        trigger_spec: Optional[TaskTriggerSpecArgs] = None,
        uid: Optional[str] = None,
        update_time: Optional[str] = None) -> Task
func GetTask(ctx *Context, name string, id IDInput, state *TaskState, opts ...ResourceOption) (*Task, error)
public static Task Get(string name, Input<string> id, TaskState? state, CustomResourceOptions? opts = null)
public static Task get(String name, Output<String> id, TaskState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataplex:Task
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
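
For example, in TypeScript an existing task can be adopted by its full resource ID; the path segments below are placeholders.

import * as gcp from "@pulumi/gcp";

// Look up an existing task by its provider ID (placeholder segments shown).
const adopted = gcp.dataplex.Task.get("adopted-task",
    "projects/my-project-name/locations/us-central1/lakes/example-lake/tasks/example-task");
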
The following state arguments are supported:
CreateTime string
The time when the task was created.
Description string
User-provided description of the task.
DisplayName string
User-friendly display name.
EffectiveLabels Dictionary<string, string>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
ExecutionSpec TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
ExecutionStatuses List<TaskExecutionStatus>
Status of the task execution. Structure is documented below.
Labels Dictionary<string, string>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
Location Changes to this property will trigger replacement. string
The location in which the task will be created.
Name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
Notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
Spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
State string
(Output) Current state of the task.
TaskId Changes to this property will trigger replacement. string
The ID of the task.
TriggerSpec TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
Uid string
(Output) System generated globally unique ID for the task.
UpdateTime string
(Output) The time when the task was last updated.
CreateTime string
The time when the task was created.
Description string
User-provided description of the task.
DisplayName string
User-friendly display name.
EffectiveLabels map[string]string
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
ExecutionSpec TaskExecutionSpecArgs
Configuration for how the task is executed. Structure is documented below.
ExecutionStatuses []TaskExecutionStatusArgs
Status of the task execution. Structure is documented below.
Labels map[string]string
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
Location Changes to this property will trigger replacement. string
The location in which the task will be created.
Name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
Notebook TaskNotebookArgs
Configuration for running a scheduled notebook task. Structure is documented below.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
Spark TaskSparkArgs
Configuration for running a Spark task. Structure is documented below.
State string
(Output) Current state of the task.
TaskId Changes to this property will trigger replacement. string
The ID of the task.
TriggerSpec TaskTriggerSpecArgs
Configuration for when and how often the task is triggered. Structure is documented below.
Uid string
(Output) System generated globally unique ID for the task.
UpdateTime string
(Output) The time when the task was last updated.
createTime String
The time when the task was created.
description String
User-provided description of the task.
displayName String
User-friendly display name.
effectiveLabels Map<String,String>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionSpec TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
executionStatuses List<TaskExecutionStatus>
Status of the task execution. Structure is documented below.
labels Map<String,String>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. String
The lake in which the task will be created.
location Changes to this property will trigger replacement. String
The location in which the task will be created.
name String
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
state String
(Output) Current state of the task.
taskId Changes to this property will trigger replacement. String
The ID of the task.
triggerSpec TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
uid String
(Output) System generated globally unique ID for the task.
updateTime String
(Output) The time when the task was last updated.
createTime string
The time when the task was created.
description string
User-provided description of the task.
displayName string
User-friendly display name.
effectiveLabels {[key: string]: string}
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionSpec TaskExecutionSpec
Configuration for how the task is executed. Structure is documented below.
executionStatuses TaskExecutionStatus[]
Status of the task execution. Structure is documented below.
labels {[key: string]: string}
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. string
The lake in which the task will be created.
location Changes to this property will trigger replacement. string
The location in which the task will be created.
name string
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
notebook TaskNotebook
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
spark TaskSpark
Configuration for running a Spark task. Structure is documented below.
state string
(Output) Current state of the task.
taskId Changes to this property will trigger replacement. string
The ID of the task.
triggerSpec TaskTriggerSpec
Configuration for when and how often the task is triggered. Structure is documented below.
uid string
(Output) System generated globally unique ID for the task.
updateTime string
(Output) The time when the task was last updated.
create_time str
The time when the task was created.
description str
User-provided description of the task.
display_name str
User-friendly display name.
effective_labels Mapping[str, str]
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
execution_spec TaskExecutionSpecArgs
Configuration for how the task is executed. Structure is documented below.
execution_statuses Sequence[TaskExecutionStatusArgs]
Status of the task execution. Structure is documented below.
labels Mapping[str, str]
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. str
The lake in which the task will be created.
location Changes to this property will trigger replacement. str
The location in which the task will be created.
name str
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
notebook TaskNotebookArgs
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
spark TaskSparkArgs
Configuration for running a Spark task. Structure is documented below.
state str
(Output) Current state of the task.
task_id Changes to this property will trigger replacement. str
The ID of the task.
trigger_spec TaskTriggerSpecArgs
Configuration for when and how often the task is triggered. Structure is documented below.
uid str
(Output) System generated globally unique ID for the task.
update_time str
(Output) The time when the task was last updated.
createTime String
The time when the task was created.
description String
User-provided description of the task.
displayName String
User-friendly display name.
effectiveLabels Map<String>
All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
executionSpec Property Map
Configuration for how the task is executed. Structure is documented below.
executionStatuses List<Property Map>
Status of the task execution. Structure is documented below.
labels Map<String>
User-defined labels for the task. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
lake Changes to this property will trigger replacement. String
The lake in which the task will be created.
location Changes to this property will trigger replacement. String
The location in which the task will be created.
name String
(Output) The relative resource name of the task, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}.
notebook Property Map
Configuration for running a scheduled notebook task. Structure is documented below.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
spark Property Map
Configuration for running a Spark task. Structure is documented below.
state String
(Output) Current state of the task.
taskId Changes to this property will trigger replacement. String
The ID of the task.
triggerSpec Property Map
Configuration for when and how often the task is triggered. Structure is documented below.
uid String
(Output) System generated globally unique ID for the task.
updateTime String
(Output) The time when the task was last updated.

Supporting Types

TaskExecutionSpec, TaskExecutionSpecArgs

ServiceAccount This property is required. string
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
Args Dictionary<string, string>
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
KmsKey string
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
MaxJobExecutionLifetime string
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
ServiceAccount This property is required. string
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
Args map[string]string
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
KmsKey string
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
MaxJobExecutionLifetime string
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
serviceAccount This property is required. String
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
args Map<String,String>
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
kmsKey String
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
maxJobExecutionLifetime String
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
serviceAccount This property is required. string
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
args {[key: string]: string}
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
kmsKey string
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
maxJobExecutionLifetime string
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
service_account This property is required. str
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
args Mapping[str, str]
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
kms_key str
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
max_job_execution_lifetime str
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
project str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
serviceAccount This property is required. String
Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
args Map<String>
The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of a key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: ${taskId} and ${job_time}. To pass positional args, set the key as TASK_ARGS; the value should be a comma-separated string of all the positional arguments. To use a delimiter other than a comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. If other keys are present in the args, TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
kmsKey String
The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
maxJobExecutionLifetime String
The maximum duration after which the job execution expires. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
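
A TypeScript sketch of an execution spec that uses the documented ${...} placeholders and the TASK_ARGS key; the service account, bucket paths, and the run_label key are placeholders.

import * as gcp from "@pulumi/gcp";

const executionSpec: gcp.types.input.dataplex.TaskExecutionSpec = {
    serviceAccount: "task-runner@my-project-name.iam.gserviceaccount.com",
    args: {
        // ${taskId} is interpolated by Dataplex before the args reach the driver.
        run_label: "nightly-${taskId}",
        // Positional args go under TASK_ARGS as a comma-separated string.
        TASK_ARGS: "--input,gs://example-bucket/in,--output,gs://example-bucket/out",
    },
    maxJobExecutionLifetime: "3600s", // a duration in seconds, suffixed with "s"
};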

TaskExecutionStatus, TaskExecutionStatusArgs

LatestJobs List<TaskExecutionStatusLatestJob>
(Output) Latest job execution. Structure is documented below.
UpdateTime string
(Output) Last update time of the status.
LatestJobs []TaskExecutionStatusLatestJob
(Output) Latest job execution. Structure is documented below.
UpdateTime string
(Output) Last update time of the status.
latestJobs List<TaskExecutionStatusLatestJob>
(Output) Latest job execution. Structure is documented below.
updateTime String
(Output) Last update time of the status.
latestJobs TaskExecutionStatusLatestJob[]
(Output) Latest job execution. Structure is documented below.
updateTime string
(Output) Last update time of the status.
latest_jobs Sequence[TaskExecutionStatusLatestJob]
(Output) Latest job execution. Structure is documented below.
update_time str
(Output) Last update time of the status.
latestJobs List<Property Map>
(Output) Latest job execution. Structure is documented below.
updateTime String
(Output) Last update time of the status.

TaskExecutionStatusLatestJob, TaskExecutionStatusLatestJobArgs

EndTime string
(Output) The time when the job ended.
Message string
(Output) Additional information about the current state.
Name string
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
RetryCount int
(Output) The number of times the job has been retried (excluding the initial attempt).
Service string
(Output) The underlying service running a job.
ServiceJob string
(Output) The full resource name for the job run under a particular service.
StartTime string
(Output) The time when the job was started.
State string
(Output) Execution state for the job.
Uid string
(Output) System generated globally unique ID for the job.
EndTime string
(Output) The time when the job ended.
Message string
(Output) Additional information about the current state.
Name string
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
RetryCount int
(Output) The number of times the job has been retried (excluding the initial attempt).
Service string
(Output) The underlying service running a job.
ServiceJob string
(Output) The full resource name for the job run under a particular service.
StartTime string
(Output) The time when the job was started.
State string
(Output) Execution state for the job.
Uid string
(Output) System generated globally unique ID for the job.
endTime String
(Output) The time when the job ended.
message String
(Output) Additional information about the current state.
name String
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
retryCount Integer
(Output) The number of times the job has been retried (excluding the initial attempt).
service String
(Output) The underlying service running a job.
serviceJob String
(Output) The full resource name for the job run under a particular service.
startTime String
(Output) The time when the job was started.
state String
(Output) Execution state for the job.
uid String
(Output) System generated globally unique ID for the job.
endTime string
(Output) The time when the job ended.
message string
(Output) Additional information about the current state.
name string
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
retryCount number
(Output) The number of times the job has been retried (excluding the initial attempt).
service string
(Output) The underlying service running a job.
serviceJob string
(Output) The full resource name for the job run under a particular service.
startTime string
(Output) The time when the job was started.
state string
(Output) Execution state for the job.
uid string
(Output) System generated globally unique ID for the job.
end_time str
(Output) The time when the job ended.
message str
(Output) Additional information about the current state.
name str
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
retry_count int
(Output) The number of times the job has been retried (excluding the initial attempt).
service str
(Output) The underlying service running a job.
service_job str
(Output) The full resource name for the job run under a particular service.
start_time str
(Output) The time when the job was started.
state str
(Output) Execution state for the job.
uid str
(Output) System generated globally unique ID for the job.
endTime String
(Output) The time when the job ended.
message String
(Output) Additional information about the current state.
name String
(Output) The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
retryCount Number
(Output) The number of times the job has been retried (excluding the initial attempt).
service String
(Output) The underlying service running a job.
serviceJob String
(Output) The full resource name for the job run under a particular service.
startTime String
(Output) The time when the job was started.
state String
(Output) Execution state for the job.
uid String
(Output) System generated globally unique ID for the job.
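
These nested output types can be read from the resource with apply; a sketch, assuming a task created elsewhere in the program:

import * as gcp from "@pulumi/gcp";

// Assume `task` is a Task created elsewhere in the program.
declare const task: gcp.dataplex.Task;

// Surface the most recent job's state and retry count, if a job has run.
export const latestJob = task.executionStatuses.apply(statuses => {
    const job = statuses?.[0]?.latestJobs?.[0];
    return job ? { state: job.state, retries: job.retryCount } : undefined;
});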

TaskNotebook, TaskNotebookArgs

Notebook This property is required. string
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
ArchiveUris List<string>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
FileUris List<string>
Cloud Storage URIs of files to be placed in the working directory of each executor.
InfrastructureSpec TaskNotebookInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
Notebook This property is required. string
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
ArchiveUris []string
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
FileUris []string
Cloud Storage URIs of files to be placed in the working directory of each executor.
InfrastructureSpec TaskNotebookInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
notebook This property is required. String
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
archiveUris List<String>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris List<String>
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec TaskNotebookInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
notebook This property is required. string
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
archiveUris string[]
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris string[]
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec TaskNotebookInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
notebook This property is required. str
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
archive_uris Sequence[str]
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
file_uris Sequence[str]
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructure_spec TaskNotebookInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
notebook This property is required. String
Path to the input notebook. This can be the Cloud Storage URI of the notebook file or the path to notebook content. The execution args are accessible as environment variables (TASK_key=value).
archiveUris List<String>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris List<String>
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec Property Map
Infrastructure specification for the execution. Structure is documented below.
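
A TypeScript sketch of a notebook task body; the bucket and object names are placeholders.

import * as gcp from "@pulumi/gcp";

const notebook: gcp.types.input.dataplex.TaskNotebook = {
    notebook: "gs://example-bucket/notebooks/analysis.ipynb", // input notebook
    fileUris: ["gs://example-bucket/config/settings.json"],   // placed in each executor's working directory
    archiveUris: ["gs://example-bucket/deps/helpers.zip"],    // extracted into the working directory
};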

TaskNotebookInfrastructureSpec, TaskNotebookInfrastructureSpecArgs

Batch TaskNotebookInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
ContainerImage TaskNotebookInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
VpcNetwork TaskNotebookInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
Batch TaskNotebookInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
ContainerImage TaskNotebookInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
VpcNetwork TaskNotebookInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskNotebookInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage TaskNotebookInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork TaskNotebookInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskNotebookInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage TaskNotebookInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork TaskNotebookInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskNotebookInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
container_image TaskNotebookInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpc_network TaskNotebookInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch Property Map
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage Property Map
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork Property Map
VPC network. Structure is documented below.

TaskNotebookInfrastructureSpecBatch, TaskNotebookInfrastructureSpecBatchArgs

ExecutorsCount int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
MaxExecutorsCount int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
ExecutorsCount int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
MaxExecutorsCount int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount Integer
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount Integer
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount number
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount number
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executors_count int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
max_executors_count int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount Number
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount Number
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
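
For instance, the following sketch starts with two executors and allows scaling to ten; because maxExecutorsCount exceeds executorsCount, auto-scaling is enabled.

import * as gcp from "@pulumi/gcp";

const batch: gcp.types.input.dataplex.TaskNotebookInfrastructureSpecBatch = {
    executorsCount: 2,     // baseline executor count (between 2 and 100)
    maxExecutorsCount: 10, // ceiling for auto-scaling (between 2 and 1000)
};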

TaskNotebookInfrastructureSpecContainerImage, TaskNotebookInfrastructureSpecContainerImageArgs

Image string
Container image to use.
JavaJars List<string>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
Properties Dictionary<string, string>
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
PythonPackages List<string>
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
Image string
Container image to use.
JavaJars []string
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
Properties map[string]string
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
PythonPackages []string
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
image String
Container image to use.
javaJars List<String>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
properties Map<String,String>
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages List<String>
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
image string
Container image to use.
javaJars string[]
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
properties {[key: string]: string}
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages string[]
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
image str
Container image to use.
java_jars Sequence[str]
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
properties Mapping[str, str]
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
python_packages Sequence[str]
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
image String
Container image to use.
javaJars List<String>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries. For example, gs://bucket-name/my/path/to/file.jar
properties Map<String>
Overrides to the common configuration of open-source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages List<String>
A list of Python packages to be installed. Valid formats include Cloud Storage URI to a pip-installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
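
A sketch of a container image runtime configuration; the image, URIs, and property value are placeholders, with the property key following the documented prefix:property format.

import * as gcp from "@pulumi/gcp";

const containerImage: gcp.types.input.dataplex.TaskNotebookInfrastructureSpecContainerImage = {
    image: "us-docker.pkg.dev/my-project-name/repo/task-image:latest",
    javaJars: ["gs://example-bucket/jars/udfs.jar"],
    pythonPackages: ["gs://example-bucket/libs/helpers.tar.gz"],
    properties: {
        "spark:spark.executor.memory": "4g", // prefix:property format (assumed example)
    },
};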

TaskNotebookInfrastructureSpecVpcNetwork, TaskNotebookInfrastructureSpecVpcNetworkArgs

Network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
NetworkTags List<string>
List of network tags to apply to the job.
SubNetwork string
The Cloud VPC sub-network in which the job is run.
Network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
NetworkTags []string
List of network tags to apply to the job.
SubNetwork string
The Cloud VPC sub-network in which the job is run.
network String
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags List<String>
List of network tags to apply to the job.
subNetwork String
The Cloud VPC sub-network in which the job is run.
network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags string[]
List of network tags to apply to the job.
subNetwork string
The Cloud VPC sub-network in which the job is run.
network str
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
network_tags Sequence[str]
List of network tags to apply to the job.
sub_network str
The Cloud VPC sub-network in which the job is run.
network String
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags List<String>
List of network tags to apply to the job.
subNetwork String
The Cloud VPC sub-network in which the job is run.
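The container image and VPC network blocks above typically appear together under a notebook task's infrastructureSpec. A minimal TypeScript sketch, assuming the notebook block's notebook field takes the Cloud Storage URI of the input notebook, that network and subNetwork are mutually exclusive (this page does not state it explicitly), and with every concrete name a placeholder:

import * as gcp from "@pulumi/gcp";

// Sketch only: lake, bucket, network, and service account names are
// placeholders, not values taken from this page.
const notebookTask = new gcp.dataplex.Task("notebook-example", {
    taskId: "notebook-task",
    location: "us-central1",
    lake: "my-lake",                                              // hypothetical lake
    triggerSpec: { type: "ON_DEMAND" },
    executionSpec: {
        serviceAccount: "sa@my-project.iam.gserviceaccount.com",  // placeholder
    },
    notebook: {
        notebook: "gs://my-bucket/notebooks/analysis.ipynb",      // placeholder URI
        infrastructureSpec: {
            containerImage: {
                image: "us-docker.pkg.dev/my-project/repo/img:1", // placeholder image
                pythonPackages: ["gs://my-bucket/libs/lib.tar.gz"],
            },
            vpcNetwork: {
                network: "default",             // set network or subNetwork, not both
                networkTags: ["dataplex-job"],
            },
        },
    },
});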

TaskSpark
, TaskSparkArgs

ArchiveUris List<string>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
FileUris List<string>
Cloud Storage URIs of files to be placed in the working directory of each executor.
InfrastructureSpec TaskSparkInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
MainClass string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
MainJarFileUri string
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
PythonScriptFile string
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
SqlScript string
The query text. The execution args are used to declare a set of script variables (set key='value';).
SqlScriptFile string
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
ArchiveUris []string
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
FileUris []string
Cloud Storage URIs of files to be placed in the working directory of each executor.
InfrastructureSpec TaskSparkInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
MainClass string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
MainJarFileUri string
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
PythonScriptFile string
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
SqlScript string
The query text. The execution args are used to declare a set of script variables (set key='value';).
SqlScriptFile string
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
archiveUris List<String>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris List<String>
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec TaskSparkInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
mainClass String
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
mainJarFileUri String
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
pythonScriptFile String
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
sqlScript String
The query text. The execution args are used to declare a set of script variables (set key='value';).
sqlScriptFile String
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
archiveUris string[]
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris string[]
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec TaskSparkInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
mainClass string
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
mainJarFileUri string
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
pythonScriptFile string
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
sqlScript string
The query text. The execution args are used to declare a set of script variables (set key='value';).
sqlScriptFile string
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
archive_uris Sequence[str]
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
file_uris Sequence[str]
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructure_spec TaskSparkInfrastructureSpec
Infrastructure specification for the execution. Structure is documented below.
main_class str
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
main_jar_file_uri str
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
python_script_file str
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
sql_script str
The query text. The execution args are used to declare a set of script variables (set key='value';).
sql_script_file str
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
archiveUris List<String>
Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
fileUris List<String>
Cloud Storage URIs of files to be placed in the working directory of each executor.
infrastructureSpec Property Map
Infrastructure specification for the execution. Structure is documented below.
mainClass String
The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
mainJarFileUri String
The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
pythonScriptFile String
The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
sqlScript String
The query text. The execution args are used to declare a set of script variables (set key='value';).
sqlScriptFile String
A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
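The driver fields above (mainClass, mainJarFileUri, pythonScriptFile, sqlScript, sqlScriptFile) describe different ways to supply the job's entry point; a task sets one of them (treating them as mutually exclusive is an assumption, as this page does not state it). A hedged TypeScript sketch using pythonScriptFile, with placeholder names throughout:

import * as gcp from "@pulumi/gcp";

// Sketch: a Spark task driven by a PySpark script, with supporting files
// staged into each executor's working directory. All URIs are placeholders.
const sparkTask = new gcp.dataplex.Task("spark-example", {
    taskId: "spark-task",
    location: "us-central1",
    lake: "my-lake",                                              // hypothetical lake
    triggerSpec: { type: "ON_DEMAND" },
    executionSpec: {
        serviceAccount: "sa@my-project.iam.gserviceaccount.com",  // placeholder
    },
    spark: {
        pythonScriptFile: "gs://my-bucket/jobs/etl.py",           // the driver (.py)
        fileUris: ["gs://my-bucket/config/settings.json"],        // placed in workdir
        archiveUris: ["gs://my-bucket/deps/venv.tar.gz"],         // extracted in workdir
    },
});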

TaskSparkInfrastructureSpec
, TaskSparkInfrastructureSpecArgs

Batch TaskSparkInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
ContainerImage TaskSparkInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
VpcNetwork TaskSparkInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
Batch TaskSparkInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
ContainerImage TaskSparkInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
VpcNetwork TaskSparkInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskSparkInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage TaskSparkInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork TaskSparkInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskSparkInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage TaskSparkInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork TaskSparkInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch TaskSparkInfrastructureSpecBatch
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
container_image TaskSparkInfrastructureSpecContainerImage
Container Image Runtime Configuration. Structure is documented below.
vpc_network TaskSparkInfrastructureSpecVpcNetwork
VPC network. Structure is documented below.
batch Property Map
Compute resources needed for a Task when using Dataproc Serverless. Structure is documented below.
containerImage Property Map
Container Image Runtime Configuration. Structure is documented below.
vpcNetwork Property Map
VPC network. Structure is documented below.
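The three sub-blocks nest under a Spark task's infrastructureSpec. A small TypeScript sketch of the shape, with placeholder values (each sub-block is detailed in the sections below):

// Shape sketch: how batch, containerImage, and vpcNetwork (documented
// below) nest inside spark.infrastructureSpec. Values are placeholders.
const infrastructureSpec = {
    batch: { executorsCount: 2, maxExecutorsCount: 10 },
    containerImage: { image: "custom-image:latest" },
    vpcNetwork: { network: "default" },
};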

TaskSparkInfrastructureSpecBatch
, TaskSparkInfrastructureSpecBatchArgs

ExecutorsCount int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
MaxExecutorsCount int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
ExecutorsCount int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
MaxExecutorsCount int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount Integer
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount Integer
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount number
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount number
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executors_count int
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
max_executors_count int
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
executorsCount Number
Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
maxExecutorsCount Number
Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
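Read together, the two fields above imply that autoscaling kicks in only when maxExecutorsCount exceeds executorsCount. A tiny sketch of both modes:

// Fixed size: maxExecutorsCount equals executorsCount, so no autoscaling;
// the job always runs with 4 executors.
const fixedBatch = { executorsCount: 4, maxExecutorsCount: 4 };

// Autoscaling: start at 2 executors and scale up to 20 under load,
// since maxExecutorsCount > executorsCount.
const autoscalingBatch = { executorsCount: 2, maxExecutorsCount: 20 };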

TaskSparkInfrastructureSpecContainerImage
, TaskSparkInfrastructureSpecContainerImageArgs

Image string
Container image to use.
JavaJars List<string>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
Properties Dictionary<string, string>
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
PythonPackages List<string>
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
Image string
Container image to use.
JavaJars []string
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
Properties map[string]string
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
PythonPackages []string
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
image String
Container image to use.
javaJars List<String>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
properties Map<String,String>
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages List<String>
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
image string
Container image to use.
javaJars string[]
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
properties {[key: string]: string}
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages string[]
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
image str
Container image to use.
java_jars Sequence[str]
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
properties Mapping[str, str]
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
python_packages Sequence[str]
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
image String
Container image to use.
javaJars List<String>
A list of Java JARs to add to the classpath. Valid input includes Cloud Storage URIs to JAR binaries, for example gs://bucket-name/my/path/to/file.jar.
properties Map<String>
Overrides the common configuration of open source components installed on the Dataproc cluster: the properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
pythonPackages List<String>
A list of Python packages to be installed. Valid formats include a Cloud Storage URI to a pip-installable library, for example gs://bucket-name/my/path/to/lib.tar.gz.
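A TypeScript sketch of a container image block that exercises the prefix:property key format described above; the image path and JAR URI are placeholder assumptions, while core:hadoop.tmp.dir is the example key from the docs:

// Sketch: override a daemon config property using the prefix:property
// key format. Image and JAR locations are placeholders.
const containerImage = {
    image: "us-docker.pkg.dev/my-project/repo/spark:latest",
    javaJars: ["gs://my-bucket/jars/extra.jar"],
    properties: {
        "core:hadoop.tmp.dir": "/tmp/hadoop",   // example key from the docs above
    },
    pythonPackages: ["gs://my-bucket/libs/lib.tar.gz"],
};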

TaskSparkInfrastructureSpecVpcNetwork
, TaskSparkInfrastructureSpecVpcNetworkArgs

Network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
NetworkTags List<string>
List of network tags to apply to the job.
SubNetwork string
The Cloud VPC sub-network in which the job is run.
Network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
NetworkTags []string
List of network tags to apply to the job.
SubNetwork string
The Cloud VPC sub-network in which the job is run.
network String
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags List<String>
List of network tags to apply to the job.
subNetwork String
The Cloud VPC sub-network in which the job is run.
network string
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags string[]
List of network tags to apply to the job.
subNetwork string
The Cloud VPC sub-network in which the job is run.
network str
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
network_tags Sequence[str]
List of network tags to apply to the job.
sub_network str
The Cloud VPC sub-network in which the job is run.
network String
The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
networkTags List<String>
List of network tags to apply to the job.
subNetwork String
The Cloud VPC sub-network in which the job is run.
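When the job must run in a specific subnet rather than a whole network, set subNetwork instead of network (treating the two as mutually exclusive is an assumption; this page does not say so explicitly). A sketch with placeholder values:

// Sketch: pin the job to a specific sub-network and tag it for
// firewall rules. The full resource path is a placeholder.
const vpcNetwork = {
    subNetwork: "projects/my-project/regions/us-central1/subnetworks/my-subnet",
    networkTags: ["allow-dataplex"],
};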

TaskTriggerSpec
, TaskTriggerSpecArgs

Type
This property is required.
Changes to this property will trigger replacement.
string
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
Disabled bool
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
MaxRetries int
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
Schedule string
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
StartTime string
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
Type
This property is required.
Changes to this property will trigger replacement.
string
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
Disabled bool
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
MaxRetries int
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
Schedule string
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
StartTime string
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
type
This property is required.
Changes to this property will trigger replacement.
String
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
disabled Boolean
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
maxRetries Integer
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
schedule String
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
startTime String
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
type
This property is required.
Changes to this property will trigger replacement.
string
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
disabled boolean
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
maxRetries number
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
schedule string
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
startTime string
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
type
This property is required.
Changes to this property will trigger replacement.
str
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
disabled bool
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
max_retries int
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
schedule str
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
start_time str
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
type
This property is required.
Changes to this property will trigger replacement.
String
Trigger type of the user-specified Task. Possible values are: ON_DEMAND, RECURRING.
disabled Boolean
Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
maxRetries Number
Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
schedule String
Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone for the cron expression, apply a prefix: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. ${IANA_TIME_ZONE} must be a valid string from the IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
startTime String
The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
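Combining the fields above: a RECURRING trigger requires schedule, and the CRON_TZ prefix pins its timezone. A TypeScript sketch with a placeholder start time:

// Sketch: run at minute 1 of every hour, New York time, with up to
// 3 retries; the first run happens no earlier than startTime.
const triggerSpec = {
    type: "RECURRING",
    schedule: "CRON_TZ=America/New_York 1 * * * *",
    startTime: "2025-06-01T00:00:00Z",    // placeholder timestamp
    maxRetries: 3,
    disabled: false,                      // set true to pause future runs
};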

Import

Task can be imported using any of these accepted formats:

  • projects/{{project}}/locations/{{location}}/lakes/{{lake}}/tasks/{{task_id}}

  • {{project}}/{{location}}/{{lake}}/{{task_id}}

  • {{location}}/{{lake}}/{{task_id}}

When using the pulumi import command, Task can be imported using one of the formats above. For example:

$ pulumi import gcp:dataplex/task:Task default projects/{{project}}/locations/{{location}}/lakes/{{lake}}/tasks/{{task_id}}

$ pulumi import gcp:dataplex/task:Task default {{project}}/{{location}}/{{lake}}/{{task_id}}

$ pulumi import gcp:dataplex/task:Task default {{location}}/{{lake}}/{{task_id}}

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.