Google Cloud v8.26.0 published on Thursday, Apr 10, 2025 by Pulumi

gcp.bigquery.Dataset


Example Usage

Bigquery Dataset Basic

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            role: "READER",
            domain: "hashicorp.com",
        },
    ],
});
import pulumi
import pulumi_gcp as gcp

bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            "role": "READER",
            "domain": "hashicorp.com",
        },
    ])
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("example_dataset"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This is a test description"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var bqowner = new Account("bqowner", AccountArgs.builder()
            .accountId("bqowner")
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());

    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: EU
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
  bqowner:
    type: gcp:serviceaccount:Account
    properties:
      accountId: bqowner

Bigquery Dataset Cmek

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const keyRing = new gcp.kms.KeyRing("key_ring", {
    name: "example-keyring",
    location: "us",
});
const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
    name: "example-key",
    keyRing: keyRing.id,
});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    defaultTableExpirationMs: 3600000,
    defaultEncryptionConfiguration: {
        kmsKeyName: cryptoKey.id,
    },
});
import pulumi
import pulumi_gcp as gcp

key_ring = gcp.kms.KeyRing("key_ring",
    name="example-keyring",
    location="us")
crypto_key = gcp.kms.CryptoKey("crypto_key",
    name="example-key",
    key_ring=key_ring.id)
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    default_table_expiration_ms=3600000,
    default_encryption_configuration={
        "kms_key_name": crypto_key.id,
    })
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
			Name:     pulumi.String("example-keyring"),
			Location: pulumi.String("us"),
		})
		if err != nil {
			return err
		}
		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
			Name:    pulumi.String("example-key"),
			KeyRing: keyRing.ID(),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("example_dataset"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This is a test description"),
			Location:                 pulumi.String("US"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
				KmsKeyName: cryptoKey.ID(),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
    {
        Name = "example-keyring",
        Location = "us",
    });

    var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
    {
        Name = "example-key",
        KeyRing = keyRing.Id,
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        DefaultTableExpirationMs = 3600000,
        DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
        {
            KmsKeyName = cryptoKey.Id,
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
            .name("example-keyring")
            .location("us")
            .build());

        var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
            .name("example-key")
            .keyRing(keyRing.id())
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            .defaultTableExpirationMs(3600000)
            .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
                .kmsKeyName(cryptoKey.id())
                .build())
            .build());

    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: US
      defaultTableExpirationMs: 3600000
      defaultEncryptionConfiguration:
        kmsKeyName: ${cryptoKey.id}
  cryptoKey:
    type: gcp:kms:CryptoKey
    name: crypto_key
    properties:
      name: example-key
      keyRing: ${keyRing.id}
  keyRing:
    type: gcp:kms:KeyRing
    name: key_ring
    properties:
      name: example-keyring
      location: us

Bigquery Dataset Authorized Dataset

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
const _public = new gcp.bigquery.Dataset("public", {
    datasetId: "public",
    friendlyName: "test",
    description: "This dataset is public",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            role: "READER",
            domain: "hashicorp.com",
        },
    ],
});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "private",
    friendlyName: "test",
    description: "This dataset is private",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            role: "READER",
            domain: "hashicorp.com",
        },
        {
            dataset: {
                dataset: {
                    projectId: _public.project,
                    datasetId: _public.datasetId,
                },
                targetTypes: ["VIEWS"],
            },
        },
    ],
});
import pulumi
import pulumi_gcp as gcp

bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
public = gcp.bigquery.Dataset("public",
    dataset_id="public",
    friendly_name="test",
    description="This dataset is public",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            "role": "READER",
            "domain": "hashicorp.com",
        },
    ])
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="private",
    friendly_name="test",
    description="This dataset is private",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            "role": "READER",
            "domain": "hashicorp.com",
        },
        {
            "dataset": {
                "dataset": {
                    "project_id": public.project,
                    "dataset_id": public.dataset_id,
                },
                "target_types": ["VIEWS"],
            },
        },
    ])
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("public"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is public"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("private"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is private"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
				&bigquery.DatasetAccessTypeArgs{
					Dataset: &bigquery.DatasetAccessDatasetArgs{
						Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
							ProjectId: public.Project,
							DatasetId: public.DatasetId,
						},
						TargetTypes: pulumi.StringArray{
							pulumi.String("VIEWS"),
						},
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });

    var @public = new Gcp.BigQuery.Dataset("public", new()
    {
        DatasetId = "public",
        FriendlyName = "test",
        Description = "This dataset is public",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "private",
        FriendlyName = "test",
        Description = "This dataset is private",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
                {
                    Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                    {
                        ProjectId = @public.Project,
                        DatasetId = @public.DatasetId,
                    },
                    TargetTypes = new[]
                    {
                        "VIEWS",
                    },
                },
            },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var bqowner = new Account("bqowner", AccountArgs.builder()
            .accountId("bqowner")
            .build());

        var public_ = new Dataset("public", DatasetArgs.builder()
            .datasetId("public")
            .friendlyName("test")
            .description("This dataset is public")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("private")
            .friendlyName("test")
            .description("This dataset is private")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build(),
                DatasetAccessArgs.builder()
                    .dataset(DatasetAccessDatasetArgs.builder()
                        .dataset(DatasetAccessDatasetDatasetArgs.builder()
                            .projectId(public_.project())
                            .datasetId(public_.datasetId())
                            .build())
                        .targetTypes("VIEWS")
                        .build())
                    .build())
            .build());

    }
}
resources:
  public:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public
      friendlyName: test
      description: This dataset is public
      location: EU
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private
      friendlyName: test
      description: This dataset is private
      location: EU
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
        - dataset:
            dataset:
              projectId: ${public.project}
              datasetId: ${public.datasetId}
            targetTypes:
              - VIEWS
  bqowner:
    type: gcp:serviceaccount:Account
    properties:
      accountId: bqowner

Bigquery Dataset Authorized Routine

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const _public = new gcp.bigquery.Dataset("public", {
    datasetId: "public_dataset",
    description: "This dataset is public",
});
const publicRoutine = new gcp.bigquery.Routine("public", {
    datasetId: _public.datasetId,
    routineId: "public_routine",
    routineType: "TABLE_VALUED_FUNCTION",
    language: "SQL",
    definitionBody: "SELECT 1 + value AS value\n",
    arguments: [{
        name: "value",
        argumentKind: "FIXED_TYPE",
        dataType: JSON.stringify({
            typeKind: "INT64",
        }),
    }],
    returnTableType: JSON.stringify({
        columns: [{
            name: "value",
            type: {
                typeKind: "INT64",
            },
        }],
    }),
});
const _private = new gcp.bigquery.Dataset("private", {
    datasetId: "private_dataset",
    description: "This dataset is private",
    accesses: [
        {
            role: "OWNER",
            userByEmail: "my@service-account.com",
        },
        {
            routine: {
                projectId: publicRoutine.project,
                datasetId: publicRoutine.datasetId,
                routineId: publicRoutine.routineId,
            },
        },
    ],
});
import pulumi
import json
import pulumi_gcp as gcp

public = gcp.bigquery.Dataset("public",
    dataset_id="public_dataset",
    description="This dataset is public")
public_routine = gcp.bigquery.Routine("public",
    dataset_id=public.dataset_id,
    routine_id="public_routine",
    routine_type="TABLE_VALUED_FUNCTION",
    language="SQL",
    definition_body="SELECT 1 + value AS value\n",
    arguments=[{
        "name": "value",
        "argument_kind": "FIXED_TYPE",
        "data_type": json.dumps({
            "typeKind": "INT64",
        }),
    }],
    return_table_type=json.dumps({
        "columns": [{
            "name": "value",
            "type": {
                "typeKind": "INT64",
            },
        }],
    }))
private = gcp.bigquery.Dataset("private",
    dataset_id="private_dataset",
    description="This dataset is private",
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": "my@service-account.com",
        },
        {
            "routine": {
                "project_id": public_routine.project,
                "dataset_id": public_routine.dataset_id,
                "routine_id": public_routine.routine_id,
            },
        },
    ])
package main

import (
	"encoding/json"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("public_dataset"),
			Description: pulumi.String("This dataset is public"),
		})
		if err != nil {
			return err
		}
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"typeKind": "INT64",
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		tmpJSON1, err := json.Marshal(map[string]interface{}{
			"columns": []map[string]interface{}{
				map[string]interface{}{
					"name": "value",
					"type": map[string]interface{}{
						"typeKind": "INT64",
					},
				},
			},
		})
		if err != nil {
			return err
		}
		json1 := string(tmpJSON1)
		publicRoutine, err := bigquery.NewRoutine(ctx, "public", &bigquery.RoutineArgs{
			DatasetId:      public.DatasetId,
			RoutineId:      pulumi.String("public_routine"),
			RoutineType:    pulumi.String("TABLE_VALUED_FUNCTION"),
			Language:       pulumi.String("SQL"),
			DefinitionBody: pulumi.String("SELECT 1 + value AS value\n"),
			Arguments: bigquery.RoutineArgumentArray{
				&bigquery.RoutineArgumentArgs{
					Name:         pulumi.String("value"),
					ArgumentKind: pulumi.String("FIXED_TYPE"),
					DataType:     pulumi.String(json0),
				},
			},
			ReturnTableType: pulumi.String(json1),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "private", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("private_dataset"),
			Description: pulumi.String("This dataset is private"),
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: pulumi.String("my@service-account.com"),
				},
				&bigquery.DatasetAccessTypeArgs{
					Routine: &bigquery.DatasetAccessRoutineArgs{
						ProjectId: publicRoutine.Project,
						DatasetId: publicRoutine.DatasetId,
						RoutineId: publicRoutine.RoutineId,
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var @public = new Gcp.BigQuery.Dataset("public", new()
    {
        DatasetId = "public_dataset",
        Description = "This dataset is public",
    });

    var publicRoutine = new Gcp.BigQuery.Routine("public", new()
    {
        DatasetId = @public.DatasetId,
        RoutineId = "public_routine",
        RoutineType = "TABLE_VALUED_FUNCTION",
        Language = "SQL",
        DefinitionBody = @"SELECT 1 + value AS value
",
        Arguments = new[]
        {
            new Gcp.BigQuery.Inputs.RoutineArgumentArgs
            {
                Name = "value",
                ArgumentKind = "FIXED_TYPE",
                DataType = JsonSerializer.Serialize(new Dictionary<string, object?>
                {
                    ["typeKind"] = "INT64",
                }),
            },
        },
        ReturnTableType = JsonSerializer.Serialize(new Dictionary<string, object?>
        {
            ["columns"] = new[]
            {
                new Dictionary<string, object?>
                {
                    ["name"] = "value",
                    ["type"] = new Dictionary<string, object?>
                    {
                        ["typeKind"] = "INT64",
                    },
                },
            },
        }),
    });

    var @private = new Gcp.BigQuery.Dataset("private", new()
    {
        DatasetId = "private_dataset",
        Description = "This dataset is private",
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = "my@service-account.com",
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
                {
                    ProjectId = publicRoutine.Project,
                    DatasetId = publicRoutine.DatasetId,
                    RoutineId = publicRoutine.RoutineId,
                },
            },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var public_ = new Dataset("public", DatasetArgs.builder()
            .datasetId("public_dataset")
            .description("This dataset is public")
            .build());

        var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()
            .datasetId(public_.datasetId())
            .routineId("public_routine")
            .routineType("TABLE_VALUED_FUNCTION")
            .language("SQL")
            .definitionBody("""
SELECT 1 + value AS value
            """)
            .arguments(RoutineArgumentArgs.builder()
                .name("value")
                .argumentKind("FIXED_TYPE")
                .dataType(serializeJson(
                    jsonObject(
                        jsonProperty("typeKind", "INT64")
                    )))
                .build())
            .returnTableType(serializeJson(
                jsonObject(
                    jsonProperty("columns", jsonArray(jsonObject(
                        jsonProperty("name", "value"),
                        jsonProperty("type", jsonObject(
                            jsonProperty("typeKind", "INT64")
                        ))
                    )))
                )))
            .build());

        var private_ = new Dataset("private", DatasetArgs.builder()
            .datasetId("private_dataset")
            .description("This dataset is private")
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail("my@service-account.com")
                    .build(),
                DatasetAccessArgs.builder()
                    .routine(DatasetAccessRoutineArgs.builder()
                        .projectId(publicRoutine.project())
                        .datasetId(publicRoutine.datasetId())
                        .routineId(publicRoutine.routineId())
                        .build())
                    .build())
            .build());

    }
}
resources:
  public:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public_dataset
      description: This dataset is public
  publicRoutine:
    type: gcp:bigquery:Routine
    name: public
    properties:
      datasetId: ${public.datasetId}
      routineId: public_routine
      routineType: TABLE_VALUED_FUNCTION
      language: SQL
      definitionBody: |
        SELECT 1 + value AS value        
      arguments:
        - name: value
          argumentKind: FIXED_TYPE
          dataType:
            fn::toJSON:
              typeKind: INT64
      returnTableType:
        fn::toJSON:
          columns:
            - name: value
              type:
                typeKind: INT64
  private:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private_dataset
      description: This dataset is private
      accesses:
        - role: OWNER
          userByEmail: my@service-account.com
        - routine:
            projectId: ${publicRoutine.project}
            datasetId: ${publicRoutine.datasetId}
            routineId: ${publicRoutine.routineId}

Bigquery Dataset External Reference Aws

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "aws-us-east-1",
    externalDatasetReference: {
        externalSource: "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
        connection: "projects/project/locations/aws-us-east-1/connections/connection",
    },
});
import pulumi
import pulumi_gcp as gcp

dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="aws-us-east-1",
    external_dataset_reference={
        "external_source": "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
        "connection": "projects/project/locations/aws-us-east-1/connections/connection",
    })
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("aws-us-east-1"),
			ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
				ExternalSource: pulumi.String("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database"),
				Connection:     pulumi.String("projects/project/locations/aws-us-east-1/connections/connection"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "aws-us-east-1",
        ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
        {
            ExternalSource = "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
            Connection = "projects/project/locations/aws-us-east-1/connections/connection",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetExternalDatasetReferenceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("aws-us-east-1")
            .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
                .externalSource("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database")
                .connection("projects/project/locations/aws-us-east-1/connections/connection")
                .build())
            .build());

    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: aws-us-east-1
      externalDatasetReference:
        externalSource: aws-glue://arn:aws:glue:us-east-1:999999999999:database/database
        connection: projects/project/locations/aws-us-east-1/connections/connection

Bigquery Dataset External Catalog Dataset Options

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    externalCatalogDatasetOptions: {
        parameters: {
            dataset_owner: "test_dataset_owner",
        },
        defaultStorageLocationUri: "gs://test_dataset/tables",
    },
});
import pulumi
import pulumi_gcp as gcp

dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    external_catalog_dataset_options={
        "parameters": {
            "dataset_owner": "test_dataset_owner",
        },
        "default_storage_location_uri": "gs://test_dataset/tables",
    })
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("US"),
			ExternalCatalogDatasetOptions: &bigquery.DatasetExternalCatalogDatasetOptionsArgs{
				Parameters: pulumi.StringMap{
					"dataset_owner": pulumi.String("test_dataset_owner"),
				},
				DefaultStorageLocationUri: pulumi.String("gs://test_dataset/tables"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        ExternalCatalogDatasetOptions = new Gcp.BigQuery.Inputs.DatasetExternalCatalogDatasetOptionsArgs
        {
            Parameters = 
            {
                { "dataset_owner", "test_dataset_owner" },
            },
            DefaultStorageLocationUri = "gs://test_dataset/tables",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetExternalCatalogDatasetOptionsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            .externalCatalogDatasetOptions(DatasetExternalCatalogDatasetOptionsArgs.builder()
                .parameters(Map.of("dataset_owner", "test_dataset_owner"))
                .defaultStorageLocationUri("gs://test_dataset/tables")
                .build())
            .build());

    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: US
      externalCatalogDatasetOptions:
        parameters:
          dataset_owner: test_dataset_owner
        defaultStorageLocationUri: gs://test_dataset/tables

Create Dataset Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Dataset(name: string, args: DatasetArgs, opts?: CustomResourceOptions);
@overload
def Dataset(resource_name: str,
            args: DatasetArgs,
            opts: Optional[ResourceOptions] = None)

@overload
def Dataset(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            dataset_id: Optional[str] = None,
            external_catalog_dataset_options: Optional[DatasetExternalCatalogDatasetOptionsArgs] = None,
            friendly_name: Optional[str] = None,
            default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
            default_partition_expiration_ms: Optional[int] = None,
            default_table_expiration_ms: Optional[int] = None,
            delete_contents_on_destroy: Optional[bool] = None,
            description: Optional[str] = None,
            accesses: Optional[Sequence[DatasetAccessArgs]] = None,
            default_collation: Optional[str] = None,
            is_case_insensitive: Optional[bool] = None,
            external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
            labels: Optional[Mapping[str, str]] = None,
            location: Optional[str] = None,
            max_time_travel_hours: Optional[str] = None,
            project: Optional[str] = None,
            resource_tags: Optional[Mapping[str, str]] = None,
            storage_billing_model: Optional[str] = None)
func NewDataset(ctx *Context, name string, args DatasetArgs, opts ...ResourceOption) (*Dataset, error)
public Dataset(string name, DatasetArgs args, CustomResourceOptions? opts = null)
public Dataset(String name, DatasetArgs args)
public Dataset(String name, DatasetArgs args, CustomResourceOptions options)
type: gcp:bigquery:Dataset
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. DatasetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. DatasetArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. DatasetArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. DatasetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. DatasetArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var datasetResource = new Gcp.BigQuery.Dataset("datasetResource", new()
{
    DatasetId = "string",
    ExternalCatalogDatasetOptions = new Gcp.BigQuery.Inputs.DatasetExternalCatalogDatasetOptionsArgs
    {
        DefaultStorageLocationUri = "string",
        Parameters = 
        {
            { "string", "string" },
        },
    },
    FriendlyName = "string",
    DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
    {
        KmsKeyName = "string",
    },
    DefaultPartitionExpirationMs = 0,
    DefaultTableExpirationMs = 0,
    DeleteContentsOnDestroy = false,
    Description = "string",
    Accesses = new[]
    {
        new Gcp.BigQuery.Inputs.DatasetAccessArgs
        {
            Condition = new Gcp.BigQuery.Inputs.DatasetAccessConditionArgs
            {
                Expression = "string",
                Description = "string",
                Location = "string",
                Title = "string",
            },
            Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                {
                    DatasetId = "string",
                    ProjectId = "string",
                },
                TargetTypes = new[]
                {
                    "string",
                },
            },
            Domain = "string",
            GroupByEmail = "string",
            IamMember = "string",
            Role = "string",
            Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
            {
                DatasetId = "string",
                ProjectId = "string",
                RoutineId = "string",
            },
            SpecialGroup = "string",
            UserByEmail = "string",
            View = new Gcp.BigQuery.Inputs.DatasetAccessViewArgs
            {
                DatasetId = "string",
                ProjectId = "string",
                TableId = "string",
            },
        },
    },
    DefaultCollation = "string",
    IsCaseInsensitive = false,
    ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
    {
        Connection = "string",
        ExternalSource = "string",
    },
    Labels = 
    {
        { "string", "string" },
    },
    Location = "string",
    MaxTimeTravelHours = "string",
    Project = "string",
    ResourceTags = 
    {
        { "string", "string" },
    },
    StorageBillingModel = "string",
});
example, err := bigquery.NewDataset(ctx, "datasetResource", &bigquery.DatasetArgs{
	DatasetId: pulumi.String("string"),
	ExternalCatalogDatasetOptions: &bigquery.DatasetExternalCatalogDatasetOptionsArgs{
		DefaultStorageLocationUri: pulumi.String("string"),
		Parameters: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	FriendlyName: pulumi.String("string"),
	DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
		KmsKeyName: pulumi.String("string"),
	},
	DefaultPartitionExpirationMs: pulumi.Int(0),
	DefaultTableExpirationMs:     pulumi.Int(0),
	DeleteContentsOnDestroy:      pulumi.Bool(false),
	Description:                  pulumi.String("string"),
	Accesses: bigquery.DatasetAccessTypeArray{
		&bigquery.DatasetAccessTypeArgs{
			Condition: &bigquery.DatasetAccessConditionArgs{
				Expression:  pulumi.String("string"),
				Description: pulumi.String("string"),
				Location:    pulumi.String("string"),
				Title:       pulumi.String("string"),
			},
			Dataset: &bigquery.DatasetAccessDatasetArgs{
				Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
					DatasetId: pulumi.String("string"),
					ProjectId: pulumi.String("string"),
				},
				TargetTypes: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
			Domain:       pulumi.String("string"),
			GroupByEmail: pulumi.String("string"),
			IamMember:    pulumi.String("string"),
			Role:         pulumi.String("string"),
			Routine: &bigquery.DatasetAccessRoutineArgs{
				DatasetId: pulumi.String("string"),
				ProjectId: pulumi.String("string"),
				RoutineId: pulumi.String("string"),
			},
			SpecialGroup: pulumi.String("string"),
			UserByEmail:  pulumi.String("string"),
			View: &bigquery.DatasetAccessViewArgs{
				DatasetId: pulumi.String("string"),
				ProjectId: pulumi.String("string"),
				TableId:   pulumi.String("string"),
			},
		},
	},
	DefaultCollation:  pulumi.String("string"),
	IsCaseInsensitive: pulumi.Bool(false),
	ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
		Connection:     pulumi.String("string"),
		ExternalSource: pulumi.String("string"),
	},
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Location:           pulumi.String("string"),
	MaxTimeTravelHours: pulumi.String("string"),
	Project:            pulumi.String("string"),
	ResourceTags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	StorageBillingModel: pulumi.String("string"),
})
var datasetResource = new Dataset("datasetResource", DatasetArgs.builder()
    .datasetId("string")
    .externalCatalogDatasetOptions(DatasetExternalCatalogDatasetOptionsArgs.builder()
        .defaultStorageLocationUri("string")
        .parameters(Map.of("string", "string"))
        .build())
    .friendlyName("string")
    .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
        .kmsKeyName("string")
        .build())
    .defaultPartitionExpirationMs(0)
    .defaultTableExpirationMs(0)
    .deleteContentsOnDestroy(false)
    .description("string")
    .accesses(DatasetAccessArgs.builder()
        .condition(DatasetAccessConditionArgs.builder()
            .expression("string")
            .description("string")
            .location("string")
            .title("string")
            .build())
        .dataset(DatasetAccessDatasetArgs.builder()
            .dataset(DatasetAccessDatasetDatasetArgs.builder()
                .datasetId("string")
                .projectId("string")
                .build())
            .targetTypes("string")
            .build())
        .domain("string")
        .groupByEmail("string")
        .iamMember("string")
        .role("string")
        .routine(DatasetAccessRoutineArgs.builder()
            .datasetId("string")
            .projectId("string")
            .routineId("string")
            .build())
        .specialGroup("string")
        .userByEmail("string")
        .view(DatasetAccessViewArgs.builder()
            .datasetId("string")
            .projectId("string")
            .tableId("string")
            .build())
        .build())
    .defaultCollation("string")
    .isCaseInsensitive(false)
    .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
        .connection("string")
        .externalSource("string")
        .build())
    .labels(Map.of("string", "string"))
    .location("string")
    .maxTimeTravelHours("string")
    .project("string")
    .resourceTags(Map.of("string", "string"))
    .storageBillingModel("string")
    .build());
dataset_resource = gcp.bigquery.Dataset("datasetResource",
    dataset_id="string",
    external_catalog_dataset_options={
        "default_storage_location_uri": "string",
        "parameters": {
            "string": "string",
        },
    },
    friendly_name="string",
    default_encryption_configuration={
        "kms_key_name": "string",
    },
    default_partition_expiration_ms=0,
    default_table_expiration_ms=0,
    delete_contents_on_destroy=False,
    description="string",
    accesses=[{
        "condition": {
            "expression": "string",
            "description": "string",
            "location": "string",
            "title": "string",
        },
        "dataset": {
            "dataset": {
                "dataset_id": "string",
                "project_id": "string",
            },
            "target_types": ["string"],
        },
        "domain": "string",
        "group_by_email": "string",
        "iam_member": "string",
        "role": "string",
        "routine": {
            "dataset_id": "string",
            "project_id": "string",
            "routine_id": "string",
        },
        "special_group": "string",
        "user_by_email": "string",
        "view": {
            "dataset_id": "string",
            "project_id": "string",
            "table_id": "string",
        },
    }],
    default_collation="string",
    is_case_insensitive=False,
    external_dataset_reference={
        "connection": "string",
        "external_source": "string",
    },
    labels={
        "string": "string",
    },
    location="string",
    max_time_travel_hours="string",
    project="string",
    resource_tags={
        "string": "string",
    },
    storage_billing_model="string")
const datasetResource = new gcp.bigquery.Dataset("datasetResource", {
    datasetId: "string",
    externalCatalogDatasetOptions: {
        defaultStorageLocationUri: "string",
        parameters: {
            string: "string",
        },
    },
    friendlyName: "string",
    defaultEncryptionConfiguration: {
        kmsKeyName: "string",
    },
    defaultPartitionExpirationMs: 0,
    defaultTableExpirationMs: 0,
    deleteContentsOnDestroy: false,
    description: "string",
    accesses: [{
        condition: {
            expression: "string",
            description: "string",
            location: "string",
            title: "string",
        },
        dataset: {
            dataset: {
                datasetId: "string",
                projectId: "string",
            },
            targetTypes: ["string"],
        },
        domain: "string",
        groupByEmail: "string",
        iamMember: "string",
        role: "string",
        routine: {
            datasetId: "string",
            projectId: "string",
            routineId: "string",
        },
        specialGroup: "string",
        userByEmail: "string",
        view: {
            datasetId: "string",
            projectId: "string",
            tableId: "string",
        },
    }],
    defaultCollation: "string",
    isCaseInsensitive: false,
    externalDatasetReference: {
        connection: "string",
        externalSource: "string",
    },
    labels: {
        string: "string",
    },
    location: "string",
    maxTimeTravelHours: "string",
    project: "string",
    resourceTags: {
        string: "string",
    },
    storageBillingModel: "string",
});
type: gcp:bigquery:Dataset
properties:
    accesses:
        - condition:
            description: string
            expression: string
            location: string
            title: string
          dataset:
            dataset:
                datasetId: string
                projectId: string
            targetTypes:
                - string
          domain: string
          groupByEmail: string
          iamMember: string
          role: string
          routine:
            datasetId: string
            projectId: string
            routineId: string
          specialGroup: string
          userByEmail: string
          view:
            datasetId: string
            projectId: string
            tableId: string
    datasetId: string
    defaultCollation: string
    defaultEncryptionConfiguration:
        kmsKeyName: string
    defaultPartitionExpirationMs: 0
    defaultTableExpirationMs: 0
    deleteContentsOnDestroy: false
    description: string
    externalCatalogDatasetOptions:
        defaultStorageLocationUri: string
        parameters:
            string: string
    externalDatasetReference:
        connection: string
        externalSource: string
    friendlyName: string
    isCaseInsensitive: false
    labels:
        string: string
    location: string
    maxTimeTravelHours: string
    project: string
    resourceTags:
        string: string
    storageBillingModel: string

Dataset Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
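For example, both forms below declare the same access entry; this is a minimal sketch with placeholder names:

import pulumi_gcp as gcp

# Dictionary literal form
dataset_a = gcp.bigquery.Dataset("dataset_a",
    dataset_id="example_dataset_a",
    accesses=[{
        "role": "READER",
        "domain": "hashicorp.com",
    }])

# Argument class form (equivalent)
dataset_b = gcp.bigquery.Dataset("dataset_b",
    dataset_id="example_dataset_b",
    accesses=[gcp.bigquery.DatasetAccessArgs(
        role="READER",
        domain="hashicorp.com",
    )])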

The Dataset resource accepts the following input properties:

DatasetId
This property is required.
Changes to this property will trigger replacement.
string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
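As an illustrative check (the helper below is hypothetical, not part of the SDK), the constraint corresponds to this pattern:

import re

def is_valid_dataset_id(dataset_id: str) -> bool:
    # Only letters, digits, and underscores; at most 1,024 characters.
    return re.fullmatch(r"[A-Za-z0-9_]{1,1024}", dataset_id) is not None

assert is_valid_dataset_id("example_dataset")
assert not is_valid_dataset_id("example-dataset")  # hyphens are not allowed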


Accesses List<DatasetAccess>
An array of objects that define dataset access for one or more entities. Structure is documented below.
DefaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
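
For instance, a dataset whose future tables default to case-insensitive string comparisons could be declared as follows (a minimal sketch; names are placeholders):

import pulumi_gcp as gcp

# String columns in tables created after this change inherit 'und:ci'
# unless the table or column specifies an explicit collation.
dataset = gcp.bigquery.Dataset("collated_dataset",
    dataset_id="collated_dataset",
    default_collation="und:ci")
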
DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
DefaultPartitionExpirationMs int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
DefaultTableExpirationMs int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
DeleteContentsOnDestroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
Description string
A user-friendly description of the dataset
ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
ExternalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
FriendlyName string
A descriptive name for the dataset
IsCaseInsensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
Labels Dictionary<string, string>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

Location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
MaxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
ResourceTags Dictionary<string, string>
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
StorageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
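For instance, the expiration and encryption defaults above interact: the partition expiration overrides the table expiration for partitioned tables, and the KMS key applies only where a table does not specify its own. A minimal TypeScript sketch, assuming a pre-existing Cloud KMS key (the key name, project, and numeric values below are placeholders):

import * as gcp from "@pulumi/gcp";

const secured = new gcp.bigquery.Dataset("secured", {
    datasetId: "secured_dataset",
    location: "US",
    // New tables inherit this key unless the table creation request
    // (or query) specifies its own encryption key.
    defaultEncryptionConfiguration: {
        kmsKeyName: "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key",
    },
    // For partitioned tables, defaultPartitionExpirationMs takes precedence;
    // defaultTableExpirationMs then applies only to non-partitioned tables.
    defaultTableExpirationMs: 7 * 24 * 3600 * 1000,      // 7 days
    defaultPartitionExpirationMs: 30 * 24 * 3600 * 1000, // 30 days
});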
DatasetId
This property is required.
Changes to this property will trigger replacement.
string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


Accesses []DatasetAccessTypeArgs
An array of objects that define dataset access for one or more entities. Structure is documented below.
DefaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
DefaultPartitionExpirationMs int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
DefaultTableExpirationMs int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
DeleteContentsOnDestroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
Description string
A user-friendly description of the dataset
ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptionsArgs
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
ExternalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReferenceArgs
Information about the external metadata storage where the dataset is defined. Structure is documented below.
FriendlyName string
A descriptive name for the dataset
IsCaseInsensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
Labels map[string]string

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

Location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
MaxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
ResourceTags map[string]string
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
StorageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


accesses List<DatasetAccess>
An array of objects that define dataset access for one or more entities. Structure is documented below.
defaultCollation String
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs Integer
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs Integer
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy Boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description String
A user-friendly description of the dataset
externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName String
A descriptive name for the dataset
isCaseInsensitive Boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Map<String,String>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

location Changes to this property will trigger replacement. String
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours String
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
resourceTags Map<String,String>
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
storageBillingModel String
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
datasetId
This property is required.
Changes to this property will trigger replacement.
string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


accesses DatasetAccess[]
An array of objects that define dataset access for one or more entities. Structure is documented below.
defaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs number
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs number
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description string
A user-friendly description of the dataset
externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName string
A descriptive name for the dataset
isCaseInsensitive boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels {[key: string]: string}

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
resourceTags {[key: string]: string}
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
storageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
dataset_id
This property is required.
Changes to this property will trigger replacement.
str
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


accesses Sequence[DatasetAccessArgs]
An array of objects that define dataset access for one or more entities. Structure is documented below.
default_collation str
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
default_partition_expiration_ms int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
default_table_expiration_ms int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
delete_contents_on_destroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description str
A user-friendly description of the dataset
external_catalog_dataset_options DatasetExternalCatalogDatasetOptionsArgs
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
external_dataset_reference Changes to this property will trigger replacement. DatasetExternalDatasetReferenceArgs
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendly_name str
A descriptive name for the dataset
is_case_insensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Mapping[str, str]

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

location Changes to this property will trigger replacement. str
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
max_time_travel_hours str
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
resource_tags Mapping[str, str]
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
storage_billing_model str
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


accesses List<Property Map>
An array of objects that define dataset access for one or more entities. Structure is documented below.
defaultCollation String
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration Property Map
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs Number
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs Number
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy Boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description String
A user-friendly description of the dataset
externalCatalogDatasetOptions Property Map
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. Property Map
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName String
A descriptive name for the dataset
isCaseInsensitive Boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Map<String>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

location Changes to this property will trigger replacement. String
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours String
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
resourceTags Map<String>
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
storageBillingModel String
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
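To make the label, tag, and destroy semantics above concrete, here is a small TypeScript sketch. It is illustrative only: the tag key "123456789012/environment" and value "production" are placeholders that must correspond to a tag key and value already defined in your organization.

import * as gcp from "@pulumi/gcp";

const tagged = new gcp.bigquery.Dataset("tagged", {
    datasetId: "tagged_dataset",
    location: "EU",
    // Non-authoritative: only the labels listed here are managed by Pulumi;
    // see the effectiveLabels output for everything present on the resource.
    labels: {
        env: "default",
        team: "analytics",
    },
    // Namespaced tag key mapped to its short value; both must already exist.
    resourceTags: {
        "123456789012/environment": "production",
    },
    // Let `pulumi destroy` succeed even if the dataset still contains tables.
    deleteContentsOnDestroy: true,
});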

Outputs

All input properties are implicitly available as output properties. Additionally, the Dataset resource produces the following output properties:

CreationTime int
The time when this dataset was created, in milliseconds since the epoch.
EffectiveLabels Dictionary<string, string>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
Etag string
A hash of the resource.
Id string
The provider-assigned unique ID for this managed resource.
LastModifiedTime int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
SelfLink string
The URI of the created resource.
CreationTime int
The time when this dataset was created, in milliseconds since the epoch.
EffectiveLabels map[string]string
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
Etag string
A hash of the resource.
Id string
The provider-assigned unique ID for this managed resource.
LastModifiedTime int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
SelfLink string
The URI of the created resource.
creationTime Integer
The time when this dataset was created, in milliseconds since the epoch.
effectiveLabels Map<String,String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
etag String
A hash of the resource.
id String
The provider-assigned unique ID for this managed resource.
lastModifiedTime Integer
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
selfLink String
The URI of the created resource.
creationTime number
The time when this dataset was created, in milliseconds since the epoch.
effectiveLabels {[key: string]: string}
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
etag string
A hash of the resource.
id string
The provider-assigned unique ID for this managed resource.
lastModifiedTime number
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
selfLink string
The URI of the created resource.
creation_time int
The time when this dataset was created, in milliseconds since the epoch.
effective_labels Mapping[str, str]
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
etag str
A hash of the resource.
id str
The provider-assigned unique ID for this managed resource.
last_modified_time int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
self_link str
The URI of the created resource.
creationTime Number
The time when this dataset was created, in milliseconds since the epoch.
effectiveLabels Map<String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
etag String
A hash of the resource.
id String
The provider-assigned unique ID for this managed resource.
lastModifiedTime Number
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
selfLink String
The URI of the created resource.
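These outputs can be exported or wired into other resources like any Pulumi output. A minimal TypeScript sketch, assuming a Dataset named dataset declared as in the examples above:

// `dataset` is assumed to be a gcp.bigquery.Dataset defined earlier.
export const datasetSelfLink = dataset.selfLink;
export const datasetEtag = dataset.etag;
// All labels present on the resource in GCP, not just those in this configuration.
export const allLabels = dataset.effectiveLabels;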

Look up Existing Dataset Resource

Get an existing Dataset resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DatasetState, opts?: CustomResourceOptions): Dataset
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        accesses: Optional[Sequence[DatasetAccessArgs]] = None,
        creation_time: Optional[int] = None,
        dataset_id: Optional[str] = None,
        default_collation: Optional[str] = None,
        default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
        default_partition_expiration_ms: Optional[int] = None,
        default_table_expiration_ms: Optional[int] = None,
        delete_contents_on_destroy: Optional[bool] = None,
        description: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        etag: Optional[str] = None,
        external_catalog_dataset_options: Optional[DatasetExternalCatalogDatasetOptionsArgs] = None,
        external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
        friendly_name: Optional[str] = None,
        is_case_insensitive: Optional[bool] = None,
        labels: Optional[Mapping[str, str]] = None,
        last_modified_time: Optional[int] = None,
        location: Optional[str] = None,
        max_time_travel_hours: Optional[str] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        resource_tags: Optional[Mapping[str, str]] = None,
        self_link: Optional[str] = None,
        storage_billing_model: Optional[str] = None) -> Dataset
func GetDataset(ctx *Context, name string, id IDInput, state *DatasetState, opts ...ResourceOption) (*Dataset, error)
public static Dataset Get(string name, Input<string> id, DatasetState? state, CustomResourceOptions? opts = null)
public static Dataset get(String name, Output<String> id, DatasetState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:bigquery:Dataset
    get:
      id: ${id}
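For example, a TypeScript lookup might look like the sketch below; my-project and example_dataset are placeholders, and the ID is assumed to follow the projects/{project}/datasets/{dataset_id} form used when importing this resource:

import * as gcp from "@pulumi/gcp";

// Adopt the state of an existing dataset instead of creating a new one.
const existing = gcp.bigquery.Dataset.get(
    "existing",
    "projects/my-project/datasets/example_dataset",
);
export const existingLocation = existing.location;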
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Accesses List<DatasetAccess>
An array of objects that define dataset access for one or more entities. Structure is documented below.
CreationTime int
The time when this dataset was created, in milliseconds since the epoch.
DatasetId Changes to this property will trigger replacement. string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


DefaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
DefaultPartitionExpirationMs int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
DefaultTableExpirationMs int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
DeleteContentsOnDestroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
Description string
A user-friendly description of the dataset
EffectiveLabels Dictionary<string, string>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
Etag string
A hash of the resource.
ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
ExternalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
FriendlyName string
A descriptive name for the dataset
IsCaseInsensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
Labels Dictionary<string, string>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

LastModifiedTime int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
Location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
MaxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
ResourceTags Dictionary<string, string>
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
SelfLink string
The URI of the created resource.
StorageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
Accesses []DatasetAccessTypeArgs
An array of objects that define dataset access for one or more entities. Structure is documented below.
CreationTime int
The time when this dataset was created, in milliseconds since the epoch.
DatasetId Changes to this property will trigger replacement. string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


DefaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
DefaultPartitionExpirationMs int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
DefaultTableExpirationMs int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
DeleteContentsOnDestroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
Description string
A user-friendly description of the dataset
EffectiveLabels map[string]string
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
Etag string
A hash of the resource.
ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptionsArgs
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
ExternalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReferenceArgs
Information about the external metadata storage where the dataset is defined. Structure is documented below.
FriendlyName string
A descriptive name for the dataset
IsCaseInsensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
Labels map[string]string

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

LastModifiedTime int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
Location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See the official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo; a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
MaxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
ResourceTags map[string]string
The tags attached to this dataset. Tag keys are globally unique. The tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. The tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
SelfLink string
The URI of the created resource.
StorageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
accesses List<DatasetAccess>
An array of objects that define dataset access for one or more entities. Structure is documented below.
creationTime Integer
The time when this dataset was created, in milliseconds since the epoch.
datasetId Changes to this property will trigger replacement. String
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


defaultCollation String
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs Integer
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs Integer
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy Boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description String
A user-friendly description of the dataset
effectiveLabels Map<String,String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi and those applied by other clients and services.
etag String
A hash of the resource.
externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName String
A descriptive name for the dataset
isCaseInsensitive Boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Map<String,String>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

lastModifiedTime Integer
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
location Changes to this property will trigger replacement. String
The geographic location where the dataset should reside. See official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours String
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
resourceTags Map<String,String>
The tags attached to this dataset. Tag keys are globally unique. A tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. A tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
selfLink String
The URI of the created resource.
storageBillingModel String
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
accesses DatasetAccess[]
An array of objects that define dataset access for one or more entities. Structure is documented below.
creationTime number
The time when this dataset was created, in milliseconds since the epoch.
datasetId Changes to this property will trigger replacement. string
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


defaultCollation string
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs number
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs number
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description string
A user-friendly description of the dataset.
effectiveLabels {[key: string]: string}
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
etag string
A hash of the resource.
externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
Options for defining open-source-compatible datasets that live in the BigQuery catalog. Contains metadata about the open source database, schema, or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. DatasetExternalDatasetReference
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName string
A descriptive name for the dataset.
isCaseInsensitive boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels {[key: string]: string}

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

lastModifiedTime number
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
location Changes to this property will trigger replacement. string
The geographic location where the dataset should reside. See official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours string
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
resourceTags {[key: string]: string}
The tags attached to this dataset. Tag keys are globally unique. A tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. A tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
selfLink string
The URI of the created resource.
storageBillingModel string
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
accesses Sequence[DatasetAccessArgs]
An array of objects that define dataset access for one or more entities. Structure is documented below.
creation_time int
The time when this dataset was created, in milliseconds since the epoch.
dataset_id Changes to this property will trigger replacement. str
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


default_collation str
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
default_partition_expiration_ms int
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
default_table_expiration_ms int
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
delete_contents_on_destroy bool
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description str
A user-friendly description of the dataset.
effective_labels Mapping[str, str]
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
etag str
A hash of the resource.
external_catalog_dataset_options DatasetExternalCatalogDatasetOptionsArgs
Options for defining open-source-compatible datasets that live in the BigQuery catalog. Contains metadata about the open source database, schema, or namespace represented by the current dataset. Structure is documented below.
external_dataset_reference Changes to this property will trigger replacement. DatasetExternalDatasetReferenceArgs
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendly_name str
A descriptive name for the dataset.
is_case_insensitive bool
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Mapping[str, str]

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

last_modified_time int
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
location Changes to this property will trigger replacement. str
The geographic location where the dataset should reside. See official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
max_time_travel_hours str
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
resource_tags Mapping[str, str]
The tags attached to this dataset. Tag keys are globally unique. A tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. A tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
self_link str
The URI of the created resource.
storage_billing_model str
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
accesses List<Property Map>
An array of objects that define dataset access for one or more entities. Structure is documented below.
creationTime Number
The time when this dataset was created, in milliseconds since the epoch.
datasetId Changes to this property will trigger replacement. String
A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


defaultCollation String
Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

  • 'und:ci': undetermined locale, case insensitive.
  • '': empty string. Defaults to case-sensitive behavior.
defaultEncryptionConfiguration Property Map
The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have their encryption key set to this value, unless the table creation request (or query) overrides the key. Structure is documented below.
defaultPartitionExpirationMs Number
The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
defaultTableExpirationMs Number
The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
deleteContentsOnDestroy Boolean
If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
description String
A user-friendly description of the dataset.
effectiveLabels Map<String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
etag String
A hash of the resource.
externalCatalogDatasetOptions Property Map
Options for defining open-source-compatible datasets that live in the BigQuery catalog. Contains metadata about the open source database, schema, or namespace represented by the current dataset. Structure is documented below.
externalDatasetReference Changes to this property will trigger replacement. Property Map
Information about the external metadata storage where the dataset is defined. Structure is documented below.
friendlyName String
A descriptive name for the dataset.
isCaseInsensitive Boolean
TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
labels Map<String>

The labels associated with this dataset. You can use these to organize and group your datasets.

Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

lastModifiedTime Number
The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
location Changes to this property will trigger replacement. String
The geographic location where the dataset should reside. See official docs. There are two types of locations: regional and multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places. The default value is the multi-regional location US. Changing this forces a new resource to be created.
maxTimeTravelHours String
Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
resourceTags Map<String>
The tags attached to this dataset. Tag keys are globally unique. A tag key is expected to be in the namespaced format, for example "123456789012/environment", where 123456789012 is the ID of the parent organization or project resource for this tag key. A tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
selfLink String
The URI of the created resource.
storageBillingModel String
Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
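
As a worked example of how the expiration defaults above interact, here is a minimal TypeScript sketch (dataset and resource names are placeholders). Per the property descriptions, new partitioned tables in this dataset would use the partition expiration, while new non-partitioned tables would use the table expiration:

import * as gcp from "@pulumi/gcp";

const expiring = new gcp.bigquery.Dataset("expiring", {
    datasetId: "expiring_dataset",
    location: "US",
    // Non-partitioned tables expire one hour after creation (the minimum value).
    defaultTableExpirationMs: 3600000,
    // Partitions expire two hours after their partition time; for partitioned
    // tables this takes precedence over defaultTableExpirationMs.
    defaultPartitionExpirationMs: 7200000,
    // Bill storage by physical (compressed) bytes instead of the LOGICAL default.
    storageBillingModel: "PHYSICAL",
});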

Supporting Types

DatasetAccess
, DatasetAccessArgs

Condition DatasetAccessCondition
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
Dataset DatasetAccessDataset
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
Domain string
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
GroupByEmail string
An email address of a Google Group to grant access to.
IamMember string
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
Role string
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
Routine DatasetAccessRoutine
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
SpecialGroup string
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
UserByEmail string
An email address of a user to grant access to. For example: fred@example.com.
View DatasetAccessView
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
Condition DatasetAccessCondition
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
Dataset DatasetAccessDataset
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
Domain string
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
GroupByEmail string
An email address of a Google Group to grant access to.
IamMember string
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
Role string
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
Routine DatasetAccessRoutine
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
SpecialGroup string
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
UserByEmail string
An email address of a user to grant access to. For example: fred@example.com.
View DatasetAccessView
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
condition DatasetAccessCondition
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
dataset DatasetAccessDataset
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
domain String
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
groupByEmail String
An email address of a Google Group to grant access to.
iamMember String
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
role String
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
routine DatasetAccessRoutine
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
specialGroup String
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
userByEmail String
An email address of a user to grant access to. For example: fred@example.com.
view DatasetAccessView
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
condition DatasetAccessCondition
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
dataset DatasetAccessDataset
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
domain string
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
groupByEmail string
An email address of a Google Group to grant access to.
iamMember string
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
role string
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
routine DatasetAccessRoutine
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
specialGroup string
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
userByEmail string
An email address of a user to grant access to. For example: fred@example.com.
view DatasetAccessView
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
condition DatasetAccessCondition
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
dataset DatasetAccessDataset
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
domain str
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
group_by_email str
An email address of a Google Group to grant access to.
iam_member str
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
role str
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
routine DatasetAccessRoutine
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
special_group str
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
user_by_email str
An email address of a user to grant access to. For example: fred@example.com.
view DatasetAccessView
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
condition Property Map
Condition for the binding. If the CEL expression in this field is true, this access binding will be considered. Structure is documented below.
dataset Property Map
Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
domain String
A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
groupByEmail String
An email address of a Google Group to grant access to.
iamMember String
Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers.
role String
Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
routine Property Map
A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
specialGroup String
A special group to grant access to. Possible values include:

  • projectOwners: Owners of the enclosing project.
  • projectReaders: Readers of the enclosing project.
  • projectWriters: Writers of the enclosing project.
  • allAuthenticatedUsers: All authenticated BigQuery users.
userByEmail String
An email address of a user to grant access to. For example: fred@example.com.
view Property Map
A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
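
To illustrate the DatasetAccess entries above, here is a minimal TypeScript sketch combining a user grant, a special-group grant, and a domain grant (the email address and domain are placeholders):

import * as gcp from "@pulumi/gcp";

const shared = new gcp.bigquery.Dataset("shared", {
    datasetId: "shared_dataset",
    accesses: [
        // Grant a single user ownership of the dataset.
        { role: "OWNER", userByEmail: "fred@example.com" },
        // Grant all writers of the enclosing project write access.
        { role: "WRITER", specialGroup: "projectWriters" },
        // Grant read access to anyone signed in under the domain.
        { role: "READER", domain: "example.com" },
    ],
});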

DatasetAccessCondition
, DatasetAccessConditionArgs

Expression
This property is required.
Changes to this property will trigger replacement.
string
Textual representation of an expression in Common Expression Language syntax.
Description Changes to this property will trigger replacement. string
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
Location Changes to this property will trigger replacement. string
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
Title Changes to this property will trigger replacement. string
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
Expression
This property is required.
Changes to this property will trigger replacement.
string
Textual representation of an expression in Common Expression Language syntax.
Description Changes to this property will trigger replacement. string
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
Location Changes to this property will trigger replacement. string
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
Title Changes to this property will trigger replacement. string
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
expression
This property is required.
Changes to this property will trigger replacement.
String
Textual representation of an expression in Common Expression Language syntax.
description Changes to this property will trigger replacement. String
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
location Changes to this property will trigger replacement. String
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
title Changes to this property will trigger replacement. String
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
expression
This property is required.
Changes to this property will trigger replacement.
string
Textual representation of an expression in Common Expression Language syntax.
description Changes to this property will trigger replacement. string
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
location Changes to this property will trigger replacement. string
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
title Changes to this property will trigger replacement. string
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
expression
This property is required.
Changes to this property will trigger replacement.
str
Textual representation of an expression in Common Expression Language syntax.
description Changes to this property will trigger replacement. str
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
location Changes to this property will trigger replacement. str
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
title Changes to this property will trigger replacement. str
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
expression
This property is required.
Changes to this property will trigger replacement.
String
Textual representation of an expression in Common Expression Language syntax.
description Changes to this property will trigger replacement. String
Description of the expression. This is longer text that describes the expression, e.g. when it is hovered over in a UI.
location Changes to this property will trigger replacement. String
String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
title Changes to this property will trigger replacement. String
Title for the expression, i.e. a short string describing its purpose. This can be used, e.g., in UIs which allow entering the expression.
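
As a sketch of how a condition attaches to an access entry (the email address, title, and expression below are hypothetical), this binding would only be considered while the CEL expression evaluates to true:

import * as gcp from "@pulumi/gcp";

const conditional = new gcp.bigquery.Dataset("conditional", {
    datasetId: "conditional_dataset",
    accesses: [{
        role: "READER",
        userByEmail: "analyst@example.com",
        condition: {
            title: "expires_2026",
            description: "Read access lapses at the start of 2026.",
            // CEL expression; the grant applies only while this is true.
            expression: "request.time < timestamp(\"2026-01-01T00:00:00Z\")",
        },
    }],
});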

DatasetAccessDataset
, DatasetAccessDatasetArgs

Dataset This property is required. DatasetAccessDatasetDataset
The dataset this entry applies to. Structure is documented below.
TargetTypes This property is required. List<string>
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
Dataset This property is required. DatasetAccessDatasetDataset
The dataset this entry applies to. Structure is documented below.
TargetTypes This property is required. []string
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
dataset This property is required. DatasetAccessDatasetDataset
The dataset this entry applies to. Structure is documented below.
targetTypes This property is required. List<String>
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
dataset This property is required. DatasetAccessDatasetDataset
The dataset this entry applies to. Structure is documented below.
targetTypes This property is required. string[]
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
dataset This property is required. DatasetAccessDatasetDataset
The dataset this entry applies to. Structure is documented below.
target_types This property is required. Sequence[str]
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
dataset This property is required. Property Map
The dataset this entry applies to. Structure is documented below.
targetTypes This property is required. List<String>
Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
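
A minimal sketch of an authorized-dataset entry (project and dataset IDs are placeholders): all views in the named dataset gain read access to the current dataset, and no role is set on the entry.

import * as gcp from "@pulumi/gcp";

const source = new gcp.bigquery.Dataset("source", {
    datasetId: "source_dataset",
    accesses: [{
        dataset: {
            dataset: {
                projectId: "my-project",
                datasetId: "consumer_dataset",
            },
            // Only views are currently supported as a target type.
            targetTypes: ["VIEWS"],
        },
    }],
});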

DatasetAccessDatasetDataset
, DatasetAccessDatasetDatasetArgs

DatasetId This property is required. string
The ID of the dataset containing this table.
ProjectId This property is required. string
The ID of the project containing this table.
DatasetId This property is required. string
The ID of the dataset containing this table.
ProjectId This property is required. string
The ID of the project containing this table.
datasetId This property is required. String
The ID of the dataset containing this table.
projectId This property is required. String
The ID of the project containing this table.
datasetId This property is required. string
The ID of the dataset containing this table.
projectId This property is required. string
The ID of the project containing this table.
dataset_id This property is required. str
The ID of the dataset containing this table.
project_id This property is required. str
The ID of the project containing this table.
datasetId This property is required. String
The ID of the dataset containing this table.
projectId This property is required. String
The ID of the project containing this table.

DatasetAccessRoutine
, DatasetAccessRoutineArgs

DatasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
ProjectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
RoutineId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
DatasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
ProjectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
RoutineId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the project containing this table.
routineId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
routineId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
dataset_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the dataset containing this table.
project_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the project containing this table.
routine_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the project containing this table.
routineId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
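
A minimal sketch of a routine-based entry (all IDs are placeholders); as noted above, the role field is not required when routine is set:

import * as gcp from "@pulumi/gcp";

const data = new gcp.bigquery.Dataset("data", {
    datasetId: "private_dataset",
    accesses: [{
        // Queries executed against this routine get read access to
        // tables in the dataset.
        routine: {
            projectId: "my-project",
            datasetId: "routines_dataset",
            routineId: "my_routine",
        },
    }],
});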

DatasetAccessView
, DatasetAccessViewArgs

DatasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
ProjectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
TableId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
DatasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
ProjectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
TableId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the project containing this table.
tableId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the project containing this table.
tableId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
dataset_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the dataset containing this table.
project_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the project containing this table.
table_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
datasetId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the dataset containing this table.
projectId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the project containing this table.
tableId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
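
A minimal sketch of a view-based entry (all IDs are placeholders); as with routines, no role is required:

import * as gcp from "@pulumi/gcp";

const data = new gcp.bigquery.Dataset("data", {
    datasetId: "private_dataset",
    accesses: [{
        // Queries executed against this view get read access to
        // tables in the dataset.
        view: {
            projectId: "my-project",
            datasetId: "views_dataset",
            tableId: "authorized_view",
        },
    }],
});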

DatasetDefaultEncryptionConfiguration
, DatasetDefaultEncryptionConfigurationArgs

KmsKeyName This property is required. string
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
KmsKeyName This property is required. string
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
kmsKeyName This property is required. String
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
kmsKeyName This property is required. string
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
kms_key_name This property is required. str
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
kmsKeyName This property is required. String
Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
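
A minimal sketch of setting a default customer-managed key (the key name is a placeholder in the standard Cloud KMS resource format); per the description above, the BigQuery service account must be able to use the key:

import * as gcp from "@pulumi/gcp";

const encrypted = new gcp.bigquery.Dataset("encrypted", {
    datasetId: "encrypted_dataset",
    defaultEncryptionConfiguration: {
        // Placeholder key; grant the BigQuery service account
        // roles/cloudkms.cryptoKeyEncrypterDecrypter on it first.
        kmsKeyName: "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key",
    },
});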

DatasetExternalCatalogDatasetOptions
, DatasetExternalCatalogDatasetOptionsArgs

DefaultStorageLocationUri string
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
Parameters Dictionary<string, string>
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
DefaultStorageLocationUri string
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
Parameters map[string]string
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
defaultStorageLocationUri String
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
parameters Map<String,String>
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
defaultStorageLocationUri string
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
parameters {[key: string]: string}
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
default_storage_location_uri str
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
parameters Mapping[str, str]
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
defaultStorageLocationUri String
The storage location URI for all tables in the dataset. Equivalent to the Hive metastore's database locationUri. Maximum length of 1,024 characters.
parameters Map<String>
A map of key-value pairs defining the parameters and properties of the open source schema. Maximum size of 2 MiB.
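
A minimal sketch of these options (the bucket URI and parameter values are placeholders):

import * as gcp from "@pulumi/gcp";

const openCatalog = new gcp.bigquery.Dataset("open-catalog", {
    datasetId: "open_catalog_dataset",
    externalCatalogDatasetOptions: {
        // Analogous to a Hive metastore database locationUri.
        defaultStorageLocationUri: "gs://my-bucket/warehouse",
        // Free-form open source schema parameters.
        parameters: {
            owner: "data-platform-team",
        },
    },
});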

DatasetExternalDatasetReference
, DatasetExternalDatasetReferenceArgs

Connection
This property is required.
Changes to this property will trigger replacement.
string
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
ExternalSource
This property is required.
Changes to this property will trigger replacement.
string
External source that backs this dataset.
Connection
This property is required.
Changes to this property will trigger replacement.
string
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
ExternalSource
This property is required.
Changes to this property will trigger replacement.
string
External source that backs this dataset.
connection
This property is required.
Changes to this property will trigger replacement.
String
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
externalSource
This property is required.
Changes to this property will trigger replacement.
String
External source that backs this dataset.
connection
This property is required.
Changes to this property will trigger replacement.
string
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
externalSource
This property is required.
Changes to this property will trigger replacement.
string
External source that backs this dataset.
connection
This property is required.
Changes to this property will trigger replacement.
str
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
external_source
This property is required.
Changes to this property will trigger replacement.
str
External source that backs this dataset.
connection
This property is required.
Changes to this property will trigger replacement.
String
The connection ID that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
externalSource
This property is required.
Changes to this property will trigger replacement.
String
External source that backs this dataset.
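
A minimal sketch of an external dataset reference; the connection name follows the format documented above, and the external source string is a hypothetical identifier for illustration only:

import * as gcp from "@pulumi/gcp";

const external = new gcp.bigquery.Dataset("external", {
    datasetId: "external_dataset",
    externalDatasetReference: {
        // projects/{projectId}/locations/{locationId}/connections/{connectionId}
        connection: "projects/my-project/locations/aws-us-east-1/connections/my-connection",
        // Hypothetical external database backing this dataset.
        externalSource: "aws-glue://arn:aws:glue:us-east-1:123456789012:database/my_database",
    },
});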

Import

Dataset can be imported using any of these accepted formats:

  • projects/{{project}}/datasets/{{dataset_id}}

  • {{project}}/{{dataset_id}}

  • {{dataset_id}}

When using the pulumi import command, Dataset can be imported using one of the formats above. For example:

$ pulumi import gcp:bigquery/dataset:Dataset default projects/{{project}}/datasets/{{dataset_id}}
$ pulumi import gcp:bigquery/dataset:Dataset default {{project}}/{{dataset_id}}
$ pulumi import gcp:bigquery/dataset:Dataset default {{dataset_id}}

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.