AWS v6.77.0 published on Wednesday, Apr 9, 2025 by Pulumi

aws.redshift.Logging


Resource for managing an AWS Redshift Logging configuration.

Example Usage

Basic Usage

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const example = new aws.redshift.Logging("example", {
    clusterIdentifier: exampleAwsRedshiftCluster.id,
    logDestinationType: "cloudwatch",
    logExports: [
        "connectionlog",
        "userlog",
    ],
});

Python

import pulumi
import pulumi_aws as aws

example = aws.redshift.Logging("example",
    cluster_identifier=example_aws_redshift_cluster["id"],
    log_destination_type="cloudwatch",
    log_exports=[
        "connectionlog",
        "userlog",
    ])

Go

package main

import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/redshift"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := redshift.NewLogging(ctx, "example", &redshift.LoggingArgs{
			ClusterIdentifier:  pulumi.Any(exampleAwsRedshiftCluster.Id),
			LogDestinationType: pulumi.String("cloudwatch"),
			LogExports: pulumi.StringArray{
				pulumi.String("connectionlog"),
				pulumi.String("userlog"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() => 
{
    var example = new Aws.RedShift.Logging("example", new()
    {
        ClusterIdentifier = exampleAwsRedshiftCluster.Id,
        LogDestinationType = "cloudwatch",
        LogExports = new[]
        {
            "connectionlog",
            "userlog",
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.redshift.Logging;
import com.pulumi.aws.redshift.LoggingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new Logging("example", LoggingArgs.builder()
            .clusterIdentifier(exampleAwsRedshiftCluster.id())
            .logDestinationType("cloudwatch")
            .logExports(
                "connectionlog",
                "userlog")
            .build());

    }
}

YAML

resources:
  example:
    type: aws:redshift:Logging
    properties:
      clusterIdentifier: ${exampleAwsRedshiftCluster.id}
      logDestinationType: cloudwatch
      logExports:
        - connectionlog
        - userlog

S3 Destination Type

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const example = new aws.redshift.Logging("example", {
    clusterIdentifier: exampleAwsRedshiftCluster.id,
    logDestinationType: "s3",
    bucketName: exampleAwsS3Bucket.id,
    s3KeyPrefix: "example-prefix/",
});

Python

import pulumi
import pulumi_aws as aws

example = aws.redshift.Logging("example",
    cluster_identifier=example_aws_redshift_cluster["id"],
    log_destination_type="s3",
    bucket_name=example_aws_s3_bucket["id"],
    s3_key_prefix="example-prefix/")

Go

package main

import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/redshift"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := redshift.NewLogging(ctx, "example", &redshift.LoggingArgs{
			ClusterIdentifier:  pulumi.Any(exampleAwsRedshiftCluster.Id),
			LogDestinationType: pulumi.String("s3"),
			BucketName:         pulumi.Any(exampleAwsS3Bucket.Id),
			S3KeyPrefix:        pulumi.String("example-prefix/"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() => 
{
    var example = new Aws.RedShift.Logging("example", new()
    {
        ClusterIdentifier = exampleAwsRedshiftCluster.Id,
        LogDestinationType = "s3",
        BucketName = exampleAwsS3Bucket.Id,
        S3KeyPrefix = "example-prefix/",
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.redshift.Logging;
import com.pulumi.aws.redshift.LoggingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new Logging("example", LoggingArgs.builder()
            .clusterIdentifier(exampleAwsRedshiftCluster.id())
            .logDestinationType("s3")
            .bucketName(exampleAwsS3Bucket.id())
            .s3KeyPrefix("example-prefix/")
            .build());

    }
}

YAML

resources:
  example:
    type: aws:redshift:Logging
    properties:
      clusterIdentifier: ${exampleAwsRedshiftCluster.id}
      logDestinationType: s3
      bucketName: ${exampleAwsS3Bucket.id}
      s3KeyPrefix: example-prefix/
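
When logging to S3, the target bucket must allow Amazon Redshift to write log files, as noted in the bucketName description below. The following is a minimal TypeScript sketch, reusing the exampleAwsS3Bucket placeholder from the example above; the exact statements (including any aws:SourceArn or aws:SourceAccount conditions) are in the AWS documentation referenced under bucketName, so treat this as a starting point rather than the authoritative policy.

const allowRedshiftLogging = new aws.s3.BucketPolicy("allow-redshift-logging", {
    bucket: exampleAwsS3Bucket.id,
    policy: pulumi.jsonStringify({
        Version: "2012-10-17",
        Statement: [
            {
                // Lets the Redshift logging service principal write log files.
                Effect: "Allow",
                Principal: { Service: "redshift.amazonaws.com" },
                Action: "s3:PutObject",
                Resource: pulumi.interpolate`${exampleAwsS3Bucket.arn}/*`,
            },
            {
                // Redshift also reads the bucket ACL before delivering logs.
                Effect: "Allow",
                Principal: { Service: "redshift.amazonaws.com" },
                Action: "s3:GetBucketAcl",
                Resource: exampleAwsS3Bucket.arn,
            },
        ],
    }),
});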

Create Logging Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Logging(name: string, args: LoggingArgs, opts?: CustomResourceOptions);
@overload
def Logging(resource_name: str,
            args: LoggingArgs,
            opts: Optional[ResourceOptions] = None)

@overload
def Logging(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            cluster_identifier: Optional[str] = None,
            bucket_name: Optional[str] = None,
            log_destination_type: Optional[str] = None,
            log_exports: Optional[Sequence[str]] = None,
            s3_key_prefix: Optional[str] = None)
func NewLogging(ctx *Context, name string, args LoggingArgs, opts ...ResourceOption) (*Logging, error)
public Logging(string name, LoggingArgs args, CustomResourceOptions? opts = null)
public Logging(String name, LoggingArgs args)
public Logging(String name, LoggingArgs args, CustomResourceOptions options)
type: aws:redshift:Logging
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource. In Python this parameter is resource_name.
args This property is required. LoggingArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior. The type is ResourceOptions in Python and ResourceOption in Go; in Java the parameter is named options.
ctx Context
Context object for the current deployment (Go only).

Constructor example

The following reference example uses placeholder values for all input properties.

C#

var loggingResource = new Aws.RedShift.Logging("loggingResource", new()
{
    ClusterIdentifier = "string",
    BucketName = "string",
    LogDestinationType = "string",
    LogExports = new[]
    {
        "string",
    },
    S3KeyPrefix = "string",
});

Go

example, err := redshift.NewLogging(ctx, "loggingResource", &redshift.LoggingArgs{
	ClusterIdentifier:  pulumi.String("string"),
	BucketName:         pulumi.String("string"),
	LogDestinationType: pulumi.String("string"),
	LogExports: pulumi.StringArray{
		pulumi.String("string"),
	},
	S3KeyPrefix: pulumi.String("string"),
})

Java

var loggingResource = new Logging("loggingResource", LoggingArgs.builder()
    .clusterIdentifier("string")
    .bucketName("string")
    .logDestinationType("string")
    .logExports("string")
    .s3KeyPrefix("string")
    .build());

Python

logging_resource = aws.redshift.Logging("loggingResource",
    cluster_identifier="string",
    bucket_name="string",
    log_destination_type="string",
    log_exports=["string"],
    s3_key_prefix="string")

TypeScript

const loggingResource = new aws.redshift.Logging("loggingResource", {
    clusterIdentifier: "string",
    bucketName: "string",
    logDestinationType: "string",
    logExports: ["string"],
    s3KeyPrefix: "string",
});

YAML

type: aws:redshift:Logging
properties:
    bucketName: string
    clusterIdentifier: string
    logDestinationType: string
    logExports:
        - string
    s3KeyPrefix: string

Logging Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The Logging resource accepts the following input properties:

clusterIdentifier This property is required. string

Identifier of the source cluster.

The following arguments are optional:

bucketName string
Name of an existing S3 bucket where the log files are to be stored. Required when logDestinationType is s3. The bucket must be in the same region as the cluster, and the cluster must have permission to read the bucket and put objects into it. For more information on the permissions required for the bucket, see the AWS documentation.
logDestinationType string
Log destination type. Valid values are s3 and cloudwatch.
logExports string[]
Collection of exported log types. Required when logDestinationType is cloudwatch. Valid values are connectionlog, useractivitylog, and userlog.
s3KeyPrefix string
Prefix applied to the log file names.

Property names and types above follow the TypeScript SDK; the other SDKs use their own conventions (for example, cluster_identifier and Sequence[str] in Python, ClusterIdentifier and []string in Go).

Outputs

All input properties are implicitly available as output properties. Additionally, the Logging resource produces the following output properties:

id string
The provider-assigned unique ID for this managed resource.
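
The id, like any output, can be exported from the stack or passed to other resources. A minimal sketch using the example resource from Basic Usage above:

export const redshiftLoggingId = example.id;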

Look up Existing Logging Resource

Get an existing Logging resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: LoggingState, opts?: CustomResourceOptions): Logging
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        bucket_name: Optional[str] = None,
        cluster_identifier: Optional[str] = None,
        log_destination_type: Optional[str] = None,
        log_exports: Optional[Sequence[str]] = None,
        s3_key_prefix: Optional[str] = None) -> Logging
func GetLogging(ctx *Context, name string, id IDInput, state *LoggingState, opts ...ResourceOption) (*Logging, error)
public static Logging Get(string name, Input<string> id, LoggingState? state, CustomResourceOptions? opts = null)
public static Logging get(String name, Output<String> id, LoggingState state, CustomResourceOptions options)
resources:
  _:
    type: aws:redshift:Logging
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource. In Python this parameter is resource_name.
id This property is required.
The unique provider ID of the resource to look up.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
bucketName string
Name of an existing S3 bucket where the log files are to be stored. Required when logDestinationType is s3. The bucket must be in the same region as the cluster, and the cluster must have permission to read the bucket and put objects into it. For more information on the permissions required for the bucket, see the AWS documentation.
clusterIdentifier string
Identifier of the source cluster.
logDestinationType string
Log destination type. Valid values are s3 and cloudwatch.
logExports string[]
Collection of exported log types. Required when logDestinationType is cloudwatch. Valid values are connectionlog, useractivitylog, and userlog.
s3KeyPrefix string
Prefix applied to the log file names.

As with inputs, names and types above follow the TypeScript SDK; the other SDKs use their own conventions.
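
For example, a minimal TypeScript sketch that looks up an existing logging configuration by its provider ID (a placeholder value here) and reads its state back:

const existing = aws.redshift.Logging.get("existing", "cluster-id-12345678");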

Import

Using pulumi import, import Redshift Logging using the cluster identifier as the id. For example:

$ pulumi import aws:redshift/logging:Logging example cluster-id-12345678

To learn more about importing existing cloud resources, see Importing resources.
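
The same adoption can also be expressed in code with the import resource option. A minimal TypeScript sketch, assuming the arguments below match the cluster's actual logging configuration (Pulumi rejects the import if they do not); the option can be removed after the first successful update:

const adopted = new aws.redshift.Logging("example", {
    clusterIdentifier: "cluster-id-12345678",
    logDestinationType: "cloudwatch",
    logExports: ["connectionlog", "userlog"],
}, { import: "cluster-id-12345678" });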

Package Details

Repository
AWS Classic pulumi/pulumi-aws
License
Apache-2.0
Notes
This Pulumi package is based on the aws Terraform Provider.