1. Packages
  2. Alibaba Cloud Provider
  3. API Docs
  4. log
  5. OssExport
Alibaba Cloud v3.76.0 published on Tuesday, Apr 8, 2025 by Pulumi

alicloud.log.OssExport

Explore with Pulumi AI

Log Service data delivery management. This resource delivers data from a Logstore to OSS object storage. Refer to the official documentation for details.

NOTE: Available in 1.187.0+

Example Usage

Basic Usage

import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
import * as random from "@pulumi/random";

// Random 5-digit suffix so the log project name is unique per account.
const _default = new random.index.Integer("default", {
    max: 99999,
    min: 10000,
});
// Log project that will contain the logstore to export from.
const example = new alicloud.log.Project("example", {
    projectName: `terraform-example-${_default.result}`,
    description: "terraform-example",
    tags: {
        Created: "TF",
        For: "example",
    },
});
// Source logstore whose data is delivered to OSS.
const exampleStore = new alicloud.log.Store("example", {
    projectName: example.projectName,
    logstoreName: "example-store",
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// OSS export job: ships logstore data to the given OSS bucket as JSON.
const exampleOssExport = new alicloud.log.OssExport("example", {
    projectName: example.projectName,
    logstoreName: exampleStore.logstoreName,
    exportName: "terraform-example",
    displayName: "terraform-example",
    bucket: "example-bucket",
    prefix: "root",
    suffix: "",
    bufferInterval: 300,
    bufferSize: 250,
    compressType: "none",
    pathFormat: "%Y/%m/%d/%H/%M",
    contentType: "json",
    jsonEnableTag: true,
    roleArn: "role_arn_for_oss_write",
    logReadRoleArn: "role_arn_for_sls_read",
    timeZone: "+0800",
});
Copy
import pulumi
import pulumi_alicloud as alicloud
import pulumi_random as random

# Random 5-digit suffix so the log project name is unique per account.
default = random.index.Integer("default",
    max=99999,
    min=10000)
# Log project that will contain the logstore to export from.
example = alicloud.log.Project("example",
    project_name=f"terraform-example-{default['result']}",
    description="terraform-example",
    tags={
        "Created": "TF",
        "For": "example",
    })
# Source logstore whose data is delivered to OSS.
example_store = alicloud.log.Store("example",
    project_name=example.project_name,
    logstore_name="example-store",
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# OSS export job: ships logstore data to the given OSS bucket as JSON.
example_oss_export = alicloud.log.OssExport("example",
    project_name=example.project_name,
    logstore_name=example_store.logstore_name,
    export_name="terraform-example",
    display_name="terraform-example",
    bucket="example-bucket",
    prefix="root",
    suffix="",
    buffer_interval=300,
    buffer_size=250,
    compress_type="none",
    path_format="%Y/%m/%d/%H/%M",
    content_type="json",
    json_enable_tag=True,
    role_arn="role_arn_for_oss_write",
    log_read_role_arn="role_arn_for_sls_read",
    time_zone="+0800")
Copy
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_default, err := random.NewInteger(ctx, "default", &random.IntegerArgs{
			Max: 99999,
			Min: 10000,
		})
		if err != nil {
			return err
		}
		example, err := log.NewProject(ctx, "example", &log.ProjectArgs{
			ProjectName: pulumi.Sprintf("terraform-example-%v", _default.Result),
			Description: pulumi.String("terraform-example"),
			Tags: pulumi.StringMap{
				"Created": pulumi.String("TF"),
				"For":     pulumi.String("example"),
			},
		})
		if err != nil {
			return err
		}
		exampleStore, err := log.NewStore(ctx, "example", &log.StoreArgs{
			ProjectName:        example.ProjectName,
			LogstoreName:       pulumi.String("example-store"),
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		_, err = log.NewOssExport(ctx, "example", &log.OssExportArgs{
			ProjectName:    example.ProjectName,
			LogstoreName:   exampleStore.LogstoreName,
			ExportName:     pulumi.String("terraform-example"),
			DisplayName:    pulumi.String("terraform-example"),
			Bucket:         pulumi.String("example-bucket"),
			Prefix:         pulumi.String("root"),
			Suffix:         pulumi.String(""),
			BufferInterval: pulumi.Int(300),
			BufferSize:     pulumi.Int(250),
			CompressType:   pulumi.String("none"),
			PathFormat:     pulumi.String("%Y/%m/%d/%H/%M"),
			ContentType:    pulumi.String("json"),
			JsonEnableTag:  pulumi.Bool(true),
			RoleArn:        pulumi.String("role_arn_for_oss_write"),
			LogReadRoleArn: pulumi.String("role_arn_for_sls_read"),
			TimeZone:       pulumi.String("+0800"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() => 
{
    // Random 5-digit suffix so the log project name is unique per account.
    var @default = new Random.Index.Integer("default", new()
    {
        Max = 99999,
        Min = 10000,
    });

    // Log project that will contain the logstore to export from.
    var example = new AliCloud.Log.Project("example", new()
    {
        ProjectName = $"terraform-example-{@default.Result}",
        Description = "terraform-example",
        Tags = 
        {
            { "Created", "TF" },
            { "For", "example" },
        },
    });

    // Source logstore whose data is delivered to OSS.
    var exampleStore = new AliCloud.Log.Store("example", new()
    {
        ProjectName = example.ProjectName,
        LogstoreName = "example-store",
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });

    // OSS export job: ships logstore data to the given OSS bucket as JSON.
    var exampleOssExport = new AliCloud.Log.OssExport("example", new()
    {
        ProjectName = example.ProjectName,
        LogstoreName = exampleStore.LogstoreName,
        ExportName = "terraform-example",
        DisplayName = "terraform-example",
        Bucket = "example-bucket",
        Prefix = "root",
        Suffix = "",
        BufferInterval = 300,
        BufferSize = 250,
        CompressType = "none",
        PathFormat = "%Y/%m/%d/%H/%M",
        ContentType = "json",
        JsonEnableTag = true,
        RoleArn = "role_arn_for_oss_write",
        LogReadRoleArn = "role_arn_for_sls_read",
        TimeZone = "+0800",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
// Fixed: the class is `Integer` (capitalized); `com.pulumi.random.integer` does not exist.
import com.pulumi.random.Integer;
import com.pulumi.random.IntegerArgs;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.log.OssExport;
import com.pulumi.alicloud.log.OssExportArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Random 5-digit suffix so the log project name is unique per account.
        var default_ = new Integer("default", IntegerArgs.builder()
            .max(99999)
            .min(10000)
            .build());

        // Log project that will contain the logstore to export from.
        var example = new Project("example", ProjectArgs.builder()
            .projectName(String.format("terraform-example-%s", default_.result()))
            .description("terraform-example")
            .tags(Map.ofEntries(
                Map.entry("Created", "TF"),
                Map.entry("For", "example")
            ))
            .build());

        // Source logstore whose data is delivered to OSS.
        var exampleStore = new Store("exampleStore", StoreArgs.builder()
            .projectName(example.projectName())
            .logstoreName("example-store")
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());

        // OSS export job: ships logstore data to the given OSS bucket as JSON.
        var exampleOssExport = new OssExport("exampleOssExport", OssExportArgs.builder()
            .projectName(example.projectName())
            .logstoreName(exampleStore.logstoreName())
            .exportName("terraform-example")
            .displayName("terraform-example")
            .bucket("example-bucket")
            .prefix("root")
            .suffix("")
            .bufferInterval(300)
            .bufferSize(250)
            .compressType("none")
            .pathFormat("%Y/%m/%d/%H/%M")
            .contentType("json")
            .jsonEnableTag(true)
            .roleArn("role_arn_for_oss_write")
            .logReadRoleArn("role_arn_for_sls_read")
            .timeZone("+0800")
            .build());

    }
}
Copy
resources:
  # Random 5-digit suffix so the log project name is unique per account.
  default:
    type: random:integer
    properties:
      max: 99999
      min: 10000
  # Log project that will contain the logstore to export from.
  example:
    type: alicloud:log:Project
    properties:
      projectName: terraform-example-${default.result}
      description: terraform-example
      tags:
        Created: TF
        For: example
  # Source logstore whose data is delivered to OSS.
  exampleStore:
    type: alicloud:log:Store
    name: example
    properties:
      projectName: ${example.projectName}
      logstoreName: example-store
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # OSS export job: ships logstore data to the given OSS bucket as JSON.
  exampleOssExport:
    type: alicloud:log:OssExport
    name: example
    properties:
      projectName: ${example.projectName}
      logstoreName: ${exampleStore.logstoreName}
      exportName: terraform-example
      displayName: terraform-example
      bucket: example-bucket
      prefix: root
      suffix: ""
      bufferInterval: 300
      bufferSize: 250
      compressType: none
      pathFormat: '%Y/%m/%d/%H/%M'
      contentType: json
      jsonEnableTag: true
      roleArn: role_arn_for_oss_write
      logReadRoleArn: role_arn_for_sls_read
      timeZone: '+0800'
Copy

Create OssExport Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new OssExport(name: string, args: OssExportArgs, opts?: CustomResourceOptions);
@overload
def OssExport(resource_name: str,
              args: OssExportArgs,
              opts: Optional[ResourceOptions] = None)

@overload
def OssExport(resource_name: str,
              opts: Optional[ResourceOptions] = None,
              export_name: Optional[str] = None,
              buffer_interval: Optional[int] = None,
              buffer_size: Optional[int] = None,
              time_zone: Optional[str] = None,
              project_name: Optional[str] = None,
              content_type: Optional[str] = None,
              bucket: Optional[str] = None,
              path_format: Optional[str] = None,
              logstore_name: Optional[str] = None,
              csv_config_columns: Optional[Sequence[str]] = None,
              json_enable_tag: Optional[bool] = None,
              csv_config_null: Optional[str] = None,
              csv_config_quote: Optional[str] = None,
              display_name: Optional[str] = None,
              csv_config_header: Optional[bool] = None,
              from_time: Optional[int] = None,
              csv_config_linefeed: Optional[str] = None,
              log_read_role_arn: Optional[str] = None,
              csv_config_escape: Optional[str] = None,
              csv_config_delimiter: Optional[str] = None,
              prefix: Optional[str] = None,
              config_columns: Optional[Sequence[OssExportConfigColumnArgs]] = None,
              role_arn: Optional[str] = None,
              suffix: Optional[str] = None,
              compress_type: Optional[str] = None)
func NewOssExport(ctx *Context, name string, args OssExportArgs, opts ...ResourceOption) (*OssExport, error)
public OssExport(string name, OssExportArgs args, CustomResourceOptions? opts = null)
public OssExport(String name, OssExportArgs args)
public OssExport(String name, OssExportArgs args, CustomResourceOptions options)
type: alicloud:log:OssExport
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. OssExportArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. OssExportArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. OssExportArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. OssExportArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. OssExportArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var ossExportResource = new AliCloud.Log.OssExport("ossExportResource", new()
{
    ExportName = "string",
    BufferInterval = 0,
    BufferSize = 0,
    TimeZone = "string",
    ProjectName = "string",
    ContentType = "string",
    Bucket = "string",
    PathFormat = "string",
    LogstoreName = "string",
    CsvConfigColumns = new[]
    {
        "string",
    },
    JsonEnableTag = false,
    CsvConfigNull = "string",
    CsvConfigQuote = "string",
    DisplayName = "string",
    CsvConfigHeader = false,
    FromTime = 0,
    CsvConfigLinefeed = "string",
    LogReadRoleArn = "string",
    CsvConfigEscape = "string",
    CsvConfigDelimiter = "string",
    Prefix = "string",
    ConfigColumns = new[]
    {
        new AliCloud.Log.Inputs.OssExportConfigColumnArgs
        {
            Name = "string",
            Type = "string",
        },
    },
    RoleArn = "string",
    Suffix = "string",
    CompressType = "string",
});
Copy
example, err := log.NewOssExport(ctx, "ossExportResource", &log.OssExportArgs{
	ExportName:     pulumi.String("string"),
	BufferInterval: pulumi.Int(0),
	BufferSize:     pulumi.Int(0),
	TimeZone:       pulumi.String("string"),
	ProjectName:    pulumi.String("string"),
	ContentType:    pulumi.String("string"),
	Bucket:         pulumi.String("string"),
	PathFormat:     pulumi.String("string"),
	LogstoreName:   pulumi.String("string"),
	CsvConfigColumns: pulumi.StringArray{
		pulumi.String("string"),
	},
	JsonEnableTag:      pulumi.Bool(false),
	CsvConfigNull:      pulumi.String("string"),
	CsvConfigQuote:     pulumi.String("string"),
	DisplayName:        pulumi.String("string"),
	CsvConfigHeader:    pulumi.Bool(false),
	FromTime:           pulumi.Int(0),
	CsvConfigLinefeed:  pulumi.String("string"),
	LogReadRoleArn:     pulumi.String("string"),
	CsvConfigEscape:    pulumi.String("string"),
	CsvConfigDelimiter: pulumi.String("string"),
	Prefix:             pulumi.String("string"),
	ConfigColumns: log.OssExportConfigColumnArray{
		&log.OssExportConfigColumnArgs{
			Name: pulumi.String("string"),
			Type: pulumi.String("string"),
		},
	},
	RoleArn:      pulumi.String("string"),
	Suffix:       pulumi.String("string"),
	CompressType: pulumi.String("string"),
})
Copy
var ossExportResource = new OssExport("ossExportResource", OssExportArgs.builder()
    .exportName("string")
    .bufferInterval(0)
    .bufferSize(0)
    .timeZone("string")
    .projectName("string")
    .contentType("string")
    .bucket("string")
    .pathFormat("string")
    .logstoreName("string")
    .csvConfigColumns("string")
    .jsonEnableTag(false)
    .csvConfigNull("string")
    .csvConfigQuote("string")
    .displayName("string")
    .csvConfigHeader(false)
    .fromTime(0)
    .csvConfigLinefeed("string")
    .logReadRoleArn("string")
    .csvConfigEscape("string")
    .csvConfigDelimiter("string")
    .prefix("string")
    .configColumns(OssExportConfigColumnArgs.builder()
        .name("string")
        .type("string")
        .build())
    .roleArn("string")
    .suffix("string")
    .compressType("string")
    .build());
Copy
oss_export_resource = alicloud.log.OssExport("ossExportResource",
    export_name="string",
    buffer_interval=0,
    buffer_size=0,
    time_zone="string",
    project_name="string",
    content_type="string",
    bucket="string",
    path_format="string",
    logstore_name="string",
    csv_config_columns=["string"],
    json_enable_tag=False,
    csv_config_null="string",
    csv_config_quote="string",
    display_name="string",
    csv_config_header=False,
    from_time=0,
    csv_config_linefeed="string",
    log_read_role_arn="string",
    csv_config_escape="string",
    csv_config_delimiter="string",
    prefix="string",
    config_columns=[{
        "name": "string",
        "type": "string",
    }],
    role_arn="string",
    suffix="string",
    compress_type="string")
Copy
const ossExportResource = new alicloud.log.OssExport("ossExportResource", {
    exportName: "string",
    bufferInterval: 0,
    bufferSize: 0,
    timeZone: "string",
    projectName: "string",
    contentType: "string",
    bucket: "string",
    pathFormat: "string",
    logstoreName: "string",
    csvConfigColumns: ["string"],
    jsonEnableTag: false,
    csvConfigNull: "string",
    csvConfigQuote: "string",
    displayName: "string",
    csvConfigHeader: false,
    fromTime: 0,
    csvConfigLinefeed: "string",
    logReadRoleArn: "string",
    csvConfigEscape: "string",
    csvConfigDelimiter: "string",
    prefix: "string",
    configColumns: [{
        name: "string",
        type: "string",
    }],
    roleArn: "string",
    suffix: "string",
    compressType: "string",
});
Copy
type: alicloud:log:OssExport
properties:
    bucket: string
    bufferInterval: 0
    bufferSize: 0
    compressType: string
    configColumns:
        - name: string
          type: string
    contentType: string
    csvConfigColumns:
        - string
    csvConfigDelimiter: string
    csvConfigEscape: string
    csvConfigHeader: false
    csvConfigLinefeed: string
    csvConfigNull: string
    csvConfigQuote: string
    displayName: string
    exportName: string
    fromTime: 0
    jsonEnableTag: false
    logReadRoleArn: string
    logstoreName: string
    pathFormat: string
    prefix: string
    projectName: string
    roleArn: string
    suffix: string
    timeZone: string
Copy

OssExport Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The OssExport resource accepts the following input properties:

Bucket This property is required. string
The name of the oss bucket.
BufferInterval This property is required. int
How often is it delivered every interval.
BufferSize This property is required. int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated in uncompressed), unit: MB.
ContentType This property is required. string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, select the corresponding parameters below.
ExportName
This property is required.
Changes to this property will trigger replacement.
string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
LogstoreName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log logstore.
PathFormat This property is required. string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
ProjectName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log project. It is unique within an Alicloud account.
TimeZone This property is required. string
The time zone used to format the time, e.g. +0800.
CompressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
ConfigColumns List<Pulumi.AliCloud.Log.Inputs.OssExportConfigColumn>
Configure columns when content_type is parquet or orc.
CsvConfigColumns List<string>
Field configuration in csv content_type.
CsvConfigDelimiter string
Separator configuration in csv content_type.
CsvConfigEscape string
escape in csv content_type.
CsvConfigHeader bool
Indicates whether to write the field name to the CSV file, the default value is false.
CsvConfigLinefeed string
lineFeed in csv content_type.
CsvConfigNull string
Invalid field content in csv content_type.
CsvConfigQuote string
Escape character in csv content_type.
DisplayName string
The display name for oss export.
FromTime int
The log from when to export to oss.
JsonEnableTag bool
Whether to deliver the label when content_type = json.
LogReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
Prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
RoleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
Suffix string
The suffix for the objects in which the shipped data is stored.
Bucket This property is required. string
The name of the oss bucket.
BufferInterval This property is required. int
How often is it delivered every interval.
BufferSize This property is required. int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated in uncompressed), unit: MB.
ContentType This property is required. string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, select the corresponding parameters below.
ExportName
This property is required.
Changes to this property will trigger replacement.
string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
LogstoreName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log logstore.
PathFormat This property is required. string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
ProjectName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log project. It is unique within an Alicloud account.
TimeZone This property is required. string
The time zone used to format the time, e.g. +0800.
CompressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
ConfigColumns []OssExportConfigColumnArgs
Configure columns when content_type is parquet or orc.
CsvConfigColumns []string
Field configuration in csv content_type.
CsvConfigDelimiter string
Separator configuration in csv content_type.
CsvConfigEscape string
escape in csv content_type.
CsvConfigHeader bool
Indicates whether to write the field name to the CSV file, the default value is false.
CsvConfigLinefeed string
lineFeed in csv content_type.
CsvConfigNull string
Invalid field content in csv content_type.
CsvConfigQuote string
Escape character in csv content_type.
DisplayName string
The display name for oss export.
FromTime int
The log from when to export to oss.
JsonEnableTag bool
Whether to deliver the label when content_type = json.
LogReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
Prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
RoleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
Suffix string
The suffix for the objects in which the shipped data is stored.
bucket This property is required. String
The name of the oss bucket.
bufferInterval This property is required. Integer
How often is it delivered every interval.
bufferSize This property is required. Integer
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated in uncompressed), unit: MB.
contentType This property is required. String
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, select the corresponding parameters below.
exportName
This property is required.
Changes to this property will trigger replacement.
String
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
logstoreName
This property is required.
Changes to this property will trigger replacement.
String
The name of the log logstore.
pathFormat This property is required. String
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
projectName
This property is required.
Changes to this property will trigger replacement.
String
The name of the log project. It is unique within an Alicloud account.
timeZone This property is required. String
The time zone used to format the time, e.g. +0800.
compressType String
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns List<OssExportConfigColumn>
Configure columns when content_type is parquet or orc.
csvConfigColumns List<String>
Field configuration in csv content_type.
csvConfigDelimiter String
Separator configuration in csv content_type.
csvConfigEscape String
escape in csv content_type.
csvConfigHeader Boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed String
lineFeed in csv content_type.
csvConfigNull String
Invalid field content in csv content_type.
csvConfigQuote String
Escape character in csv content_type.
displayName String
The display name for oss export.
fromTime Integer
The log from when to export to oss.
jsonEnableTag Boolean
Whether to deliver the label when content_type = json.
logReadRoleArn String
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
prefix String
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
roleArn String
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix String
The suffix for the objects in which the shipped data is stored.
bucket This property is required. string
The name of the oss bucket.
bufferInterval This property is required. number
How often is it delivered every interval.
bufferSize This property is required. number
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated in uncompressed), unit: MB.
contentType This property is required. string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, select the corresponding parameters below.
exportName
This property is required.
Changes to this property will trigger replacement.
string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
logstoreName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log logstore.
pathFormat This property is required. string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
projectName
This property is required.
Changes to this property will trigger replacement.
string
The name of the log project. It is unique within an Alicloud account.
timeZone This property is required. string
The time zone used to format the time, e.g. +0800.
compressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns OssExportConfigColumn[]
Configure columns when content_type is parquet or orc.
csvConfigColumns string[]
Field configuration in csv content_type.
csvConfigDelimiter string
Separator configuration in csv content_type.
csvConfigEscape string
escape in csv content_type.
csvConfigHeader boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed string
lineFeed in csv content_type.
csvConfigNull string
Invalid field content in csv content_type.
csvConfigQuote string
Escape character in csv content_type.
displayName string
The display name for oss export.
fromTime number
The log from when to export to oss.
jsonEnableTag boolean
Whether to deliver the label when content_type = json.
logReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
roleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix string
The suffix for the objects in which the shipped data is stored.
bucket This property is required. str
The name of the oss bucket.
buffer_interval This property is required. int
How often is it delivered every interval.
buffer_size This property is required. int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated in uncompressed), unit: MB.
content_type This property is required. str
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, select the corresponding parameters below.
export_name
This property is required.
Changes to this property will trigger replacement.
str
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
logstore_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the log logstore.
path_format This property is required. str
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
project_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the log project. It is unique within one Alicloud account.
time_zone This property is required. str
The time zone that is used to format the time, e.g. +0800.
compress_type str
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
config_columns Sequence[OssExportConfigColumnArgs]
Configure columns when content_type is parquet or orc.
csv_config_columns Sequence[str]
Field configuration in csv content_type.
csv_config_delimiter str
Separator configuration in csv content_type.
csv_config_escape str
escape in csv content_type.
csv_config_header bool
Indicates whether to write the field name to the CSV file, the default value is false.
csv_config_linefeed str
lineFeed in csv content_type.
csv_config_null str
Invalid field content in csv content_type.
csv_config_quote str
Escape character in csv content_type.
display_name str
The display name for oss export.
from_time int
The log from when to export to oss.
json_enable_tag bool
Whether to deliver the label when content_type = json.
log_read_role_arn str
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
prefix str
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
role_arn str
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix str
The suffix for the objects in which the shipped data is stored.
bucket This property is required. String
The name of the oss bucket.
bufferInterval This property is required. Number
How often data is delivered, i.e. the length of each delivery interval.
bufferSize This property is required. Number
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
contentType This property is required. String
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
exportName
This property is required.
Changes to this property will trigger replacement.
String
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
logstoreName
This property is required.
Changes to this property will trigger replacement.
String
The name of the log logstore.
pathFormat This property is required. String
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
projectName
This property is required.
Changes to this property will trigger replacement.
String
The name of the log project. It is unique within one Alicloud account.
timeZone This property is required. String
The time zone that is used to format the time, e.g. +0800.
compressType String
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns List<Property Map>
Configure columns when content_type is parquet or orc.
csvConfigColumns List<String>
Field configuration in csv content_type.
csvConfigDelimiter String
Separator configuration in csv content_type.
csvConfigEscape String
escape in csv content_type.
csvConfigHeader Boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed String
lineFeed in csv content_type.
csvConfigNull String
Invalid field content in csv content_type.
csvConfigQuote String
Escape character in csv content_type.
displayName String
The display name for oss export.
fromTime Number
The log from when to export to oss.
jsonEnableTag Boolean
Whether to deliver the label when content_type = json.
logReadRoleArn String
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
prefix String
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
roleArn String
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix String
The suffix for the objects in which the shipped data is stored.

Outputs

All input properties are implicitly available as output properties. Additionally, the OssExport resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.

Look up Existing OssExport Resource

Get an existing OssExport resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: OssExportState, opts?: CustomResourceOptions): OssExport
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        bucket: Optional[str] = None,
        buffer_interval: Optional[int] = None,
        buffer_size: Optional[int] = None,
        compress_type: Optional[str] = None,
        config_columns: Optional[Sequence[OssExportConfigColumnArgs]] = None,
        content_type: Optional[str] = None,
        csv_config_columns: Optional[Sequence[str]] = None,
        csv_config_delimiter: Optional[str] = None,
        csv_config_escape: Optional[str] = None,
        csv_config_header: Optional[bool] = None,
        csv_config_linefeed: Optional[str] = None,
        csv_config_null: Optional[str] = None,
        csv_config_quote: Optional[str] = None,
        display_name: Optional[str] = None,
        export_name: Optional[str] = None,
        from_time: Optional[int] = None,
        json_enable_tag: Optional[bool] = None,
        log_read_role_arn: Optional[str] = None,
        logstore_name: Optional[str] = None,
        path_format: Optional[str] = None,
        prefix: Optional[str] = None,
        project_name: Optional[str] = None,
        role_arn: Optional[str] = None,
        suffix: Optional[str] = None,
        time_zone: Optional[str] = None) -> OssExport
func GetOssExport(ctx *Context, name string, id IDInput, state *OssExportState, opts ...ResourceOption) (*OssExport, error)
public static OssExport Get(string name, Input<string> id, OssExportState? state, CustomResourceOptions? opts = null)
public static OssExport get(String name, Output<String> id, OssExportState state, CustomResourceOptions options)
resources:  _:    type: alicloud:log:OssExport    get:      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Bucket string
The name of the oss bucket.
BufferInterval int
How often data is delivered, i.e. the length of each delivery interval.
BufferSize int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
CompressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
ConfigColumns List<Pulumi.AliCloud.Log.Inputs.OssExportConfigColumn>
Configure columns when content_type is parquet or orc.
ContentType string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
CsvConfigColumns List<string>
Field configuration in csv content_type.
CsvConfigDelimiter string
Separator configuration in csv content_type.
CsvConfigEscape string
escape in csv content_type.
CsvConfigHeader bool
Indicates whether to write the field name to the CSV file, the default value is false.
CsvConfigLinefeed string
lineFeed in csv content_type.
CsvConfigNull string
Invalid field content in csv content_type.
CsvConfigQuote string
Escape character in csv content_type.
DisplayName string
The display name for oss export.
ExportName Changes to this property will trigger replacement. string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
FromTime int
The log from when to export to oss.
JsonEnableTag bool
Whether to deliver the label when content_type = json.
LogReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
LogstoreName Changes to this property will trigger replacement. string
The name of the log logstore.
PathFormat string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
Prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
ProjectName Changes to this property will trigger replacement. string
The name of the log project. It is unique within one Alicloud account.
RoleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
Suffix string
The suffix for the objects in which the shipped data is stored.
TimeZone string
The time zone that is used to format the time, e.g. +0800.
Bucket string
The name of the oss bucket.
BufferInterval int
How often data is delivered, i.e. the length of each delivery interval.
BufferSize int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
CompressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
ConfigColumns []OssExportConfigColumnArgs
Configure columns when content_type is parquet or orc.
ContentType string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
CsvConfigColumns []string
Field configuration in csv content_type.
CsvConfigDelimiter string
Separator configuration in csv content_type.
CsvConfigEscape string
escape in csv content_type.
CsvConfigHeader bool
Indicates whether to write the field name to the CSV file, the default value is false.
CsvConfigLinefeed string
lineFeed in csv content_type.
CsvConfigNull string
Invalid field content in csv content_type.
CsvConfigQuote string
Escape character in csv content_type.
DisplayName string
The display name for oss export.
ExportName Changes to this property will trigger replacement. string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
FromTime int
The log from when to export to oss.
JsonEnableTag bool
Whether to deliver the label when content_type = json.
LogReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
LogstoreName Changes to this property will trigger replacement. string
The name of the log logstore.
PathFormat string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
Prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
ProjectName Changes to this property will trigger replacement. string
The name of the log project. It is unique within one Alicloud account.
RoleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
Suffix string
The suffix for the objects in which the shipped data is stored.
TimeZone string
The time zone that is used to format the time, e.g. +0800.
bucket String
The name of the oss bucket.
bufferInterval Integer
How often data is delivered, i.e. the length of each delivery interval.
bufferSize Integer
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
compressType String
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns List<OssExportConfigColumn>
Configure columns when content_type is parquet or orc.
contentType String
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
csvConfigColumns List<String>
Field configuration in csv content_type.
csvConfigDelimiter String
Separator configuration in csv content_type.
csvConfigEscape String
escape in csv content_type.
csvConfigHeader Boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed String
lineFeed in csv content_type.
csvConfigNull String
Invalid field content in csv content_type.
csvConfigQuote String
Escape character in csv content_type.
displayName String
The display name for oss export.
exportName Changes to this property will trigger replacement. String
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
fromTime Integer
The log from when to export to oss.
jsonEnableTag Boolean
Whether to deliver the label when content_type = json.
logReadRoleArn String
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
logstoreName Changes to this property will trigger replacement. String
The name of the log logstore.
pathFormat String
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
prefix String
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
projectName Changes to this property will trigger replacement. String
The name of the log project. It is unique within one Alicloud account.
roleArn String
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix String
The suffix for the objects in which the shipped data is stored.
timeZone String
The time zone that is used to format the time, e.g. +0800.
bucket string
The name of the oss bucket.
bufferInterval number
How often data is delivered, i.e. the length of each delivery interval.
bufferSize number
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
compressType string
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns OssExportConfigColumn[]
Configure columns when content_type is parquet or orc.
contentType string
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
csvConfigColumns string[]
Field configuration in csv content_type.
csvConfigDelimiter string
Separator configuration in csv content_type.
csvConfigEscape string
escape in csv content_type.
csvConfigHeader boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed string
lineFeed in csv content_type.
csvConfigNull string
Invalid field content in csv content_type.
csvConfigQuote string
Escape character in csv content_type.
displayName string
The display name for oss export.
exportName Changes to this property will trigger replacement. string
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
fromTime number
The log from when to export to oss.
jsonEnableTag boolean
Whether to deliver the label when content_type = json.
logReadRoleArn string
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
logstoreName Changes to this property will trigger replacement. string
The name of the log logstore.
pathFormat string
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
prefix string
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
projectName Changes to this property will trigger replacement. string
The name of the log project. It is unique within one Alicloud account.
roleArn string
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix string
The suffix for the objects in which the shipped data is stored.
timeZone string
The time zone that is used to format the time, e.g. +0800.
bucket str
The name of the oss bucket.
buffer_interval int
How often data is delivered, i.e. the length of each delivery interval.
buffer_size int
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
compress_type str
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
config_columns Sequence[OssExportConfigColumnArgs]
Configure columns when content_type is parquet or orc.
content_type str
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
csv_config_columns Sequence[str]
Field configuration in csv content_type.
csv_config_delimiter str
Separator configuration in csv content_type.
csv_config_escape str
escape in csv content_type.
csv_config_header bool
Indicates whether to write the field name to the CSV file, the default value is false.
csv_config_linefeed str
lineFeed in csv content_type.
csv_config_null str
Invalid field content in csv content_type.
csv_config_quote str
Escape character in csv content_type.
display_name str
The display name for oss export.
export_name Changes to this property will trigger replacement. str
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
from_time int
The log from when to export to oss.
json_enable_tag bool
Whether to deliver the label when content_type = json.
log_read_role_arn str
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
logstore_name Changes to this property will trigger replacement. str
The name of the log logstore.
path_format str
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
prefix str
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
project_name Changes to this property will trigger replacement. str
The name of the log project. It is unique within one Alicloud account.
role_arn str
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix str
The suffix for the objects in which the shipped data is stored.
time_zone str
The time zone that is used to format the time, e.g. +0800.
bucket String
The name of the oss bucket.
bufferInterval Number
How often data is delivered, i.e. the length of each delivery interval.
bufferSize Number
Automatically control the creation interval of delivery tasks and set the upper limit of an OSS object size (calculated on the uncompressed data), unit: MB.
compressType String
OSS data storage compression method, support: none, snappy, zstd, gzip. Among them, none means that the original data is not compressed, and snappy means that the data is compressed using the snappy algorithm, which can reduce the storage space usage of the OSS Bucket.
configColumns List<Property Map>
Configure columns when content_type is parquet or orc.
contentType String
Storage format, only supports four types: json, parquet, orc, csv. Depending on the chosen format, please configure the corresponding parameters below.
csvConfigColumns List<String>
Field configuration in csv content_type.
csvConfigDelimiter String
Separator configuration in csv content_type.
csvConfigEscape String
escape in csv content_type.
csvConfigHeader Boolean
Indicates whether to write the field name to the CSV file, the default value is false.
csvConfigLinefeed String
lineFeed in csv content_type.
csvConfigNull String
Invalid field content in csv content_type.
csvConfigQuote String
Escape character in csv content_type.
displayName String
The display name for oss export.
exportName Changes to this property will trigger replacement. String
Delivery configuration name, it can only contain lowercase letters, numbers, dashes - and underscores _. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
fromTime Number
The log from when to export to oss.
jsonEnableTag Boolean
Whether to deliver the label when content_type = json.
logReadRoleArn String
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole, if log_read_role_arn is not set, role_arn is used to read logstore.
logstoreName Changes to this property will trigger replacement. String
The name of the log logstore.
pathFormat String
The OSS Bucket directory is dynamically generated according to the creation time of the export task, it cannot start with a forward slash /, the default value is %Y/%m/%d/%H/%M.
prefix String
The data synchronized from Log Service to OSS will be stored in this directory of Bucket.
projectName Changes to this property will trigger replacement. String
The name of the log project. It is unique within one Alicloud account.
roleArn String
Used to write to oss bucket, the OSS Bucket owner creates the role mark which has the oss bucket write policy, such as acs:ram::13234:role/logrole.
suffix String
The suffix for the objects in which the shipped data is stored.
timeZone String
The time zone that is used to format the time, e.g. +0800.

Supporting Types

OssExportConfigColumn
, OssExportConfigColumnArgs

Name This property is required. string
The name of the key.
Type This property is required. string
Type of configuration name.
Name This property is required. string
The name of the key.
Type This property is required. string
Type of configuration name.
name This property is required. String
The name of the key.
type This property is required. String
Type of configuration name.
name This property is required. string
The name of the key.
type This property is required. string
Type of configuration name.
name This property is required. str
The name of the key.
type This property is required. str
Type of configuration name.
name This property is required. String
The name of the key.
type This property is required. String
Type of configuration name.

Import

Log oss export can be imported using the id or name, e.g.

$ pulumi import alicloud:log/ossExport:OssExport example tf-log-project:tf-log-logstore:tf-log-export
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Alibaba Cloud pulumi/pulumi-alicloud
License
Apache-2.0
Notes
This Pulumi package is based on the alicloud Terraform Provider.