Cloudflare v6.0.1 published on Wednesday, Apr 16, 2025 by Pulumi
cloudflare.getLogpushJobs
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";
const exampleLogpushJobs = cloudflare.getLogpushJobs({
    accountId: "account_id",
    zoneId: "zone_id",
});
import pulumi
import pulumi_cloudflare as cloudflare
example_logpush_jobs = cloudflare.get_logpush_jobs(account_id="account_id",
    zone_id="zone_id")
package main

import (
    "github.com/pulumi/pulumi-cloudflare/sdk/v6/go/cloudflare"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := cloudflare.LookupLogpushJobs(ctx, &cloudflare.LookupLogpushJobsArgs{
            AccountId: pulumi.StringRef("account_id"),
            ZoneId:    pulumi.StringRef("zone_id"),
        }, nil)
        if err != nil {
            return err
        }
        return nil
    })
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Cloudflare = Pulumi.Cloudflare;

return await Deployment.RunAsync(() =>
{
    var exampleLogpushJobs = Cloudflare.GetLogpushJobs.Invoke(new()
    {
        AccountId = "account_id",
        ZoneId = "zone_id",
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.cloudflare.CloudflareFunctions;
import com.pulumi.cloudflare.inputs.GetLogpushJobsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var exampleLogpushJobs = CloudflareFunctions.getLogpushJobs(GetLogpushJobsArgs.builder()
            .accountId("account_id")
            .zoneId("zone_id")
            .build());
    }
}
variables:
  exampleLogpushJobs:
    fn::invoke:
      function: cloudflare:getLogpushJobs
      arguments:
        accountId: account_id
        zoneId: zone_id
Using getLogpushJobs
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getLogpushJobs(args: GetLogpushJobsArgs, opts?: InvokeOptions): Promise<GetLogpushJobsResult>
function getLogpushJobsOutput(args: GetLogpushJobsOutputArgs, opts?: InvokeOptions): Output<GetLogpushJobsResult>
def get_logpush_jobs(account_id: Optional[str] = None,
                     max_items: Optional[int] = None,
                     zone_id: Optional[str] = None,
                     opts: Optional[InvokeOptions] = None) -> GetLogpushJobsResult
def get_logpush_jobs_output(account_id: Optional[pulumi.Input[str]] = None,
                            max_items: Optional[pulumi.Input[int]] = None,
                            zone_id: Optional[pulumi.Input[str]] = None,
                            opts: Optional[InvokeOptions] = None) -> Output[GetLogpushJobsResult]
func LookupLogpushJobs(ctx *Context, args *LookupLogpushJobsArgs, opts ...InvokeOption) (*LookupLogpushJobsResult, error)
func LookupLogpushJobsOutput(ctx *Context, args *LookupLogpushJobsOutputArgs, opts ...InvokeOption) LookupLogpushJobsResultOutput
> Note: This function is named LookupLogpushJobs in the Go SDK.
public static class GetLogpushJobs
{
    public static Task<GetLogpushJobsResult> InvokeAsync(GetLogpushJobsArgs args, InvokeOptions? opts = null)
    public static Output<GetLogpushJobsResult> Invoke(GetLogpushJobsInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetLogpushJobsResult> getLogpushJobs(GetLogpushJobsArgs args, InvokeOptions options)
public static Output<GetLogpushJobsResult> getLogpushJobs(GetLogpushJobsArgs args, InvokeOptions options)
fn::invoke:
  function: cloudflare:index/getLogpushJobs:getLogpushJobs
  arguments:
    # arguments dictionary
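For example, the output form is convenient when the zone ID is itself an Output or comes from stack configuration. The following is a minimal TypeScript sketch; the config key "zoneId" is a hypothetical name chosen for illustration:

import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";

// Read the zone ID from stack configuration (hypothetical key "zoneId").
const config = new pulumi.Config();
const zoneId = config.require("zoneId");

// The output form returns an Output-wrapped result instead of a Promise.
const jobs = cloudflare.getLogpushJobsOutput({ zoneId: zoneId });

// Downstream values are derived with apply().
export const logpushJobCount = jobs.apply(j => j.results.length);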
The following arguments are supported:
- account_id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- max_items int - Max items to fetch. Default: 1000.
- zone_id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
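As a sketch of how these arguments combine, the TypeScript invocation below scopes the lookup to a single zone and caps the number of returned jobs via maxItems ("zone_id" is a placeholder value, as in the examples above):

import * as cloudflare from "@pulumi/cloudflare";

// Fetch at most 50 Logpush jobs for one zone. accountId and zoneId are
// mutually exclusive, so only the zone scope is supplied here.
const zoneJobs = cloudflare.getLogpushJobs({
    zoneId: "zone_id",
    maxItems: 50,
});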
getLogpushJobs Result
The following output properties are available:
C#:
- Id string - The provider-assigned unique ID for this managed resource.
- Results List<GetLogpushJobsResult> - The items returned by the data source.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- MaxItems int - Max items to fetch. Default: 1000.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Go:
- Id string - The provider-assigned unique ID for this managed resource.
- Results []GetLogpushJobsResult - The items returned by the data source.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- MaxItems int - Max items to fetch. Default: 1000.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Java:
- id String - The provider-assigned unique ID for this managed resource.
- results List<GetLogpushJobsResult> - The items returned by the data source.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- maxItems Integer - Max items to fetch. Default: 1000.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
TypeScript:
- id string - The provider-assigned unique ID for this managed resource.
- results GetLogpushJobsResult[] - The items returned by the data source.
- accountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- maxItems number - Max items to fetch. Default: 1000.
- zoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Python:
- id str - The provider-assigned unique ID for this managed resource.
- results Sequence[GetLogpushJobsResult] - The items returned by the data source.
- account_id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- max_items int - Max items to fetch. Default: 1000.
- zone_id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
YAML:
- id String - The provider-assigned unique ID for this managed resource.
- results List<Property Map> - The items returned by the data source.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- maxItems Number - Max items to fetch. Default: 1000.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
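The results list is typically what downstream code consumes; id only identifies the data source invocation itself. A small TypeScript sketch that surfaces the dataset each returned job pushes (placeholder zone ID, as above):

import * as cloudflare from "@pulumi/cloudflare";

const jobs = cloudflare.getLogpushJobsOutput({ zoneId: "zone_id" });

// Each entry in "results" describes one Logpush job; export the dataset of each.
export const jobDatasets = jobs.results.apply(results => results.map(job => job.dataset));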
Supporting Types
GetLogpushJobsResult
C#:
- Dataset string (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DestinationConf string (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool (required) - Flag that indicates if the job is enabled.
- ErrorMessage string (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- Frequency string (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int (required) - Unique ID of the job.
- Kind string (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- LastComplete string (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- LastError string (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- LogpullOptions string (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- MaxUploadBytes int (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- MaxUploadIntervalSeconds int (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- MaxUploadRecords int (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- Name string (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushJobsResultOutputOptions (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
Go:
- Dataset string (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DestinationConf string (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool (required) - Flag that indicates if the job is enabled.
- ErrorMessage string (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- Frequency string (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int (required) - Unique ID of the job.
- Kind string (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- LastComplete string (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- LastError string (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- LogpullOptions string (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- MaxUploadBytes int (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- MaxUploadIntervalSeconds int (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- MaxUploadRecords int (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- Name string (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushJobsResultOutputOptions (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
Java:
- dataset String (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf String (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean (required) - Flag that indicates if the job is enabled.
- errorMessage String (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- frequency String (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Integer (required) - Unique ID of the job.
- kind String (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete String (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- lastError String (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- logpullOptions String (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- maxUploadBytes Integer (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- maxUploadIntervalSeconds Integer (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- maxUploadRecords Integer (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- name String (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushJobsResultOutputOptions (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
TypeScript:
- dataset string (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf string (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled boolean (required) - Flag that indicates if the job is enabled.
- errorMessage string (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- frequency string (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id number (required) - Unique ID of the job.
- kind string (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete string (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- lastError string (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- logpullOptions string (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- maxUploadBytes number (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- maxUploadIntervalSeconds number (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- maxUploadRecords number (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- name string (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushJobsResultOutputOptions (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
Python:
- dataset str (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination_conf str (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled bool (required) - Flag that indicates if the job is enabled.
- error_message str (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- frequency str (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id int (required) - Unique ID of the job.
- kind str (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- last_complete str (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- last_error str (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- logpull_options str (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- max_upload_bytes int (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- max_upload_interval_seconds int (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- max_upload_records int (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- name str (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output_options GetLogpushJobsResultOutputOptions (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
YAML:
- dataset String (required) - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf String (required) - Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean (required) - Flag that indicates if the job is enabled.
- errorMessage String (required) - If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, error_message and last_error are set to null.
- frequency String (required) - This field is deprecated. Please use the max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Number (required) - Unique ID of the job.
- kind String (required) - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete String (required) - Records the last time for which logs have been successfully pushed. If the last successful push was for the log range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z, the value of this field will be 2018-07-23T10:01:00Z. If the job has never run, or has just been enabled and hasn't run yet, the field will be empty.
- lastError String (required) - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since the last failure. See also the error_message field.
- logpullOptions String (required) - This field is deprecated. Use output_options instead. Configuration string that specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep making this call for you, setting start and end times appropriately.
- maxUploadBytes Number (required) - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as their kind.
- maxUploadIntervalSeconds Number (required) - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as their kind.
- maxUploadRecords Number (required) - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as their kind.
- name String (required) - Optional human-readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions Property Map (required) - The structured replacement for logpull_options. When this field is included, the logpull_options field will be ignored.
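As an illustration of consuming these per-job properties, the TypeScript sketch below uses the direct (Promise-returning) form to report the destinations of jobs that are enabled but currently failing, based on errorMessage (placeholder zone ID, as above):

import * as cloudflare from "@pulumi/cloudflare";

// The direct form returns a Promise, so ordinary array methods can be used
// once it resolves. Pulumi stack exports accept Promise values.
export const failingDestinations = cloudflare.getLogpushJobs({ zoneId: "zone_id" })
    .then(jobs => jobs.results
        .filter(job => job.enabled && job.errorMessage)
        .map(job => job.destinationConf));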
GetLogpushJobsResultOutputOptions
C#:
- BatchPrefix string (required) - String to be prepended before each batch.
- BatchSuffix string (required) - String to be appended after each batch.
- Cve202144228 bool (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string (required) - String to join fields. This field will be ignored when record_template is set.
- FieldNames List<string> (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- RecordDelimiter string (required) - String to be inserted between records as a separator.
- RecordPrefix string (required) - String to be prepended before each record.
- RecordSuffix string (required) - String to be appended after each record.
- RecordTemplate string (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate double (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Go:
- BatchPrefix string (required) - String to be prepended before each batch.
- BatchSuffix string (required) - String to be appended after each batch.
- Cve202144228 bool (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string (required) - String to join fields. This field will be ignored when record_template is set.
- FieldNames []string (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- RecordDelimiter string (required) - String to be inserted between records as a separator.
- RecordPrefix string (required) - String to be prepended before each record.
- RecordSuffix string (required) - String to be appended after each record.
- RecordTemplate string (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate float64 (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Java:
- batchPrefix String (required) - String to be prepended before each batch.
- batchSuffix String (required) - String to be appended after each batch.
- cve202144228 Boolean (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String (required) - String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String> (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- recordDelimiter String (required) - String to be inserted between records as a separator.
- recordPrefix String (required) - String to be prepended before each record.
- recordSuffix String (required) - String to be appended after each record.
- recordTemplate String (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Double (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
TypeScript:
- batchPrefix string (required) - String to be prepended before each batch.
- batchSuffix string (required) - String to be appended after each batch.
- cve202144228 boolean (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter string (required) - String to join fields. This field will be ignored when record_template is set.
- fieldNames string[] (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType string (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- recordDelimiter string (required) - String to be inserted between records as a separator.
- recordPrefix string (required) - String to be prepended before each record.
- recordSuffix string (required) - String to be appended after each record.
- recordTemplate string (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate number (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat string (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Python:
- batch_prefix str (required) - String to be prepended before each batch.
- batch_suffix str (required) - String to be appended after each batch.
- cve202144228 bool (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- field_delimiter str (required) - String to join fields. This field will be ignored when record_template is set.
- field_names Sequence[str] (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output_type str (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- record_delimiter str (required) - String to be inserted between records as a separator.
- record_prefix str (required) - String to be prepended before each record.
- record_suffix str (required) - String to be appended after each record.
- record_template str (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sample_rate float (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestamp_format str (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
YAML:
- batchPrefix String (required) - String to be prepended before each batch.
- batchSuffix String (required) - String to be appended after each batch.
- cve202144228 Boolean (required) - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String (required) - String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String> (required) - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String (required) - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, differ between output types. Available values: "ndjson", "csv".
- recordDelimiter String (required) - String to be inserted between records as a separator.
- recordPrefix String (required) - String to be prepended before each record.
- recordSuffix String (required) - String to be appended after each record.
- recordTemplate String (required) - String to use as a template for each record instead of the default comma-separated list. All fields used in the template must also be present in field_names, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Number (required) - Floating-point number to specify the sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String (required) - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
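A short TypeScript sketch reading the structured output options of the first returned job, for example to check which format and fields a job emits (placeholder zone ID, as above; optional chaining guards against an empty result list):

import * as cloudflare from "@pulumi/cloudflare";

const jobs = cloudflare.getLogpushJobsOutput({ zoneId: "zone_id" });

// outputType is "ndjson" or "csv"; fieldNames lists the log fields the job includes.
export const firstJobOutputType = jobs.results.apply(rs => rs[0]?.outputOptions?.outputType);
export const firstJobFieldNames = jobs.results.apply(rs => rs[0]?.outputOptions?.fieldNames);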
Package Details
- Repository: Cloudflare pulumi/pulumi-cloudflare
- License: Apache-2.0
- Notes: This Pulumi package is based on the cloudflare Terraform Provider.