cloudflare.getLogpushJobs


Cloudflare v6.0.1 published on Wednesday, Apr 16, 2025 by Pulumi

Example Usage

TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";

const exampleLogpushJobs = cloudflare.getLogpushJobs({
    accountId: "account_id",
    zoneId: "zone_id",
});

Python
import pulumi
import pulumi_cloudflare as cloudflare

example_logpush_jobs = cloudflare.get_logpush_jobs(account_id="account_id",
    zone_id="zone_id")

Go
package main

import (
	"github.com/pulumi/pulumi-cloudflare/sdk/v6/go/cloudflare"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := cloudflare.LookupLogpushJobs(ctx, &cloudflare.LookupLogpushJobsArgs{
			AccountId: pulumi.StringRef("account_id"),
			ZoneId:    pulumi.StringRef("zone_id"),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}

C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Cloudflare = Pulumi.Cloudflare;

return await Deployment.RunAsync(() => 
{
    var exampleLogpushJobs = Cloudflare.GetLogpushJobs.Invoke(new()
    {
        AccountId = "account_id",
        ZoneId = "zone_id",
    });

});

Java
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.cloudflare.CloudflareFunctions;
import com.pulumi.cloudflare.inputs.GetLogpushJobsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var exampleLogpushJobs = CloudflareFunctions.getLogpushJobs(GetLogpushJobsArgs.builder()
            .accountId("account_id")
            .zoneId("zone_id")
            .build());

    }
}

YAML
variables:
  exampleLogpushJobs:
    fn::invoke:
      function: cloudflare:getLogpushJobs
      arguments:
        accountId: account_id
        zoneId: zone_id

Using getLogpushJobs

Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

TypeScript
function getLogpushJobs(args: GetLogpushJobsArgs, opts?: InvokeOptions): Promise<GetLogpushJobsResult>
function getLogpushJobsOutput(args: GetLogpushJobsOutputArgs, opts?: InvokeOptions): Output<GetLogpushJobsResult>

Python
def get_logpush_jobs(account_id: Optional[str] = None,
                     max_items: Optional[int] = None,
                     zone_id: Optional[str] = None,
                     opts: Optional[InvokeOptions] = None) -> GetLogpushJobsResult
def get_logpush_jobs_output(account_id: Optional[pulumi.Input[str]] = None,
                     max_items: Optional[pulumi.Input[int]] = None,
                     zone_id: Optional[pulumi.Input[str]] = None,
                     opts: Optional[InvokeOptions] = None) -> Output[GetLogpushJobsResult]

Go
func LookupLogpushJobs(ctx *Context, args *LookupLogpushJobsArgs, opts ...InvokeOption) (*LookupLogpushJobsResult, error)
func LookupLogpushJobsOutput(ctx *Context, args *LookupLogpushJobsOutputArgs, opts ...InvokeOption) LookupLogpushJobsResultOutput

> Note: This function is named LookupLogpushJobs in the Go SDK.

C#
public static class GetLogpushJobs
{
    public static Task<GetLogpushJobsResult> InvokeAsync(GetLogpushJobsArgs args, InvokeOptions? opts = null)
    public static Output<GetLogpushJobsResult> Invoke(GetLogpushJobsInvokeArgs args, InvokeOptions? opts = null)
}

Java
public static CompletableFuture<GetLogpushJobsResult> getLogpushJobs(GetLogpushJobsArgs args, InvokeOptions options)
public static Output<GetLogpushJobsResult> getLogpushJobs(GetLogpushJobsArgs args, InvokeOptions options)

YAML
fn::invoke:
  function: cloudflare:index/getLogpushJobs:getLogpushJobs
  arguments:
    # arguments dictionary
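
As a minimal TypeScript sketch of the two invocation forms described above, the snippet below assumes a stack config key named zoneId (a placeholder introduced for this example): the direct form returns a Promise, while the output form accepts Input-wrapped arguments and returns an Output whose properties can be lifted with apply.

import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";

const config = new pulumi.Config();
// "zoneId" is a hypothetical config key used only for this sketch.
const zoneId = config.require("zoneId");

// Direct form: plain arguments, result delivered as a Promise.
const direct = cloudflare.getLogpushJobs({ zoneId: zoneId });
export const directJobCount = direct.then(r => r.results.length);

// Output form: Input-wrapped arguments, result delivered as an Output.
const viaOutput = cloudflare.getLogpushJobsOutput({ zoneId: pulumi.output(zoneId) });
export const outputJobCount = viaOutput.results.apply(jobs => jobs.length);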

The following arguments are supported:

C#
AccountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
MaxItems int
Max items to fetch, default: 1000
ZoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Go
AccountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
MaxItems int
Max items to fetch, default: 1000
ZoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Java
accountId String
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems Integer
Max items to fetch, default: 1000
zoneId String
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

TypeScript
accountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems number
Max items to fetch, default: 1000
zoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Python
account_id str
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
max_items int
Max items to fetch, default: 1000
zone_id str
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

YAML
accountId String
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems Number
Max items to fetch, default: 1000
zoneId String
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
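
Because the Account ID and Zone ID are documented as mutually exclusive, a typical call passes exactly one of them. The TypeScript sketch below (placeholder IDs) shows a zone-scoped and an account-scoped lookup, with maxItems set explicitly on the first call.

import * as cloudflare from "@pulumi/cloudflare";

// Zone-scoped lookup: zoneId only, with an explicit cap on returned items
// (maxItems defaults to 1000 when omitted).
const zoneJobs = cloudflare.getLogpushJobs({
    zoneId: "zone_id",
    maxItems: 50,
});

// Account-scoped lookup: accountId only.
const accountJobs = cloudflare.getLogpushJobs({
    accountId: "account_id",
});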

getLogpushJobs Result

The following output properties are available:

C#
Id string
The provider-assigned unique ID for this managed resource.
Results List<GetLogpushJobsResult>
The items returned by the data source
AccountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
MaxItems int
Max items to fetch, default: 1000
ZoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Go
Id string
The provider-assigned unique ID for this managed resource.
Results []GetLogpushJobsResult
The items returned by the data source
AccountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
MaxItems int
Max items to fetch, default: 1000
ZoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Java
id String
The provider-assigned unique ID for this managed resource.
results List<GetLogpushJobsResult>
The items returned by the data source
accountId String
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems Integer
Max items to fetch, default: 1000
zoneId String
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

TypeScript
id string
The provider-assigned unique ID for this managed resource.
results GetLogpushJobsResult[]
The items returned by the data source
accountId string
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems number
Max items to fetch, default: 1000
zoneId string
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

Python
id str
The provider-assigned unique ID for this managed resource.
results Sequence[GetLogpushJobsResult]
The items returned by the data source
account_id str
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
max_items int
Max items to fetch, default: 1000
zone_id str
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.

YAML
id String
The provider-assigned unique ID for this managed resource.
results List<Property Map>
The items returned by the data source
accountId String
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
maxItems Number
Max items to fetch, default: 1000
zoneId String
The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
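
As a small TypeScript sketch (placeholder zone ID), the results list above can be projected into a stack output, here exporting the id, name, and dataset of each returned job.

import * as cloudflare from "@pulumi/cloudflare";

const jobs = cloudflare.getLogpushJobsOutput({ zoneId: "zone_id" });

// Summarize each returned Logpush job using fields from the result items.
export const jobSummaries = jobs.results.apply(results =>
    results.map(job => ({ id: job.id, name: job.name, dataset: job.dataset })));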

Supporting Types

GetLogpushJobsResult

C#
Dataset This property is required. string
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
DestinationConf This property is required. string
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
Enabled This property is required. bool
Flag that indicates if the job is enabled.
ErrorMessage This property is required. string
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
Frequency This property is required. string
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
Id This property is required. int
Unique id of the job.
Kind This property is required. string
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
LastComplete This property is required. string
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
LastError This property is required. string
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
LogpullOptions This property is required. string
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
MaxUploadBytes This property is required. int
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
MaxUploadIntervalSeconds This property is required. int
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
MaxUploadRecords This property is required. int
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
Name This property is required. string
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
OutputOptions This property is required. GetLogpushJobsResultOutputOptions
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.

Go
Dataset This property is required. string
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
DestinationConf This property is required. string
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
Enabled This property is required. bool
Flag that indicates if the job is enabled.
ErrorMessage This property is required. string
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
Frequency This property is required. string
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
Id This property is required. int
Unique id of the job.
Kind This property is required. string
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
LastComplete This property is required. string
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
LastError This property is required. string
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
LogpullOptions This property is required. string
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
MaxUploadBytes This property is required. int
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
MaxUploadIntervalSeconds This property is required. int
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
MaxUploadRecords This property is required. int
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
Name This property is required. string
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
OutputOptions This property is required. GetLogpushJobsResultOutputOptions
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.

Java
dataset This property is required. String
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
destinationConf This property is required. String
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
enabled This property is required. Boolean
Flag that indicates if the job is enabled.
errorMessage This property is required. String
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
frequency This property is required. String
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
id This property is required. Integer
Unique id of the job.
kind This property is required. String
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
lastComplete This property is required. String
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
lastError This property is required. String
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
logpullOptions This property is required. String
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
maxUploadBytes This property is required. Integer
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
maxUploadIntervalSeconds This property is required. Integer
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
maxUploadRecords This property is required. Integer
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
name This property is required. String
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
outputOptions This property is required. GetLogpushJobsResultOutputOptions
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.

TypeScript
dataset This property is required. string
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
destinationConf This property is required. string
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
enabled This property is required. boolean
Flag that indicates if the job is enabled.
errorMessage This property is required. string
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
frequency This property is required. string
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
id This property is required. number
Unique id of the job.
kind This property is required. string
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
lastComplete This property is required. string
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
lastError This property is required. string
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
logpullOptions This property is required. string
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
maxUploadBytes This property is required. number
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
maxUploadIntervalSeconds This property is required. number
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
maxUploadRecords This property is required. number
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
name This property is required. string
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
outputOptions This property is required. GetLogpushJobsResultOutputOptions
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.

Python
dataset This property is required. str
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
destination_conf This property is required. str
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
enabled This property is required. bool
Flag that indicates if the job is enabled.
error_message This property is required. str
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
frequency This property is required. str
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
id This property is required. int
Unique id of the job.
kind This property is required. str
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
last_complete This property is required. str
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
last_error This property is required. str
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
logpull_options This property is required. str
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
max_upload_bytes This property is required. int
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
max_upload_interval_seconds This property is required. int
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
max_upload_records This property is required. int
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
name This property is required. str
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
output_options This property is required. GetLogpushJobsResultOutputOptions
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.

YAML
dataset This property is required. String
Name of the dataset. A list of supported datasets can be found on the Developer Docs.
destinationConf This property is required. String
Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
enabled This property is required. Boolean
Flag that indicates if the job is enabled.
errorMessage This property is required. String
If not null, the job is currently failing. Failures are usually repetitive (for example, no permission to write to the destination bucket). Only the last failure is recorded. On successful execution of a job, the error_message and last_error fields are set to null.
frequency This property is required. String
This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
id This property is required. Number
Unique id of the job.
kind This property is required. String
The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
lastComplete This property is required. String
Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
lastError This property is required. String
Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
logpullOptions This property is required. String
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
maxUploadBytes This property is required. Number
The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
maxUploadIntervalSeconds This property is required. Number
The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
maxUploadRecords This property is required. Number
The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
name This property is required. String
Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
outputOptions This property is required. Property Map
The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
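
One possible use of these result fields is to surface jobs that are currently failing, since a non-null lastError together with errorMessage records the most recent failure. The TypeScript sketch below uses a placeholder account ID.

import * as cloudflare from "@pulumi/cloudflare";

const jobs = cloudflare.getLogpushJobsOutput({ accountId: "account_id" });

// A job with a non-empty lastError is currently failing; errorMessage holds the
// most recent failure reason (both are cleared after a successful run).
export const failingJobs = jobs.results.apply(results =>
    results
        .filter(job => job.enabled && !!job.lastError)
        .map(job => ({ id: job.id, name: job.name, error: job.errorMessage })));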

GetLogpushJobsResultOutputOptions

C#
BatchPrefix This property is required. string
String to be prepended before each batch.
BatchSuffix This property is required. string
String to be appended after each batch.
Cve202144228 This property is required. bool
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
FieldDelimiter This property is required. string
String to join fields. This field will be ignored when record_template is set.
FieldNames This property is required. List<string>
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
OutputType This property is required. string
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
RecordDelimiter This property is required. string
String to be inserted in-between the records as separator.
RecordPrefix This property is required. string
String to be prepended before each record.
RecordSuffix This property is required. string
String to be appended after each record.
RecordTemplate This property is required. string
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
SampleRate This property is required. double
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
TimestampFormat This property is required. string
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".

Go
BatchPrefix This property is required. string
String to be prepended before each batch.
BatchSuffix This property is required. string
String to be appended after each batch.
Cve202144228 This property is required. bool
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
FieldDelimiter This property is required. string
String to join fields. This field will be ignored when record_template is set.
FieldNames This property is required. []string
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
OutputType This property is required. string
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
RecordDelimiter This property is required. string
String to be inserted in-between the records as separator.
RecordPrefix This property is required. string
String to be prepended before each record.
RecordSuffix This property is required. string
String to be appended after each record.
RecordTemplate This property is required. string
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
SampleRate This property is required. float64
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
TimestampFormat This property is required. string
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".

Java
batchPrefix This property is required. String
String to be prepended before each batch.
batchSuffix This property is required. String
String to be appended after each batch.
cve202144228 This property is required. Boolean
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
fieldDelimiter This property is required. String
String to join fields. This field will be ignored when record_template is set.
fieldNames This property is required. List<String>
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
outputType This property is required. String
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
recordDelimiter This property is required. String
String to be inserted in-between the records as separator.
recordPrefix This property is required. String
String to be prepended before each record.
recordSuffix This property is required. String
String to be appended after each record.
recordTemplate This property is required. String
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
sampleRate This property is required. Double
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
timestampFormat This property is required. String
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".

TypeScript
batchPrefix This property is required. string
String to be prepended before each batch.
batchSuffix This property is required. string
String to be appended after each batch.
cve202144228 This property is required. boolean
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
fieldDelimiter This property is required. string
String to join fields. This field will be ignored when record_template is set.
fieldNames This property is required. string[]
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
outputType This property is required. string
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
recordDelimiter This property is required. string
String to be inserted in-between the records as separator.
recordPrefix This property is required. string
String to be prepended before each record.
recordSuffix This property is required. string
String to be appended after each record.
recordTemplate This property is required. string
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
sampleRate This property is required. number
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
timestampFormat This property is required. string
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".

Python
batch_prefix This property is required. str
String to be prepended before each batch.
batch_suffix This property is required. str
String to be appended after each batch.
cve202144228 This property is required. bool
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
field_delimiter This property is required. str
String to join fields. This field will be ignored when record_template is set.
field_names This property is required. Sequence[str]
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
output_type This property is required. str
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
record_delimiter This property is required. str
String to be inserted in-between the records as separator.
record_prefix This property is required. str
String to be prepended before each record.
record_suffix This property is required. str
String to be appended after each record.
record_template This property is required. str
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
sample_rate This property is required. float
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
timestamp_format This property is required. str
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".

YAML
batchPrefix This property is required. String
String to be prepended before each batch.
batchSuffix This property is required. String
String to be appended after each batch.
cve202144228 This property is required. Boolean
If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
fieldDelimiter This property is required. String
String to join fields. This field will be ignored when record_template is set.
fieldNames This property is required. List<String>
List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
outputType This property is required. String
Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
recordDelimiter This property is required. String
String to be inserted in-between the records as separator.
recordPrefix This property is required. String
String to be prepended before each record.
recordSuffix This property is required. String
String to be appended after each record.
recordTemplate This property is required. String
String to use as template for each record instead of the default comma-separated list. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
sampleRate This property is required. Number
Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
timestampFormat This property is required. String
String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
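
These options describe how each job formats its output. As an illustration, the TypeScript sketch below (placeholder zone ID) reports the configured output type and timestamp format for every job.

import * as cloudflare from "@pulumi/cloudflare";

const jobs = cloudflare.getLogpushJobsOutput({ zoneId: "zone_id" });

// Report the configured output format per job; optional chaining is purely
// defensive in case a job has no structured output options set.
export const jobFormats = jobs.results.apply(results =>
    results.map(job => ({
        name: job.name,
        outputType: job.outputOptions?.outputType,
        timestampFormat: job.outputOptions?.timestampFormat,
    })));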

Package Details

Repository
Cloudflare pulumi/pulumi-cloudflare
License
Apache-2.0
Notes
This Pulumi package is based on the cloudflare Terraform Provider.