dataflow

package
v6.67.1
Published: Nov 4, 2023 License: Apache-2.0 Imports: 8 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type FlexTemplateJob

type FlexTemplateJob struct {
	pulumi.CustomResourceState

	// List of experiments that should be used by the job. An example value is ["enable_stackdriver_agent_metrics"].
	AdditionalExperiments pulumi.StringArrayOutput `pulumi:"additionalExperiments"`
	// The algorithm to use for autoscaling
	AutoscalingAlgorithm pulumi.StringPtrOutput `pulumi:"autoscalingAlgorithm"`
	// The GCS path to the Dataflow job Flex
	// Template.
	//
	// ***
	ContainerSpecGcsPath pulumi.StringOutput `pulumi:"containerSpecGcsPath"`
	// Indicates if the job should use the streaming engine feature.
	EnableStreamingEngine pulumi.BoolPtrOutput `pulumi:"enableStreamingEngine"`
	// The configuration for VM IPs. Options are "WORKER_IP_PUBLIC" or "WORKER_IP_PRIVATE".
	IpConfiguration pulumi.StringPtrOutput `pulumi:"ipConfiguration"`
	// The unique ID of this job.
	JobId pulumi.StringOutput `pulumi:"jobId"`
	// The name for the Cloud KMS key for the job. Key format is:
	// projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
	KmsKeyName pulumi.StringPtrOutput `pulumi:"kmsKeyName"`
	// User labels to be specified for the job. Keys and values
	// should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions)
	// page. **Note**: This field is marked as deprecated as the API does not currently
	// support adding labels.
	// **NOTE**: Google-provided Dataflow templates often provide default labels
	// that begin with `goog-dataflow-provided`. Unless explicitly set in config, these
	// labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapOutput `pulumi:"labels"`
	// The machine type to use for launching the job. The default is n1-standard-1.
	LauncherMachineType pulumi.StringPtrOutput `pulumi:"launcherMachineType"`
	// The machine type to use for the job.
	MachineType pulumi.StringPtrOutput `pulumi:"machineType"`
	// The maximum number of Google Compute Engine instances to be made available to your pipeline during execution, from 1 to
	// 1000.
	MaxWorkers pulumi.IntPtrOutput `pulumi:"maxWorkers"`
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringOutput `pulumi:"name"`
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrOutput `pulumi:"network"`
	// The initial number of Google Compute Engine instances for the job.
	NumWorkers pulumi.IntPtrOutput `pulumi:"numWorkers"`
	// One of "drain" or "cancel". Specifies behavior of
	// deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrOutput `pulumi:"onDelete"`
	// Key/Value pairs to be passed to the Dataflow job (as
	// used in the template). Additional [pipeline options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-dataflow-pipeline-options)
	// such as `serviceAccount`, `workerMachineType`, etc can be specified here.
	Parameters pulumi.MapOutput `pulumi:"parameters"`
	// The project in which the resource belongs. If it is not
	// provided, the provider project is used.
	Project pulumi.StringOutput `pulumi:"project"`
	// The region in which the created job should run.
	Region pulumi.StringOutput `pulumi:"region"`
	// Docker registry location of the container image to use for the worker harness. Default is the container for the version of
	// the SDK. Note this field is only valid for portable pipelines.
	SdkContainerImage pulumi.StringPtrOutput `pulumi:"sdkContainerImage"`
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringOutput `pulumi:"serviceAccountEmail"`
	// If true, treat DRAINING and CANCELLING as terminal job states and do not wait for further changes before removing from
	// Pulumi state and moving on. WARNING: this will lead to job name conflicts if you do not ensure that the job names are
	// different, e.g. by embedding a release ID or by using a random_id.
	SkipWaitOnJobTermination pulumi.BoolPtrOutput `pulumi:"skipWaitOnJobTermination"`
	// The Cloud Storage path to use for staging files. Must be a valid Cloud Storage URL, beginning with gs://.
	StagingLocation pulumi.StringOutput `pulumi:"stagingLocation"`
	// The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)
	State pulumi.StringOutput `pulumi:"state"`
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".
	Subnetwork pulumi.StringPtrOutput `pulumi:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation pulumi.StringOutput `pulumi:"tempLocation"`
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the
	// corresponding name prefixes of the new job.
	TransformNameMapping pulumi.MapOutput `pulumi:"transformNameMapping"`
	// The type of this job, selected from the JobType enum.
	Type pulumi.StringOutput `pulumi:"type"`
}

## Example Usage

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataflow.NewFlexTemplateJob(ctx, "bigDataJob", &dataflow.FlexTemplateJobArgs{
			ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/template.json"),
			Parameters: pulumi.Map{
				"inputSubscription": pulumi.Any("messages"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

## Note on "destroy" / "apply"

There are many types of Dataflow jobs. Some Dataflow jobs run constantly, getting new data from (e.g.) a GCS bucket, and outputting data continuously. Some jobs process a set amount of data then terminate. All jobs can fail while running due to programming errors or other issues. In this way, Dataflow jobs are different from most other Google resources.

The Dataflow resource is considered 'existing' while it is in a nonterminal state. If it reaches a terminal state (e.g. 'FAILED', 'COMPLETE', 'CANCELLED'), it will be recreated on the next 'apply'. This is as expected for jobs which run continuously, but may surprise users who use this resource for other kinds of Dataflow jobs.

A Dataflow job which is 'destroyed' may be "cancelled" or "drained". If "cancelled", the job terminates - any data written remains where it is, but no new data will be processed. If "drained", no new data will enter the pipeline, but any data currently in the pipeline will finish being processed. The default is "cancel"; if a user sets `onDelete` to `"drain"` in the configuration, you may experience a long wait for your `pulumi destroy` to complete.

You can potentially short-circuit the wait by setting `skipWaitOnJobTermination` to `true`, but beware that unless you take active steps to ensure that the job `name` parameter changes between instances, the name will conflict and the launch of the new job will fail. One way to do this is with a `random.RandomId` resource, for example:

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		cfg := config.New(ctx, "")
		bigDataJobSubscriptionId := "projects/myproject/subscriptions/messages"
		if param := cfg.Get("bigDataJobSubscriptionId"); param != "" {
			bigDataJobSubscriptionId = param
		}
		region := cfg.Require("region")
		bigDataJobNameSuffix, err := random.NewRandomId(ctx, "bigDataJobNameSuffix", &random.RandomIdArgs{
			ByteLength: pulumi.Int(4),
			Keepers: pulumi.Map{
				"region":          pulumi.String(region),
				"subscription_id": pulumi.String(bigDataJobSubscriptionId),
			},
		})
		if err != nil {
			return err
		}
		_, err = dataflow.NewFlexTemplateJob(ctx, "bigDataJob", &dataflow.FlexTemplateJobArgs{
			// Embed the random suffix in the job name so a replacement job never collides.
			Name:                     pulumi.Sprintf("dataflow-flextemplates-job-%v", bigDataJobNameSuffix.Dec),
			Region:                   pulumi.String(region),
			ContainerSpecGcsPath:     pulumi.String("gs://my-bucket/templates/template.json"),
			SkipWaitOnJobTermination: pulumi.Bool(true),
			Parameters: pulumi.Map{
				"inputSubscription": pulumi.String(bigDataJobSubscriptionId),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

## Import

This resource does not support import.

func GetFlexTemplateJob

func GetFlexTemplateJob(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *FlexTemplateJobState, opts ...pulumi.ResourceOption) (*FlexTemplateJob, error)

GetFlexTemplateJob gets an existing FlexTemplateJob resource's state with the given name, ID, and optional state properties that are used to uniquely qualify the lookup (nil if not required).
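
For example, a minimal lookup sketch (the job ID and export name below are illustrative placeholders):

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Look up an existing FlexTemplateJob by its ID; state is nil
		// because no extra qualification is required for the lookup.
		job, err := dataflow.GetFlexTemplateJob(ctx, "existing-job", pulumi.ID("2022-07-31_06_25_42-1192692753"), nil)
		if err != nil {
			return err
		}
		ctx.Export("existingJobState", job.State)
		return nil
	})
}
```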

func NewFlexTemplateJob

func NewFlexTemplateJob(ctx *pulumi.Context,
	name string, args *FlexTemplateJobArgs, opts ...pulumi.ResourceOption) (*FlexTemplateJob, error)

NewFlexTemplateJob registers a new resource with the given unique name, arguments, and options.

func (*FlexTemplateJob) ElementType

func (*FlexTemplateJob) ElementType() reflect.Type

func (*FlexTemplateJob) ToFlexTemplateJobOutput

func (i *FlexTemplateJob) ToFlexTemplateJobOutput() FlexTemplateJobOutput

func (*FlexTemplateJob) ToFlexTemplateJobOutputWithContext

func (i *FlexTemplateJob) ToFlexTemplateJobOutputWithContext(ctx context.Context) FlexTemplateJobOutput

func (*FlexTemplateJob) ToOutput added in v6.65.1

func (i *FlexTemplateJob) ToOutput(ctx context.Context) pulumix.Output[*FlexTemplateJob]

type FlexTemplateJobArgs

type FlexTemplateJobArgs struct {
	// List of experiments that should be used by the job. An example value is ["enable_stackdriver_agent_metrics"].
	AdditionalExperiments pulumi.StringArrayInput
	// The algorithm to use for autoscaling
	AutoscalingAlgorithm pulumi.StringPtrInput
	// The GCS path to the Dataflow job Flex
	// Template.
	//
	// ***
	ContainerSpecGcsPath pulumi.StringInput
	// Indicates if the job should use the streaming engine feature.
	EnableStreamingEngine pulumi.BoolPtrInput
	// The configuration for VM IPs. Options are "WORKER_IP_PUBLIC" or "WORKER_IP_PRIVATE".
	IpConfiguration pulumi.StringPtrInput
	// The name for the Cloud KMS key for the job. Key format is:
	// projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
	KmsKeyName pulumi.StringPtrInput
	// User labels to be specified for the job. Keys and values
	// should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions)
	// page. **Note**: This field is marked as deprecated as the API does not currently
	// support adding labels.
	// **NOTE**: Google-provided Dataflow templates often provide default labels
	// that begin with `goog-dataflow-provided`. Unless explicitly set in config, these
	// labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapInput
	// The machine type to use for launching the job. The default is n1-standard-1.
	LauncherMachineType pulumi.StringPtrInput
	// The machine type to use for the job.
	MachineType pulumi.StringPtrInput
	// The maximum number of Google Compute Engine instances to be made available to your pipeline during execution, from 1 to
	// 1000.
	MaxWorkers pulumi.IntPtrInput
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringPtrInput
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrInput
	// The initial number of Google Compute Engine instances for the job.
	NumWorkers pulumi.IntPtrInput
	// One of "drain" or "cancel". Specifies behavior of
	// deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrInput
	// Key/Value pairs to be passed to the Dataflow job (as
	// used in the template). Additional [pipeline options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-dataflow-pipeline-options)
	// such as `serviceAccount`, `workerMachineType`, etc can be specified here.
	Parameters pulumi.MapInput
	// The project in which the resource belongs. If it is not
	// provided, the provider project is used.
	Project pulumi.StringPtrInput
	// The region in which the created job should run.
	Region pulumi.StringPtrInput
	// Docker registry location of the container image to use for the worker harness. Default is the container for the version of
	// the SDK. Note this field is only valid for portable pipelines.
	SdkContainerImage pulumi.StringPtrInput
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringPtrInput
	// If true, treat DRAINING and CANCELLING as terminal job states and do not wait for further changes before removing from
	// Pulumi state and moving on. WARNING: this will lead to job name conflicts if you do not ensure that the job names are
	// different, e.g. by embedding a release ID or by using a random_id.
	SkipWaitOnJobTermination pulumi.BoolPtrInput
	// The Cloud Storage path to use for staging files. Must be a valid Cloud Storage URL, beginning with gs://.
	StagingLocation pulumi.StringPtrInput
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".
	Subnetwork pulumi.StringPtrInput
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation pulumi.StringPtrInput
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the
	// corresponding name prefixes of the new job.
	TransformNameMapping pulumi.MapInput
}

The set of arguments for constructing a FlexTemplateJob resource.

func (FlexTemplateJobArgs) ElementType

func (FlexTemplateJobArgs) ElementType() reflect.Type

type FlexTemplateJobArray

type FlexTemplateJobArray []FlexTemplateJobInput

func (FlexTemplateJobArray) ElementType

func (FlexTemplateJobArray) ElementType() reflect.Type

func (FlexTemplateJobArray) ToFlexTemplateJobArrayOutput

func (i FlexTemplateJobArray) ToFlexTemplateJobArrayOutput() FlexTemplateJobArrayOutput

func (FlexTemplateJobArray) ToFlexTemplateJobArrayOutputWithContext

func (i FlexTemplateJobArray) ToFlexTemplateJobArrayOutputWithContext(ctx context.Context) FlexTemplateJobArrayOutput

func (FlexTemplateJobArray) ToOutput added in v6.65.1

func (i FlexTemplateJobArray) ToOutput(ctx context.Context) pulumix.Output[[]*FlexTemplateJob]

type FlexTemplateJobArrayInput

type FlexTemplateJobArrayInput interface {
	pulumi.Input

	ToFlexTemplateJobArrayOutput() FlexTemplateJobArrayOutput
	ToFlexTemplateJobArrayOutputWithContext(context.Context) FlexTemplateJobArrayOutput
}

FlexTemplateJobArrayInput is an input type that accepts FlexTemplateJobArray and FlexTemplateJobArrayOutput values. You can construct a concrete instance of `FlexTemplateJobArrayInput` via:

FlexTemplateJobArray{ FlexTemplateJobArgs{...} }

type FlexTemplateJobArrayOutput

type FlexTemplateJobArrayOutput struct{ *pulumi.OutputState }

func (FlexTemplateJobArrayOutput) ElementType

func (FlexTemplateJobArrayOutput) ElementType() reflect.Type

func (FlexTemplateJobArrayOutput) Index

func (o FlexTemplateJobArrayOutput) Index(i pulumi.IntInput) FlexTemplateJobOutput

func (FlexTemplateJobArrayOutput) ToFlexTemplateJobArrayOutput

func (o FlexTemplateJobArrayOutput) ToFlexTemplateJobArrayOutput() FlexTemplateJobArrayOutput

func (FlexTemplateJobArrayOutput) ToFlexTemplateJobArrayOutputWithContext

func (o FlexTemplateJobArrayOutput) ToFlexTemplateJobArrayOutputWithContext(ctx context.Context) FlexTemplateJobArrayOutput

func (FlexTemplateJobArrayOutput) ToOutput added in v6.65.1

func (o FlexTemplateJobArrayOutput) ToOutput(ctx context.Context) pulumix.Output[[]*FlexTemplateJob]

type FlexTemplateJobInput

type FlexTemplateJobInput interface {
	pulumi.Input

	ToFlexTemplateJobOutput() FlexTemplateJobOutput
	ToFlexTemplateJobOutputWithContext(ctx context.Context) FlexTemplateJobOutput
}

type FlexTemplateJobMap

type FlexTemplateJobMap map[string]FlexTemplateJobInput

func (FlexTemplateJobMap) ElementType

func (FlexTemplateJobMap) ElementType() reflect.Type

func (FlexTemplateJobMap) ToFlexTemplateJobMapOutput

func (i FlexTemplateJobMap) ToFlexTemplateJobMapOutput() FlexTemplateJobMapOutput

func (FlexTemplateJobMap) ToFlexTemplateJobMapOutputWithContext

func (i FlexTemplateJobMap) ToFlexTemplateJobMapOutputWithContext(ctx context.Context) FlexTemplateJobMapOutput

func (FlexTemplateJobMap) ToOutput added in v6.65.1

func (i FlexTemplateJobMap) ToOutput(ctx context.Context) pulumix.Output[map[string]*FlexTemplateJob]

type FlexTemplateJobMapInput

type FlexTemplateJobMapInput interface {
	pulumi.Input

	ToFlexTemplateJobMapOutput() FlexTemplateJobMapOutput
	ToFlexTemplateJobMapOutputWithContext(context.Context) FlexTemplateJobMapOutput
}

FlexTemplateJobMapInput is an input type that accepts FlexTemplateJobMap and FlexTemplateJobMapOutput values. You can construct a concrete instance of `FlexTemplateJobMapInput` via:

FlexTemplateJobMap{ "key": FlexTemplateJobArgs{...} }

type FlexTemplateJobMapOutput

type FlexTemplateJobMapOutput struct{ *pulumi.OutputState }

func (FlexTemplateJobMapOutput) ElementType

func (FlexTemplateJobMapOutput) ElementType() reflect.Type

func (FlexTemplateJobMapOutput) MapIndex

func (o FlexTemplateJobMapOutput) MapIndex(k pulumi.StringInput) FlexTemplateJobOutput

func (FlexTemplateJobMapOutput) ToFlexTemplateJobMapOutput

func (o FlexTemplateJobMapOutput) ToFlexTemplateJobMapOutput() FlexTemplateJobMapOutput

func (FlexTemplateJobMapOutput) ToFlexTemplateJobMapOutputWithContext

func (o FlexTemplateJobMapOutput) ToFlexTemplateJobMapOutputWithContext(ctx context.Context) FlexTemplateJobMapOutput

func (FlexTemplateJobMapOutput) ToOutput added in v6.65.1

func (o FlexTemplateJobMapOutput) ToOutput(ctx context.Context) pulumix.Output[map[string]*FlexTemplateJob]

type FlexTemplateJobOutput

type FlexTemplateJobOutput struct{ *pulumi.OutputState }

func (FlexTemplateJobOutput) AdditionalExperiments added in v6.57.0

func (o FlexTemplateJobOutput) AdditionalExperiments() pulumi.StringArrayOutput

List of experiments that should be used by the job. An example value is ["enable_stackdriver_agent_metrics"].

func (FlexTemplateJobOutput) AutoscalingAlgorithm added in v6.57.0

func (o FlexTemplateJobOutput) AutoscalingAlgorithm() pulumi.StringPtrOutput

The algorithm to use for autoscaling

func (FlexTemplateJobOutput) ContainerSpecGcsPath added in v6.23.0

func (o FlexTemplateJobOutput) ContainerSpecGcsPath() pulumi.StringOutput

The GCS path to the Dataflow job Flex Template.

***

func (FlexTemplateJobOutput) ElementType

func (FlexTemplateJobOutput) ElementType() reflect.Type

func (FlexTemplateJobOutput) EnableStreamingEngine added in v6.57.0

func (o FlexTemplateJobOutput) EnableStreamingEngine() pulumi.BoolPtrOutput

Indicates if the job should use the streaming engine feature.

func (FlexTemplateJobOutput) IpConfiguration added in v6.57.0

func (o FlexTemplateJobOutput) IpConfiguration() pulumi.StringPtrOutput

The configuration for VM IPs. Options are "WORKER_IP_PUBLIC" or "WORKER_IP_PRIVATE".

func (FlexTemplateJobOutput) JobId added in v6.23.0

func (o FlexTemplateJobOutput) JobId() pulumi.StringOutput

The unique ID of this job.

func (FlexTemplateJobOutput) KmsKeyName added in v6.57.0

func (o FlexTemplateJobOutput) KmsKeyName() pulumi.StringPtrOutput

The name for the Cloud KMS key for the job. Key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY

func (FlexTemplateJobOutput) Labels added in v6.23.0

func (o FlexTemplateJobOutput) Labels() pulumi.MapOutput

User labels to be specified for the job. Keys and values should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page. **Note**: This field is marked as deprecated as the API does not currently support adding labels. **NOTE**: Google-provided Dataflow templates often provide default labels that begin with `goog-dataflow-provided`. Unless explicitly set in config, these labels will be ignored to prevent diffs on re-apply.

func (FlexTemplateJobOutput) LauncherMachineType added in v6.57.0

func (o FlexTemplateJobOutput) LauncherMachineType() pulumi.StringPtrOutput

The machine type to use for launching the job. The default is n1-standard-1.

func (FlexTemplateJobOutput) MachineType added in v6.57.0

func (o FlexTemplateJobOutput) MachineType() pulumi.StringPtrOutput

The machine type to use for the job.

func (FlexTemplateJobOutput) MaxWorkers added in v6.57.0

func (o FlexTemplateJobOutput) MaxWorkers() pulumi.IntPtrOutput

The maximum number of Google Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.

func (FlexTemplateJobOutput) Name added in v6.23.0

func (o FlexTemplateJobOutput) Name() pulumi.StringOutput

A unique name for the resource, required by Dataflow.

func (FlexTemplateJobOutput) Network added in v6.57.0

func (o FlexTemplateJobOutput) Network() pulumi.StringPtrOutput

The network to which VMs will be assigned. If it is not provided, "default" will be used.

func (FlexTemplateJobOutput) NumWorkers added in v6.57.0

func (o FlexTemplateJobOutput) NumWorkers() pulumi.IntPtrOutput

The initial number of Google Compute Engine instances for the job.

func (FlexTemplateJobOutput) OnDelete added in v6.23.0

func (o FlexTemplateJobOutput) OnDelete() pulumi.StringPtrOutput

One of "drain" or "cancel". Specifies behavior of deletion during `pulumi destroy`. See above note.

func (FlexTemplateJobOutput) Parameters added in v6.23.0

func (o FlexTemplateJobOutput) Parameters() pulumi.MapOutput

Key/Value pairs to be passed to the Dataflow job (as used in the template). Additional [pipeline options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-dataflow-pipeline-options) such as `serviceAccount`, `workerMachineType`, etc can be specified here.

func (FlexTemplateJobOutput) Project added in v6.23.0

func (o FlexTemplateJobOutput) Project() pulumi.StringOutput

The project in which the resource belongs. If it is not provided, the provider project is used.

func (FlexTemplateJobOutput) Region added in v6.23.0

func (o FlexTemplateJobOutput) Region() pulumi.StringOutput

The region in which the created job should run.

func (FlexTemplateJobOutput) SdkContainerImage added in v6.57.0

func (o FlexTemplateJobOutput) SdkContainerImage() pulumi.StringPtrOutput

Docker registry location of the container image to use for the worker harness. Default is the container for the version of the SDK. Note this field is only valid for portable pipelines.

func (FlexTemplateJobOutput) ServiceAccountEmail added in v6.57.0

func (o FlexTemplateJobOutput) ServiceAccountEmail() pulumi.StringOutput

The Service Account email used to create the job.

func (FlexTemplateJobOutput) SkipWaitOnJobTermination added in v6.23.0

func (o FlexTemplateJobOutput) SkipWaitOnJobTermination() pulumi.BoolPtrOutput

If true, treat DRAINING and CANCELLING as terminal job states and do not wait for further changes before removing from Pulumi state and moving on. WARNING: this will lead to job name conflicts if you do not ensure that the job names are different, e.g. by embedding a release ID or by using a random_id.

func (FlexTemplateJobOutput) StagingLocation added in v6.57.0

func (o FlexTemplateJobOutput) StagingLocation() pulumi.StringOutput

The Cloud Storage path to use for staging files. Must be a valid Cloud Storage URL, beginning with gs://.

func (FlexTemplateJobOutput) State added in v6.23.0

func (o FlexTemplateJobOutput) State() pulumi.StringOutput

The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)

func (FlexTemplateJobOutput) Subnetwork added in v6.57.0

func (o FlexTemplateJobOutput) Subnetwork() pulumi.StringPtrOutput

The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".

func (FlexTemplateJobOutput) TempLocation added in v6.57.0

func (o FlexTemplateJobOutput) TempLocation() pulumi.StringOutput

The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.

func (FlexTemplateJobOutput) ToFlexTemplateJobOutput

func (o FlexTemplateJobOutput) ToFlexTemplateJobOutput() FlexTemplateJobOutput

func (FlexTemplateJobOutput) ToFlexTemplateJobOutputWithContext

func (o FlexTemplateJobOutput) ToFlexTemplateJobOutputWithContext(ctx context.Context) FlexTemplateJobOutput

func (FlexTemplateJobOutput) ToOutput added in v6.65.1

func (o FlexTemplateJobOutput) ToOutput(ctx context.Context) pulumix.Output[*FlexTemplateJob]

func (FlexTemplateJobOutput) TransformNameMapping added in v6.57.0

func (o FlexTemplateJobOutput) TransformNameMapping() pulumi.MapOutput

Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job.

func (FlexTemplateJobOutput) Type added in v6.57.0

func (o FlexTemplateJobOutput) Type() pulumi.StringOutput

The type of this job, selected from the JobType enum.
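
The getters above mirror the output fields on the resource struct, so the usual pattern is to read them directly from a created resource. A minimal sketch (the bucket path is a placeholder):

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		job, err := dataflow.NewFlexTemplateJob(ctx, "job", &dataflow.FlexTemplateJobArgs{
			ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/template.json"),
		})
		if err != nil {
			return err
		}
		// JobId, State, and Type are populated by the Dataflow service once the job launches.
		ctx.Export("jobId", job.JobId)
		ctx.Export("jobState", job.State)
		return nil
	})
}
```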

type FlexTemplateJobState

type FlexTemplateJobState struct {
	// List of experiments that should be used by the job. An example value is ["enable_stackdriver_agent_metrics"].
	AdditionalExperiments pulumi.StringArrayInput
	// The algorithm to use for autoscaling
	AutoscalingAlgorithm pulumi.StringPtrInput
	// The GCS path to the Dataflow job Flex
	// Template.
	//
	// ***
	ContainerSpecGcsPath pulumi.StringPtrInput
	// Indicates if the job should use the streaming engine feature.
	EnableStreamingEngine pulumi.BoolPtrInput
	// The configuration for VM IPs. Options are "WORKER_IP_PUBLIC" or "WORKER_IP_PRIVATE".
	IpConfiguration pulumi.StringPtrInput
	// The unique ID of this job.
	JobId pulumi.StringPtrInput
	// The name for the Cloud KMS key for the job. Key format is:
	// projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
	KmsKeyName pulumi.StringPtrInput
	// User labels to be specified for the job. Keys and values
	// should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions)
	// page. **Note**: This field is marked as deprecated as the API does not currently
	// support adding labels.
	// **NOTE**: Google-provided Dataflow templates often provide default labels
	// that begin with `goog-dataflow-provided`. Unless explicitly set in config, these
	// labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapInput
	// The machine type to use for launching the job. The default is n1-standard-1.
	LauncherMachineType pulumi.StringPtrInput
	// The machine type to use for the job.
	MachineType pulumi.StringPtrInput
	// The maximum number of Google Compute Engine instances to be made available to your pipeline during execution, from 1 to
	// 1000.
	MaxWorkers pulumi.IntPtrInput
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringPtrInput
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrInput
	// The initial number of Google Compute Engine instances for the job.
	NumWorkers pulumi.IntPtrInput
	// One of "drain" or "cancel". Specifies behavior of
	// deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrInput
	// Key/Value pairs to be passed to the Dataflow job (as
	// used in the template). Additional [pipeline options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-dataflow-pipeline-options)
	// such as `serviceAccount`, `workerMachineType`, etc can be specified here.
	Parameters pulumi.MapInput
	// The project in which the resource belongs. If it is not
	// provided, the provider project is used.
	Project pulumi.StringPtrInput
	// The region in which the created job should run.
	Region pulumi.StringPtrInput
	// Docker registry location of the container image to use for the worker harness. Default is the container for the version of
	// the SDK. Note this field is only valid for portable pipelines.
	SdkContainerImage pulumi.StringPtrInput
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringPtrInput
	// If true, treat DRAINING and CANCELLING as terminal job states and do not wait for further changes before removing from
	// Pulumi state and moving on. WARNING: this will lead to job name conflicts if you do not ensure that the job names are
	// different, e.g. by embedding a release ID or by using a random_id.
	SkipWaitOnJobTermination pulumi.BoolPtrInput
	// The Cloud Storage path to use for staging files. Must be a valid Cloud Storage URL, beginning with gs://.
	StagingLocation pulumi.StringPtrInput
	// The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)
	State pulumi.StringPtrInput
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".
	Subnetwork pulumi.StringPtrInput
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation pulumi.StringPtrInput
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the
	// corresponding name prefixes of the new job.
	TransformNameMapping pulumi.MapInput
	// The type of this job, selected from the JobType enum.
	Type pulumi.StringPtrInput
}

func (FlexTemplateJobState) ElementType

func (FlexTemplateJobState) ElementType() reflect.Type

type Job

type Job struct {
	pulumi.CustomResourceState

	// List of experiments that should be used by the job. An example value is `["enableStackdriverAgentMetrics"]`.
	AdditionalExperiments pulumi.StringArrayOutput `pulumi:"additionalExperiments"`
	// Enable/disable the use of [Streaming Engine](https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#streaming-engine) for the job. Note that Streaming Engine is enabled by default for pipelines developed against the Beam SDK for Python v2.21.0 or later when using Python 3.
	EnableStreamingEngine pulumi.BoolPtrOutput `pulumi:"enableStreamingEngine"`
	// The configuration for VM IPs.  Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.
	IpConfiguration pulumi.StringPtrOutput `pulumi:"ipConfiguration"`
	// The unique ID of this job.
	JobId pulumi.StringOutput `pulumi:"jobId"`
	// The name for the Cloud KMS key for the job. Key format is: `projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY`
	KmsKeyName pulumi.StringPtrOutput `pulumi:"kmsKeyName"`
	// User labels to be specified for the job. Keys and values should follow the restrictions
	// specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page.
	// **NOTE**: Google-provided Dataflow templates often provide default labels that begin with `goog-dataflow-provided`.
	// Unless explicitly set in config, these labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapOutput `pulumi:"labels"`
	// The machine type to use for the job.
	MachineType pulumi.StringPtrOutput `pulumi:"machineType"`
	// The number of workers permitted to work on the job.  More workers may improve processing speed at additional cost.
	MaxWorkers pulumi.IntPtrOutput `pulumi:"maxWorkers"`
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringOutput `pulumi:"name"`
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrOutput `pulumi:"network"`
	// One of "drain" or "cancel".  Specifies behavior of deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrOutput `pulumi:"onDelete"`
	// Key/Value pairs to be passed to the Dataflow job (as used in the template).
	Parameters pulumi.MapOutput `pulumi:"parameters"`
	// The project in which the resource belongs. If it is not provided, the provider project is used.
	Project pulumi.StringOutput `pulumi:"project"`
	// The region in which the created job should run.
	Region pulumi.StringPtrOutput `pulumi:"region"`
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringPtrOutput `pulumi:"serviceAccountEmail"`
	// If set to `true`, Pulumi will treat `DRAINING` and `CANCELLING` as terminal states when deleting the resource, and will remove the resource from Pulumi state and move on.  See above note.
	SkipWaitOnJobTermination pulumi.BoolPtrOutput `pulumi:"skipWaitOnJobTermination"`
	// The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)
	State pulumi.StringOutput `pulumi:"state"`
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK". If the [subnetwork is located in a Shared VPC network](https://cloud.google.com/dataflow/docs/guides/specifying-networks#shared), you must use the complete URL. For example `"googleapis.com/compute/v1/projects/PROJECT_ID/regions/REGION/subnetworks/SUBNET_NAME"`
	Subnetwork pulumi.StringPtrOutput `pulumi:"subnetwork"`
	// A writeable location on GCS for the Dataflow job to dump its temporary data.
	//
	// ***
	TempGcsLocation pulumi.StringOutput `pulumi:"tempGcsLocation"`
	// The GCS path to the Dataflow job template.
	TemplateGcsPath pulumi.StringOutput `pulumi:"templateGcsPath"`
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job. This field is not used outside of update.
	TransformNameMapping pulumi.MapOutput `pulumi:"transformNameMapping"`
	// The type of this job, selected from the [JobType enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobType)
	Type pulumi.StringOutput `pulumi:"type"`
	// The zone in which the created job should run. If it is not provided, the provider zone is used.
	Zone pulumi.StringPtrOutput `pulumi:"zone"`
}

Creates a job on Dataflow, which is an implementation of Apache Beam running on Google Compute Engine. For more information see the official documentation for [Beam](https://beam.apache.org) and [Dataflow](https://cloud.google.com/dataflow/).

## Example Usage

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataflow.NewJob(ctx, "bigDataJob", &dataflow.JobArgs{
			Parameters: pulumi.Map{
				"baz": pulumi.Any("qux"),
				"foo": pulumi.Any("bar"),
			},
			TempGcsLocation: pulumi.String("gs://my-bucket/tmp_dir"),
			TemplateGcsPath: pulumi.String("gs://my-bucket/templates/template_file"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

### Streaming Job

```go
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/pubsub"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		topic, err := pubsub.NewTopic(ctx, "topic", nil)
		if err != nil {
			return err
		}
		bucket1, err := storage.NewBucket(ctx, "bucket1", &storage.BucketArgs{
			Location:     pulumi.String("US"),
			ForceDestroy: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		_, err = storage.NewBucket(ctx, "bucket2", &storage.BucketArgs{
			Location:     pulumi.String("US"),
			ForceDestroy: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		_, err = dataflow.NewJob(ctx, "pubsubStream", &dataflow.JobArgs{
			TemplateGcsPath:       pulumi.String("gs://my-bucket/templates/template_file"),
			TempGcsLocation:       pulumi.String("gs://my-bucket/tmp_dir"),
			EnableStreamingEngine: pulumi.Bool(true),
			Parameters: pulumi.Map{
				"inputFilePattern": bucket1.Url.ApplyT(func(url string) (string, error) {
					return fmt.Sprintf("%v/*.json", url), nil
				}).(pulumi.StringOutput),
				"outputTopic": topic.ID(),
			},
			TransformNameMapping: pulumi.Map{
				"name": pulumi.Any("test_job"),
				"env":  pulumi.Any("test"),
			},
			OnDelete: pulumi.String("cancel"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

## Note on "destroy" / "apply"

There are many types of Dataflow jobs. Some Dataflow jobs run constantly, getting new data from (e.g.) a GCS bucket, and outputting data continuously. Some jobs process a set amount of data then terminate. All jobs can fail while running due to programming errors or other issues. In this way, Dataflow jobs are different from most other Google resources.

The Dataflow resource is considered 'existing' while it is in a nonterminal state. If it reaches a terminal state (e.g. 'FAILED', 'COMPLETE', 'CANCELLED'), it will be recreated on the next 'apply'. This is as expected for jobs which run continuously, but may surprise users who use this resource for other kinds of Dataflow jobs.

A Dataflow job which is 'destroyed' may be "cancelled" or "drained". If "cancelled", the job terminates - any data written remains where it is, but no new data will be processed. If "drained", no new data will enter the pipeline, but any data currently in the pipeline will finish being processed. The default is "drain". When `onDelete` is set to `"drain"` in the configuration, you may experience a long wait for your `pulumi destroy` to complete.

You can potentially short-circuit the wait by setting `skipWaitOnJobTermination` to `true`, but beware that unless you take active steps to ensure that the job `name` parameter changes between instances, the name will conflict and the launch of the new job will fail. One way to do this is with a `random.RandomId` resource, for example:

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		cfg := config.New(ctx, "")
		bigDataJobSubscriptionId := "projects/myproject/subscriptions/messages"
		if param := cfg.Get("bigDataJobSubscriptionId"); param != "" {
			bigDataJobSubscriptionId = param
		}
		region := cfg.Require("region")
		bigDataJobNameSuffix, err := random.NewRandomId(ctx, "bigDataJobNameSuffix", &random.RandomIdArgs{
			ByteLength: pulumi.Int(4),
			Keepers: pulumi.Map{
				"region":          pulumi.String(region),
				"subscription_id": pulumi.String(bigDataJobSubscriptionId),
			},
		})
		if err != nil {
			return err
		}
		_, err = dataflow.NewFlexTemplateJob(ctx, "bigDataJob", &dataflow.FlexTemplateJobArgs{
			// Embed the random suffix in the job name so a replacement job never collides.
			Name:                     pulumi.Sprintf("dataflow-flextemplates-job-%v", bigDataJobNameSuffix.Dec),
			Region:                   pulumi.String(region),
			ContainerSpecGcsPath:     pulumi.String("gs://my-bucket/templates/template.json"),
			SkipWaitOnJobTermination: pulumi.Bool(true),
			Parameters: pulumi.Map{
				"inputSubscription": pulumi.String(bigDataJobSubscriptionId),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

## Import

Dataflow jobs can be imported using the job `id`, e.g.

```sh

$ pulumi import gcp:dataflow/job:Job example 2022-07-31_06_25_42-11926927532632678660

```

func GetJob

func GetJob(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *JobState, opts ...pulumi.ResourceOption) (*Job, error)

GetJob gets an existing Job resource's state with the given name, ID, and optional state properties that are used to uniquely qualify the lookup (nil if not required).
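
A minimal lookup sketch, reusing the job ID format from the Import section above (the ID and export name are illustrative):

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Look up an existing Job by its ID; state is nil because no
		// extra qualification is required for the lookup.
		job, err := dataflow.GetJob(ctx, "example", pulumi.ID("2022-07-31_06_25_42-11926927532632678660"), nil)
		if err != nil {
			return err
		}
		ctx.Export("jobState", job.State)
		return nil
	})
}
```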

func NewJob

func NewJob(ctx *pulumi.Context,
	name string, args *JobArgs, opts ...pulumi.ResourceOption) (*Job, error)

NewJob registers a new resource with the given unique name, arguments, and options.

func (*Job) ElementType

func (*Job) ElementType() reflect.Type

func (*Job) ToJobOutput

func (i *Job) ToJobOutput() JobOutput

func (*Job) ToJobOutputWithContext

func (i *Job) ToJobOutputWithContext(ctx context.Context) JobOutput

func (*Job) ToOutput added in v6.65.1

func (i *Job) ToOutput(ctx context.Context) pulumix.Output[*Job]

type JobArgs

type JobArgs struct {
	// List of experiments that should be used by the job. An example value is `["enableStackdriverAgentMetrics"]`.
	AdditionalExperiments pulumi.StringArrayInput
	// Enable/disable the use of [Streaming Engine](https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#streaming-engine) for the job. Note that Streaming Engine is enabled by default for pipelines developed against the Beam SDK for Python v2.21.0 or later when using Python 3.
	EnableStreamingEngine pulumi.BoolPtrInput
	// The configuration for VM IPs.  Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.
	IpConfiguration pulumi.StringPtrInput
	// The name for the Cloud KMS key for the job. Key format is: `projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY`
	KmsKeyName pulumi.StringPtrInput
	// User labels to be specified for the job. Keys and values should follow the restrictions
	// specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page.
	// **NOTE**: Google-provided Dataflow templates often provide default labels that begin with `goog-dataflow-provided`.
	// Unless explicitly set in config, these labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapInput
	// The machine type to use for the job.
	MachineType pulumi.StringPtrInput
	// The number of workers permitted to work on the job.  More workers may improve processing speed at additional cost.
	MaxWorkers pulumi.IntPtrInput
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringPtrInput
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrInput
	// One of "drain" or "cancel".  Specifies behavior of deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrInput
	// Key/Value pairs to be passed to the Dataflow job (as used in the template).
	Parameters pulumi.MapInput
	// The project in which the resource belongs. If it is not provided, the provider project is used.
	Project pulumi.StringPtrInput
	// The region in which the created job should run.
	Region pulumi.StringPtrInput
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringPtrInput
	// If set to `true`, Pulumi will treat `DRAINING` and `CANCELLING` as terminal states when deleting the resource, and will remove the resource from Pulumi state and move on.  See above note.
	SkipWaitOnJobTermination pulumi.BoolPtrInput
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK". If the [subnetwork is located in a Shared VPC network](https://cloud.google.com/dataflow/docs/guides/specifying-networks#shared), you must use the complete URL. For example `"googleapis.com/compute/v1/projects/PROJECT_ID/regions/REGION/subnetworks/SUBNET_NAME"`
	Subnetwork pulumi.StringPtrInput
	// A writeable location on GCS for the Dataflow job to dump its temporary data.
	//
	// ***
	TempGcsLocation pulumi.StringInput
	// The GCS path to the Dataflow job template.
	TemplateGcsPath pulumi.StringInput
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job. This field is not used outside of update.
	TransformNameMapping pulumi.MapInput
	// The zone in which the created job should run. If it is not provided, the provider zone is used.
	Zone pulumi.StringPtrInput
}

The set of arguments for constructing a Job resource.

func (JobArgs) ElementType

func (JobArgs) ElementType() reflect.Type

type JobArray

type JobArray []JobInput

func (JobArray) ElementType

func (JobArray) ElementType() reflect.Type

func (JobArray) ToJobArrayOutput

func (i JobArray) ToJobArrayOutput() JobArrayOutput

func (JobArray) ToJobArrayOutputWithContext

func (i JobArray) ToJobArrayOutputWithContext(ctx context.Context) JobArrayOutput

func (JobArray) ToOutput added in v6.65.1

func (i JobArray) ToOutput(ctx context.Context) pulumix.Output[[]*Job]

type JobArrayInput

type JobArrayInput interface {
	pulumi.Input

	ToJobArrayOutput() JobArrayOutput
	ToJobArrayOutputWithContext(context.Context) JobArrayOutput
}

JobArrayInput is an input type that accepts JobArray and JobArrayOutput values. You can construct a concrete instance of `JobArrayInput` via:

JobArray{ JobArgs{...} }

type JobArrayOutput

type JobArrayOutput struct{ *pulumi.OutputState }

func (JobArrayOutput) ElementType

func (JobArrayOutput) ElementType() reflect.Type

func (JobArrayOutput) Index

func (o JobArrayOutput) Index(i pulumi.IntInput) JobOutput

func (JobArrayOutput) ToJobArrayOutput

func (o JobArrayOutput) ToJobArrayOutput() JobArrayOutput

func (JobArrayOutput) ToJobArrayOutputWithContext

func (o JobArrayOutput) ToJobArrayOutputWithContext(ctx context.Context) JobArrayOutput

func (JobArrayOutput) ToOutput added in v6.65.1

func (o JobArrayOutput) ToOutput(ctx context.Context) pulumix.Output[[]*Job]

type JobInput

type JobInput interface {
	pulumi.Input

	ToJobOutput() JobOutput
	ToJobOutputWithContext(ctx context.Context) JobOutput
}

type JobMap

type JobMap map[string]JobInput

func (JobMap) ElementType

func (JobMap) ElementType() reflect.Type

func (JobMap) ToJobMapOutput

func (i JobMap) ToJobMapOutput() JobMapOutput

func (JobMap) ToJobMapOutputWithContext

func (i JobMap) ToJobMapOutputWithContext(ctx context.Context) JobMapOutput

func (JobMap) ToOutput added in v6.65.1

func (i JobMap) ToOutput(ctx context.Context) pulumix.Output[map[string]*Job]

type JobMapInput

type JobMapInput interface {
	pulumi.Input

	ToJobMapOutput() JobMapOutput
	ToJobMapOutputWithContext(context.Context) JobMapOutput
}

JobMapInput is an input type that accepts JobMap and JobMapOutput values. You can construct a concrete instance of `JobMapInput` via:

JobMap{ "key": JobArgs{...} }

type JobMapOutput

type JobMapOutput struct{ *pulumi.OutputState }

func (JobMapOutput) ElementType

func (JobMapOutput) ElementType() reflect.Type

func (JobMapOutput) MapIndex

func (o JobMapOutput) MapIndex(k pulumi.StringInput) JobOutput

func (JobMapOutput) ToJobMapOutput

func (o JobMapOutput) ToJobMapOutput() JobMapOutput

func (JobMapOutput) ToJobMapOutputWithContext

func (o JobMapOutput) ToJobMapOutputWithContext(ctx context.Context) JobMapOutput

func (JobMapOutput) ToOutput added in v6.65.1

func (o JobMapOutput) ToOutput(ctx context.Context) pulumix.Output[map[string]*Job]

type JobOutput

type JobOutput struct{ *pulumi.OutputState }

func (JobOutput) AdditionalExperiments added in v6.23.0

func (o JobOutput) AdditionalExperiments() pulumi.StringArrayOutput

List of experiments that should be used by the job. An example value is `["enableStackdriverAgentMetrics"]`.

func (JobOutput) ElementType

func (JobOutput) ElementType() reflect.Type

func (JobOutput) EnableStreamingEngine added in v6.23.0

func (o JobOutput) EnableStreamingEngine() pulumi.BoolPtrOutput

Enable/disable the use of [Streaming Engine](https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#streaming-engine) for the job. Note that Streaming Engine is enabled by default for pipelines developed against the Beam SDK for Python v2.21.0 or later when using Python 3.

func (JobOutput) IpConfiguration added in v6.23.0

func (o JobOutput) IpConfiguration() pulumi.StringPtrOutput

The configuration for VM IPs. Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.

func (JobOutput) JobId added in v6.23.0

func (o JobOutput) JobId() pulumi.StringOutput

The unique ID of this job.

func (JobOutput) KmsKeyName added in v6.23.0

func (o JobOutput) KmsKeyName() pulumi.StringPtrOutput

The name for the Cloud KMS key for the job. Key format is: `projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY`

func (JobOutput) Labels added in v6.23.0

func (o JobOutput) Labels() pulumi.MapOutput

User labels to be specified for the job. Keys and values should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page. **NOTE**: Google-provided Dataflow templates often provide default labels that begin with `goog-dataflow-provided`. Unless explicitly set in config, these labels will be ignored to prevent diffs on re-apply.

func (JobOutput) MachineType added in v6.23.0

func (o JobOutput) MachineType() pulumi.StringPtrOutput

The machine type to use for the job.

func (JobOutput) MaxWorkers added in v6.23.0

func (o JobOutput) MaxWorkers() pulumi.IntPtrOutput

The number of workers permitted to work on the job. More workers may improve processing speed at additional cost.

func (JobOutput) Name added in v6.23.0

func (o JobOutput) Name() pulumi.StringOutput

A unique name for the resource, required by Dataflow.

func (JobOutput) Network added in v6.23.0

func (o JobOutput) Network() pulumi.StringPtrOutput

The network to which VMs will be assigned. If it is not provided, "default" will be used.

func (JobOutput) OnDelete added in v6.23.0

func (o JobOutput) OnDelete() pulumi.StringPtrOutput

One of "drain" or "cancel". Specifies behavior of deletion during `pulumi destroy`. See above note.

func (JobOutput) Parameters added in v6.23.0

func (o JobOutput) Parameters() pulumi.MapOutput

Key/Value pairs to be passed to the Dataflow job (as used in the template).

func (JobOutput) Project added in v6.23.0

func (o JobOutput) Project() pulumi.StringOutput

The project in which the resource belongs. If it is not provided, the provider project is used.

func (JobOutput) Region added in v6.23.0

func (o JobOutput) Region() pulumi.StringPtrOutput

The region in which the created job should run.

func (JobOutput) ServiceAccountEmail added in v6.23.0

func (o JobOutput) ServiceAccountEmail() pulumi.StringPtrOutput

The Service Account email used to create the job.

func (JobOutput) SkipWaitOnJobTermination added in v6.23.0

func (o JobOutput) SkipWaitOnJobTermination() pulumi.BoolPtrOutput

If set to `true`, Pulumi will treat `DRAINING` and `CANCELLING` as terminal states when deleting the resource, and will remove the resource from Pulumi state and move on. See above note.

func (JobOutput) State added in v6.23.0

func (o JobOutput) State() pulumi.StringOutput

The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)

func (JobOutput) Subnetwork added in v6.23.0

func (o JobOutput) Subnetwork() pulumi.StringPtrOutput

The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK". If the [subnetwork is located in a Shared VPC network](https://cloud.google.com/dataflow/docs/guides/specifying-networks#shared), you must use the complete URL. For example `"googleapis.com/compute/v1/projects/PROJECT_ID/regions/REGION/subnetworks/SUBNET_NAME"`

func (JobOutput) TempGcsLocation added in v6.23.0

func (o JobOutput) TempGcsLocation() pulumi.StringOutput

A writeable location on GCS for the Dataflow job to dump its temporary data.

***

func (JobOutput) TemplateGcsPath added in v6.23.0

func (o JobOutput) TemplateGcsPath() pulumi.StringOutput

The GCS path to the Dataflow job template.

func (JobOutput) ToJobOutput

func (o JobOutput) ToJobOutput() JobOutput

func (JobOutput) ToJobOutputWithContext

func (o JobOutput) ToJobOutputWithContext(ctx context.Context) JobOutput

func (JobOutput) ToOutput added in v6.65.1

func (o JobOutput) ToOutput(ctx context.Context) pulumix.Output[*Job]

func (JobOutput) TransformNameMapping added in v6.23.0

func (o JobOutput) TransformNameMapping() pulumi.MapOutput

Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job. This field is not used outside of update.

func (JobOutput) Type added in v6.23.0

func (o JobOutput) Type() pulumi.StringOutput

The type of this job, selected from the [JobType enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobType)

func (JobOutput) Zone added in v6.23.0

func (o JobOutput) Zone() pulumi.StringPtrOutput

The zone in which the created job should run. If it is not provided, the provider zone is used.

type JobState

type JobState struct {
	// List of experiments that should be used by the job. An example value is `["enableStackdriverAgentMetrics"]`.
	AdditionalExperiments pulumi.StringArrayInput
	// Enable/disable the use of [Streaming Engine](https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#streaming-engine) for the job. Note that Streaming Engine is enabled by default for pipelines developed against the Beam SDK for Python v2.21.0 or later when using Python 3.
	EnableStreamingEngine pulumi.BoolPtrInput
	// The configuration for VM IPs.  Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.
	IpConfiguration pulumi.StringPtrInput
	// The unique ID of this job.
	JobId pulumi.StringPtrInput
	// The name for the Cloud KMS key for the job. Key format is: `projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY`
	KmsKeyName pulumi.StringPtrInput
	// User labels to be specified for the job. Keys and values should follow the restrictions
	// specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page.
	// **NOTE**: Google-provided Dataflow templates often provide default labels that begin with `goog-dataflow-provided`.
	// Unless explicitly set in config, these labels will be ignored to prevent diffs on re-apply.
	Labels pulumi.MapInput
	// The machine type to use for the job.
	MachineType pulumi.StringPtrInput
	// The number of workers permitted to work on the job.  More workers may improve processing speed at additional cost.
	MaxWorkers pulumi.IntPtrInput
	// A unique name for the resource, required by Dataflow.
	Name pulumi.StringPtrInput
	// The network to which VMs will be assigned. If it is not provided, "default" will be used.
	Network pulumi.StringPtrInput
	// One of "drain" or "cancel".  Specifies behavior of deletion during `pulumi destroy`.  See above note.
	OnDelete pulumi.StringPtrInput
	// Key/Value pairs to be passed to the Dataflow job (as used in the template).
	Parameters pulumi.MapInput
	// The project in which the resource belongs. If it is not provided, the provider project is used.
	Project pulumi.StringPtrInput
	// The region in which the created job should run.
	Region pulumi.StringPtrInput
	// The Service Account email used to create the job.
	ServiceAccountEmail pulumi.StringPtrInput
	// If set to `true`, Pulumi will treat `DRAINING` and `CANCELLING` as terminal states when deleting the resource, and will remove the resource from Pulumi state and move on.  See above note.
	SkipWaitOnJobTermination pulumi.BoolPtrInput
	// The current state of the resource, selected from the [JobState enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState)
	State pulumi.StringPtrInput
	// The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK". If the [subnetwork is located in a Shared VPC network](https://cloud.google.com/dataflow/docs/guides/specifying-networks#shared), you must use the complete URL. For example `"googleapis.com/compute/v1/projects/PROJECT_ID/regions/REGION/subnetworks/SUBNET_NAME"`
	Subnetwork pulumi.StringPtrInput
	// A writeable location on GCS for the Dataflow job to dump its temporary data.
	//
	// ***
	TempGcsLocation pulumi.StringPtrInput
	// The GCS path to the Dataflow job template.
	TemplateGcsPath pulumi.StringPtrInput
	// Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job. This field is not used outside of update.
	TransformNameMapping pulumi.MapInput
	// The type of this job, selected from the [JobType enum](https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobType)
	Type pulumi.StringPtrInput
	// The zone in which the created job should run. If it is not provided, the provider zone is used.
	Zone pulumi.StringPtrInput
}

func (JobState) ElementType

func (JobState) ElementType() reflect.Type

type Pipeline added in v6.67.0

type Pipeline struct {
	pulumi.CustomResourceState

	// The timestamp when the pipeline was initially created. Set by the Data Pipelines service.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	CreateTime pulumi.StringOutput `pulumi:"createTime"`
	// The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).
	DisplayName pulumi.StringPtrOutput `pulumi:"displayName"`
	// Number of jobs.
	JobCount pulumi.IntOutput `pulumi:"jobCount"`
	// The timestamp when the pipeline was last modified. Set by the Data Pipelines service.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	LastUpdateTime pulumi.StringOutput `pulumi:"lastUpdateTime"`
	// "The pipeline name. For example': 'projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID."
	// "- PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects."
	// "LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions."
	// "PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location."
	Name pulumi.StringOutput `pulumi:"name"`
	// The sources of the pipeline (for example, Dataplex). The keys and values are set by the corresponding sources during pipeline creation.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	PipelineSources pulumi.StringMapOutput `pulumi:"pipelineSources"`
	// The ID of the project in which the resource belongs.
	// If it is not provided, the provider project is used.
	Project pulumi.StringOutput `pulumi:"project"`
	// A reference to the region where the pipeline runs.
	Region pulumi.StringPtrOutput `pulumi:"region"`
	// Internal scheduling information for a pipeline. If this information is provided, periodic jobs will be created per the schedule. If not, users are responsible for creating jobs externally.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#schedulespec
	// Structure is documented below.
	ScheduleInfo PipelineScheduleInfoPtrOutput `pulumi:"scheduleInfo"`
	// Optional. A service account email to be used with the Cloud Scheduler job. If not specified, the default compute engine service account will be used.
	SchedulerServiceAccountEmail pulumi.StringPtrOutput `pulumi:"schedulerServiceAccountEmail"`
	// The state of the pipeline. When the pipeline is created, the state is set to 'STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state
	// Possible values are: `STATE_UNSPECIFIED`, `STATE_RESUMING`, `STATE_ACTIVE`, `STATE_STOPPING`, `STATE_ARCHIVED`, `STATE_PAUSED`.
	//
	// ***
	State pulumi.StringOutput `pulumi:"state"`
	// The type of the pipeline. This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype
	// Possible values are: `PIPELINE_TYPE_UNSPECIFIED`, `PIPELINE_TYPE_BATCH`, `PIPELINE_TYPE_STREAMING`.
	Type pulumi.StringOutput `pulumi:"type"`
	// Workload information for creating new jobs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#workload
	// Structure is documented below.
	Workload PipelineWorkloadPtrOutput `pulumi:"workload"`
}

The main pipeline entity and all the necessary metadata for launching and managing linked jobs.

To get more information about Pipeline, see:

* [API documentation](https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines)
* How-to Guides

## Example Usage

### Data Pipeline Pipeline

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/serviceAccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		serviceAccount, err := serviceAccount.NewAccount(ctx, "serviceAccount", &serviceAccount.AccountArgs{
			AccountId:   pulumi.String("my-account"),
			DisplayName: pulumi.String("Service Account"),
		})
		if err != nil {
			return err
		}
		_, err = dataflow.NewPipeline(ctx, "primary", &dataflow.PipelineArgs{
			DisplayName: pulumi.String("my-pipeline"),
			Type:        pulumi.String("PIPELINE_TYPE_BATCH"),
			State:       pulumi.String("STATE_ACTIVE"),
			Region:      pulumi.String("us-central1"),
			Workload: &dataflow.PipelineWorkloadArgs{
				DataflowLaunchTemplateRequest: &dataflow.PipelineWorkloadDataflowLaunchTemplateRequestArgs{
					ProjectId: pulumi.String("my-project"),
					GcsPath:   pulumi.String("gs://my-bucket/path"),
					LaunchParameters: &dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{
						JobName: pulumi.String("my-job"),
						Parameters: pulumi.StringMap{
							"name": pulumi.String("wrench"),
						},
						Environment: &dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs{
							NumWorkers:              pulumi.Int(5),
							MaxWorkers:              pulumi.Int(5),
							Zone:                    pulumi.String("us-central1-a"),
							ServiceAccountEmail:     serviceAccount.Email,
							Network:                 pulumi.String("default"),
							TempLocation:            pulumi.String("gs://my-bucket/tmp_dir"),
							BypassTempDirValidation: pulumi.Bool(false),
							MachineType:             pulumi.String("E2"),
							AdditionalUserLabels: pulumi.StringMap{
								"context": pulumi.String("test"),
							},
							WorkerRegion:          pulumi.String("us-central1"),
							WorkerZone:            pulumi.String("us-central1-a"),
							EnableStreamingEngine: pulumi.Bool(false),
						},
						Update: pulumi.Bool(false),
						TransformNameMapping: pulumi.StringMap{
							"name": pulumi.String("wrench"),
						},
					},
					Location: pulumi.String("us-central1"),
				},
			},
			ScheduleInfo: &dataflow.PipelineScheduleInfoArgs{
				Schedule: pulumi.String("* */2 * * *"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

```

## Import

Pipeline can be imported using any of these accepted formats:

```sh

$ pulumi import gcp:dataflow/pipeline:Pipeline default projects/{{project}}/locations/{{region}}/pipelines/{{name}}

```

```sh

$ pulumi import gcp:dataflow/pipeline:Pipeline default {{project}}/{{region}}/{{name}}

```

```sh

$ pulumi import gcp:dataflow/pipeline:Pipeline default {{region}}/{{name}}

```

```sh

$ pulumi import gcp:dataflow/pipeline:Pipeline default {{name}}

```

func GetPipeline added in v6.67.0

func GetPipeline(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *PipelineState, opts ...pulumi.ResourceOption) (*Pipeline, error)

GetPipeline gets an existing Pipeline resource's state with the given name, ID, and optional state properties that are used to uniquely qualify the lookup (nil if not required).
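
A minimal lookup sketch for reference; the fully qualified pipeline ID below is a hypothetical placeholder, and `nil` state means no extra qualification is supplied:

```go
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Look up an existing pipeline by its fully qualified ID. Passing a
		// *PipelineState instead of nil would further qualify the lookup.
		existing, err := dataflow.GetPipeline(ctx, "existing",
			pulumi.ID("projects/my-project/locations/us-central1/pipelines/my-pipeline"), nil)
		if err != nil {
			return err
		}
		// Export a property of the looked-up resource.
		ctx.Export("jobCount", existing.JobCount)
		return nil
	})
}
```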

func NewPipeline added in v6.67.0

func NewPipeline(ctx *pulumi.Context,
	name string, args *PipelineArgs, opts ...pulumi.ResourceOption) (*Pipeline, error)

NewPipeline registers a new resource with the given unique name, arguments, and options.

func (*Pipeline) ElementType added in v6.67.0

func (*Pipeline) ElementType() reflect.Type

func (*Pipeline) ToOutput added in v6.67.0

func (i *Pipeline) ToOutput(ctx context.Context) pulumix.Output[*Pipeline]

func (*Pipeline) ToPipelineOutput added in v6.67.0

func (i *Pipeline) ToPipelineOutput() PipelineOutput

func (*Pipeline) ToPipelineOutputWithContext added in v6.67.0

func (i *Pipeline) ToPipelineOutputWithContext(ctx context.Context) PipelineOutput

type PipelineArgs added in v6.67.0

type PipelineArgs struct {
	// The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).
	DisplayName pulumi.StringPtrInput
	// "The pipeline name. For example': 'projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID."
	// "- PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects."
	// "LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions."
	// "PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location."
	Name pulumi.StringPtrInput
	// The sources of the pipeline (for example, Dataplex). The keys and values are set by the corresponding sources during pipeline creation.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	PipelineSources pulumi.StringMapInput
	// The ID of the project in which the resource belongs.
	// If it is not provided, the provider project is used.
	Project pulumi.StringPtrInput
	// A reference to the region where the pipeline runs.
	Region pulumi.StringPtrInput
	// Internal scheduling information for a pipeline. If this information is provided, periodic jobs will be created per the schedule. If not, users are responsible for creating jobs externally.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#schedulespec
	// Structure is documented below.
	ScheduleInfo PipelineScheduleInfoPtrInput
	// Optional. A service account email to be used with the Cloud Scheduler job. If not specified, the default compute engine service account will be used.
	SchedulerServiceAccountEmail pulumi.StringPtrInput
	// The state of the pipeline. When the pipeline is created, the state is set to 'STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state
	// Possible values are: `STATE_UNSPECIFIED`, `STATE_RESUMING`, `STATE_ACTIVE`, `STATE_STOPPING`, `STATE_ARCHIVED`, `STATE_PAUSED`.
	//
	// ***
	State pulumi.StringInput
	// The type of the pipeline. This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype
	// Possible values are: `PIPELINE_TYPE_UNSPECIFIED`, `PIPELINE_TYPE_BATCH`, `PIPELINE_TYPE_STREAMING`.
	Type pulumi.StringInput
	// Workload information for creating new jobs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#workload
	// Structure is documented below.
	Workload PipelineWorkloadPtrInput
}

The set of arguments for constructing a Pipeline resource.
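
Judging from the input types above, only `State` and `Type` are required (they are non-pointer inputs); all other fields are optional. A minimal, illustrative fragment; a real pipeline would normally also set `Region`, `DisplayName`, and `Workload` as in the full example above:

```go
// A minimal PipelineArgs value: only the required fields are set.
// Pass it to dataflow.NewPipeline as shown in the full example above.
args := &dataflow.PipelineArgs{
	State: pulumi.String("STATE_ACTIVE"),
	Type:  pulumi.String("PIPELINE_TYPE_BATCH"),
}
```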

func (PipelineArgs) ElementType added in v6.67.0

func (PipelineArgs) ElementType() reflect.Type

type PipelineArray added in v6.67.0

type PipelineArray []PipelineInput

func (PipelineArray) ElementType added in v6.67.0

func (PipelineArray) ElementType() reflect.Type

func (PipelineArray) ToOutput added in v6.67.0

func (i PipelineArray) ToOutput(ctx context.Context) pulumix.Output[[]*Pipeline]

func (PipelineArray) ToPipelineArrayOutput added in v6.67.0

func (i PipelineArray) ToPipelineArrayOutput() PipelineArrayOutput

func (PipelineArray) ToPipelineArrayOutputWithContext added in v6.67.0

func (i PipelineArray) ToPipelineArrayOutputWithContext(ctx context.Context) PipelineArrayOutput

type PipelineArrayInput added in v6.67.0

type PipelineArrayInput interface {
	pulumi.Input

	ToPipelineArrayOutput() PipelineArrayOutput
	ToPipelineArrayOutputWithContext(context.Context) PipelineArrayOutput
}

PipelineArrayInput is an input type that accepts PipelineArray and PipelineArrayOutput values. You can construct a concrete instance of `PipelineArrayInput` via:

PipelineArray{ PipelineArgs{...} }

type PipelineArrayOutput added in v6.67.0

type PipelineArrayOutput struct{ *pulumi.OutputState }

func (PipelineArrayOutput) ElementType added in v6.67.0

func (PipelineArrayOutput) ElementType() reflect.Type

func (PipelineArrayOutput) Index added in v6.67.0

func (PipelineArrayOutput) ToOutput added in v6.67.0

func (PipelineArrayOutput) ToPipelineArrayOutput added in v6.67.0

func (o PipelineArrayOutput) ToPipelineArrayOutput() PipelineArrayOutput

func (PipelineArrayOutput) ToPipelineArrayOutputWithContext added in v6.67.0

func (o PipelineArrayOutput) ToPipelineArrayOutputWithContext(ctx context.Context) PipelineArrayOutput

type PipelineInput added in v6.67.0

type PipelineInput interface {
	pulumi.Input

	ToPipelineOutput() PipelineOutput
	ToPipelineOutputWithContext(ctx context.Context) PipelineOutput
}

type PipelineMap added in v6.67.0

type PipelineMap map[string]PipelineInput

func (PipelineMap) ElementType added in v6.67.0

func (PipelineMap) ElementType() reflect.Type

func (PipelineMap) ToOutput added in v6.67.0

func (i PipelineMap) ToOutput(ctx context.Context) pulumix.Output[map[string]*Pipeline]

func (PipelineMap) ToPipelineMapOutput added in v6.67.0

func (i PipelineMap) ToPipelineMapOutput() PipelineMapOutput

func (PipelineMap) ToPipelineMapOutputWithContext added in v6.67.0

func (i PipelineMap) ToPipelineMapOutputWithContext(ctx context.Context) PipelineMapOutput

type PipelineMapInput added in v6.67.0

type PipelineMapInput interface {
	pulumi.Input

	ToPipelineMapOutput() PipelineMapOutput
	ToPipelineMapOutputWithContext(context.Context) PipelineMapOutput
}

PipelineMapInput is an input type that accepts PipelineMap and PipelineMapOutput values. You can construct a concrete instance of `PipelineMapInput` via:

PipelineMap{ "key": PipelineArgs{...} }

type PipelineMapOutput added in v6.67.0

type PipelineMapOutput struct{ *pulumi.OutputState }

func (PipelineMapOutput) ElementType added in v6.67.0

func (PipelineMapOutput) ElementType() reflect.Type

func (PipelineMapOutput) MapIndex added in v6.67.0

func (PipelineMapOutput) ToOutput added in v6.67.0

func (PipelineMapOutput) ToPipelineMapOutput added in v6.67.0

func (o PipelineMapOutput) ToPipelineMapOutput() PipelineMapOutput

func (PipelineMapOutput) ToPipelineMapOutputWithContext added in v6.67.0

func (o PipelineMapOutput) ToPipelineMapOutputWithContext(ctx context.Context) PipelineMapOutput

type PipelineOutput added in v6.67.0

type PipelineOutput struct{ *pulumi.OutputState }

func (PipelineOutput) CreateTime added in v6.67.0

func (o PipelineOutput) CreateTime() pulumi.StringOutput

The timestamp when the pipeline was initially created. Set by the Data Pipelines service. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

func (PipelineOutput) DisplayName added in v6.67.0

func (o PipelineOutput) DisplayName() pulumi.StringPtrOutput

The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).

func (PipelineOutput) ElementType added in v6.67.0

func (PipelineOutput) ElementType() reflect.Type

func (PipelineOutput) JobCount added in v6.67.0

func (o PipelineOutput) JobCount() pulumi.IntOutput

Number of jobs.

func (PipelineOutput) LastUpdateTime added in v6.67.0

func (o PipelineOutput) LastUpdateTime() pulumi.StringOutput

The timestamp when the pipeline was last modified. Set by the Data Pipelines service. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

func (PipelineOutput) Name added in v6.67.0

"The pipeline name. For example': 'projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID." "- PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects." "LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions." "PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location."

func (PipelineOutput) PipelineSources added in v6.67.0

func (o PipelineOutput) PipelineSources() pulumi.StringMapOutput

The sources of the pipeline (for example, Dataplex). The keys and values are set by the corresponding sources during pipeline creation. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineOutput) Project added in v6.67.0

func (o PipelineOutput) Project() pulumi.StringOutput

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

func (PipelineOutput) Region added in v6.67.0

func (o PipelineOutput) Region() pulumi.StringPtrOutput

A reference to the region where the pipeline runs.

func (PipelineOutput) ScheduleInfo added in v6.67.0

func (o PipelineOutput) ScheduleInfo() PipelineScheduleInfoPtrOutput

Internal scheduling information for a pipeline. If this information is provided, periodic jobs will be created per the schedule. If not, users are responsible for creating jobs externally. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#schedulespec Structure is documented below.

func (PipelineOutput) SchedulerServiceAccountEmail added in v6.67.0

func (o PipelineOutput) SchedulerServiceAccountEmail() pulumi.StringPtrOutput

Optional. A service account email to be used with the Cloud Scheduler job. If not specified, the default compute engine service account will be used.

func (PipelineOutput) State added in v6.67.0

func (o PipelineOutput) State() pulumi.StringOutput

The state of the pipeline. When the pipeline is created, the state is set to 'STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state Possible values are: `STATE_UNSPECIFIED`, `STATE_RESUMING`, `STATE_ACTIVE`, `STATE_STOPPING`, `STATE_ARCHIVED`, `STATE_PAUSED`.

func (PipelineOutput) ToOutput added in v6.67.0

func (PipelineOutput) ToPipelineOutput added in v6.67.0

func (o PipelineOutput) ToPipelineOutput() PipelineOutput

func (PipelineOutput) ToPipelineOutputWithContext added in v6.67.0

func (o PipelineOutput) ToPipelineOutputWithContext(ctx context.Context) PipelineOutput

func (PipelineOutput) Type added in v6.67.0

func (o PipelineOutput) Type() pulumi.StringOutput

The type of the pipeline. This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype Possible values are: `PIPELINE_TYPE_UNSPECIFIED`, `PIPELINE_TYPE_BATCH`, `PIPELINE_TYPE_STREAMING`.

func (PipelineOutput) Workload added in v6.67.0

func (o PipelineOutput) Workload() PipelineWorkloadPtrOutput

Workload information for creating new jobs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#workload Structure is documented below.

type PipelineScheduleInfo added in v6.67.0

type PipelineScheduleInfo struct {
	// (Output)
	// When the next Scheduler job is going to run.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	NextJobTime *string `pulumi:"nextJobTime"`
	// Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
	Schedule *string `pulumi:"schedule"`
	// Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
	TimeZone *string `pulumi:"timeZone"`
}
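
`NextJobTime` is output-only, so only `Schedule` and `TimeZone` are settable. A small fragment with illustrative values that slots into the `ScheduleInfo` field of `PipelineArgs`:

```go
scheduleInfo := &dataflow.PipelineScheduleInfoArgs{
	// Unix-cron: run at minute 0 of every second hour.
	Schedule: pulumi.String("0 */2 * * *"),
	// Cloud Scheduler time zone ID; UTC is assumed when empty.
	TimeZone: pulumi.String("America/New_York"),
}
```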

type PipelineScheduleInfoArgs added in v6.67.0

type PipelineScheduleInfoArgs struct {
	// (Output)
	// When the next Scheduler job is going to run.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	NextJobTime pulumi.StringPtrInput `pulumi:"nextJobTime"`
	// Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
	Schedule pulumi.StringPtrInput `pulumi:"schedule"`
	// Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
	TimeZone pulumi.StringPtrInput `pulumi:"timeZone"`
}

func (PipelineScheduleInfoArgs) ElementType added in v6.67.0

func (PipelineScheduleInfoArgs) ElementType() reflect.Type

func (PipelineScheduleInfoArgs) ToOutput added in v6.67.0

func (PipelineScheduleInfoArgs) ToPipelineScheduleInfoOutput added in v6.67.0

func (i PipelineScheduleInfoArgs) ToPipelineScheduleInfoOutput() PipelineScheduleInfoOutput

func (PipelineScheduleInfoArgs) ToPipelineScheduleInfoOutputWithContext added in v6.67.0

func (i PipelineScheduleInfoArgs) ToPipelineScheduleInfoOutputWithContext(ctx context.Context) PipelineScheduleInfoOutput

func (PipelineScheduleInfoArgs) ToPipelineScheduleInfoPtrOutput added in v6.67.0

func (i PipelineScheduleInfoArgs) ToPipelineScheduleInfoPtrOutput() PipelineScheduleInfoPtrOutput

func (PipelineScheduleInfoArgs) ToPipelineScheduleInfoPtrOutputWithContext added in v6.67.0

func (i PipelineScheduleInfoArgs) ToPipelineScheduleInfoPtrOutputWithContext(ctx context.Context) PipelineScheduleInfoPtrOutput

type PipelineScheduleInfoInput added in v6.67.0

type PipelineScheduleInfoInput interface {
	pulumi.Input

	ToPipelineScheduleInfoOutput() PipelineScheduleInfoOutput
	ToPipelineScheduleInfoOutputWithContext(context.Context) PipelineScheduleInfoOutput
}

PipelineScheduleInfoInput is an input type that accepts PipelineScheduleInfoArgs and PipelineScheduleInfoOutput values. You can construct a concrete instance of `PipelineScheduleInfoInput` via:

PipelineScheduleInfoArgs{...}

type PipelineScheduleInfoOutput added in v6.67.0

type PipelineScheduleInfoOutput struct{ *pulumi.OutputState }

func (PipelineScheduleInfoOutput) ElementType added in v6.67.0

func (PipelineScheduleInfoOutput) ElementType() reflect.Type

func (PipelineScheduleInfoOutput) NextJobTime added in v6.67.0

(Output) When the next Scheduler job is going to run. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

func (PipelineScheduleInfoOutput) Schedule added in v6.67.0

Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.

func (PipelineScheduleInfoOutput) TimeZone added in v6.67.0

Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.

func (PipelineScheduleInfoOutput) ToOutput added in v6.67.0

func (PipelineScheduleInfoOutput) ToPipelineScheduleInfoOutput added in v6.67.0

func (o PipelineScheduleInfoOutput) ToPipelineScheduleInfoOutput() PipelineScheduleInfoOutput

func (PipelineScheduleInfoOutput) ToPipelineScheduleInfoOutputWithContext added in v6.67.0

func (o PipelineScheduleInfoOutput) ToPipelineScheduleInfoOutputWithContext(ctx context.Context) PipelineScheduleInfoOutput

func (PipelineScheduleInfoOutput) ToPipelineScheduleInfoPtrOutput added in v6.67.0

func (o PipelineScheduleInfoOutput) ToPipelineScheduleInfoPtrOutput() PipelineScheduleInfoPtrOutput

func (PipelineScheduleInfoOutput) ToPipelineScheduleInfoPtrOutputWithContext added in v6.67.0

func (o PipelineScheduleInfoOutput) ToPipelineScheduleInfoPtrOutputWithContext(ctx context.Context) PipelineScheduleInfoPtrOutput

type PipelineScheduleInfoPtrInput added in v6.67.0

type PipelineScheduleInfoPtrInput interface {
	pulumi.Input

	ToPipelineScheduleInfoPtrOutput() PipelineScheduleInfoPtrOutput
	ToPipelineScheduleInfoPtrOutputWithContext(context.Context) PipelineScheduleInfoPtrOutput
}

PipelineScheduleInfoPtrInput is an input type that accepts PipelineScheduleInfoArgs, PipelineScheduleInfoPtr and PipelineScheduleInfoPtrOutput values. You can construct a concrete instance of `PipelineScheduleInfoPtrInput` via:

        PipelineScheduleInfoArgs{...}

or:

        nil

func PipelineScheduleInfoPtr added in v6.67.0

func PipelineScheduleInfoPtr(v *PipelineScheduleInfoArgs) PipelineScheduleInfoPtrInput

type PipelineScheduleInfoPtrOutput added in v6.67.0

type PipelineScheduleInfoPtrOutput struct{ *pulumi.OutputState }

func (PipelineScheduleInfoPtrOutput) Elem added in v6.67.0

func (PipelineScheduleInfoPtrOutput) ElementType added in v6.67.0

func (PipelineScheduleInfoPtrOutput) NextJobTime added in v6.67.0

(Output) When the next Scheduler job is going to run. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

func (PipelineScheduleInfoPtrOutput) Schedule added in v6.67.0

Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.

func (PipelineScheduleInfoPtrOutput) TimeZone added in v6.67.0

Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.

func (PipelineScheduleInfoPtrOutput) ToOutput added in v6.67.0

func (PipelineScheduleInfoPtrOutput) ToPipelineScheduleInfoPtrOutput added in v6.67.0

func (o PipelineScheduleInfoPtrOutput) ToPipelineScheduleInfoPtrOutput() PipelineScheduleInfoPtrOutput

func (PipelineScheduleInfoPtrOutput) ToPipelineScheduleInfoPtrOutputWithContext added in v6.67.0

func (o PipelineScheduleInfoPtrOutput) ToPipelineScheduleInfoPtrOutputWithContext(ctx context.Context) PipelineScheduleInfoPtrOutput

type PipelineState added in v6.67.0

type PipelineState struct {
	// The timestamp when the pipeline was initially created. Set by the Data Pipelines service.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	CreateTime pulumi.StringPtrInput
	// The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).
	DisplayName pulumi.StringPtrInput
	// Number of jobs.
	JobCount pulumi.IntPtrInput
	// The timestamp when the pipeline was last modified. Set by the Data Pipelines service.
	// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
	LastUpdateTime pulumi.StringPtrInput
	// "The pipeline name. For example': 'projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID."
	// "- PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects."
	// "LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions."
	// "PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location."
	Name pulumi.StringPtrInput
	// The sources of the pipeline (for example, Dataplex). The keys and values are set by the corresponding sources during pipeline creation.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	PipelineSources pulumi.StringMapInput
	// The ID of the project in which the resource belongs.
	// If it is not provided, the provider project is used.
	Project pulumi.StringPtrInput
	// A reference to the region where the pipeline runs.
	Region pulumi.StringPtrInput
	// Internal scheduling information for a pipeline. If this information is provided, periodic jobs will be created per the schedule. If not, users are responsible for creating jobs externally.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#schedulespec
	// Structure is documented below.
	ScheduleInfo PipelineScheduleInfoPtrInput
	// Optional. A service account email to be used with the Cloud Scheduler job. If not specified, the default compute engine service account will be used.
	SchedulerServiceAccountEmail pulumi.StringPtrInput
	// The state of the pipeline. When the pipeline is created, the state is set to 'STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state
	// Possible values are: `STATE_UNSPECIFIED`, `STATE_RESUMING`, `STATE_ACTIVE`, `STATE_STOPPING`, `STATE_ARCHIVED`, `STATE_PAUSED`.
	//
	// ***
	State pulumi.StringPtrInput
	// The type of the pipeline. This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype
	// Possible values are: `PIPELINE_TYPE_UNSPECIFIED`, `PIPELINE_TYPE_BATCH`, `PIPELINE_TYPE_STREAMING`.
	Type pulumi.StringPtrInput
	// Workload information for creating new jobs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#workload
	// Structure is documented below.
	Workload PipelineWorkloadPtrInput
}

func (PipelineState) ElementType added in v6.67.0

func (PipelineState) ElementType() reflect.Type

type PipelineWorkload added in v6.67.0

type PipelineWorkload struct {
	// Template information and additional parameters needed to launch a Dataflow job using the flex launch API.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplaterequest
	// Structure is documented below.
	DataflowFlexTemplateRequest *PipelineWorkloadDataflowFlexTemplateRequest `pulumi:"dataflowFlexTemplateRequest"`
	// Template information and additional parameters needed to launch a Dataflow job using the standard launch API.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplaterequest
	// Structure is documented below.
	DataflowLaunchTemplateRequest *PipelineWorkloadDataflowLaunchTemplateRequest `pulumi:"dataflowLaunchTemplateRequest"`
}

type PipelineWorkloadArgs added in v6.67.0

type PipelineWorkloadArgs struct {
	// Template information and additional parameters needed to launch a Dataflow job using the flex launch API.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplaterequest
	// Structure is documented below.
	DataflowFlexTemplateRequest PipelineWorkloadDataflowFlexTemplateRequestPtrInput `pulumi:"dataflowFlexTemplateRequest"`
	// Template information and additional parameters needed to launch a Dataflow job using the standard launch API.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplaterequest
	// Structure is documented below.
	DataflowLaunchTemplateRequest PipelineWorkloadDataflowLaunchTemplateRequestPtrInput `pulumi:"dataflowLaunchTemplateRequest"`
}

func (PipelineWorkloadArgs) ElementType added in v6.67.0

func (PipelineWorkloadArgs) ElementType() reflect.Type

func (PipelineWorkloadArgs) ToOutput added in v6.67.0

func (PipelineWorkloadArgs) ToPipelineWorkloadOutput added in v6.67.0

func (i PipelineWorkloadArgs) ToPipelineWorkloadOutput() PipelineWorkloadOutput

func (PipelineWorkloadArgs) ToPipelineWorkloadOutputWithContext added in v6.67.0

func (i PipelineWorkloadArgs) ToPipelineWorkloadOutputWithContext(ctx context.Context) PipelineWorkloadOutput

func (PipelineWorkloadArgs) ToPipelineWorkloadPtrOutput added in v6.67.0

func (i PipelineWorkloadArgs) ToPipelineWorkloadPtrOutput() PipelineWorkloadPtrOutput

func (PipelineWorkloadArgs) ToPipelineWorkloadPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadArgs) ToPipelineWorkloadPtrOutputWithContext(ctx context.Context) PipelineWorkloadPtrOutput

type PipelineWorkloadDataflowFlexTemplateRequest added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequest struct {
	// Parameter to launch a job from a Flex Template.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplateparameter
	// Structure is documented below.
	LaunchParameter PipelineWorkloadDataflowFlexTemplateRequestLaunchParameter `pulumi:"launchParameter"`
	// The regional endpoint to which to direct the request. For example, us-central1, us-west1.
	Location string `pulumi:"location"`
	// The ID of the Cloud Platform project that the job belongs to.
	ProjectId string `pulumi:"projectId"`
	// If true, the request is validated but not actually executed. Defaults to false.
	ValidateOnly *bool `pulumi:"validateOnly"`
}

type PipelineWorkloadDataflowFlexTemplateRequestArgs added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestArgs struct {
	// Parameter to launch a job from a Flex Template.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplateparameter
	// Structure is documented below.
	LaunchParameter PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterInput `pulumi:"launchParameter"`
	// The regional endpoint to which to direct the request. For example, us-central1, us-west1.
	Location pulumi.StringInput `pulumi:"location"`
	// The ID of the Cloud Platform project that the job belongs to.
	ProjectId pulumi.StringInput `pulumi:"projectId"`
	// If true, the request is validated but not actually executed. Defaults to false.
	ValidateOnly pulumi.BoolPtrInput `pulumi:"validateOnly"`
}

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestOutput added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestOutput() PipelineWorkloadDataflowFlexTemplateRequestOutput

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestOutput

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput() PipelineWorkloadDataflowFlexTemplateRequestPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestArgs) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestPtrOutput

type PipelineWorkloadDataflowFlexTemplateRequestInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestOutput() PipelineWorkloadDataflowFlexTemplateRequestOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestOutput
}

PipelineWorkloadDataflowFlexTemplateRequestInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestArgs and PipelineWorkloadDataflowFlexTemplateRequestOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestInput` via:

PipelineWorkloadDataflowFlexTemplateRequestArgs{...}

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameter added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameter struct {
	// Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
	ContainerSpecGcsPath *string `pulumi:"containerSpecGcsPath"`
	// The runtime environment for the Flex Template job.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexTemplateRuntimeEnvironment
	// Structure is documented below.
	Environment *PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment `pulumi:"environment"`
	// The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.
	JobName string `pulumi:"jobName"`
	// Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	LaunchOptions map[string]string `pulumi:"launchOptions"`
	// The parameters for the Flex Template. Example: {"numWorkers":"5"}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	Parameters map[string]string `pulumi:"parameters"`
	// Use this to pass transform name mappings for streaming update jobs. Example: {"oldTransformName":"newTransformName",...}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	TransformNameMappings map[string]string `pulumi:"transformNameMappings"`
	// Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.
	Update *bool `pulumi:"update"`
}
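
The full example earlier launches through the classic API (`DataflowLaunchTemplateRequest`). For symmetry, here is a hedged fragment of a flex-template workload; the project ID, bucket paths, job name, and the `inputSubscription` template parameter are all placeholders, not values from this package:

```go
workload := &dataflow.PipelineWorkloadArgs{
	DataflowFlexTemplateRequest: &dataflow.PipelineWorkloadDataflowFlexTemplateRequestArgs{
		ProjectId: pulumi.String("my-project"),
		// Regional endpoint to which the launch request is sent.
		Location: pulumi.String("us-central1"),
		LaunchParameter: &dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{
			JobName: pulumi.String("my-flex-job"),
			// Cloud Storage path to a JSON-serialized ContainerSpec file.
			ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/spec.json"),
			// Template parameters; the accepted keys depend on the template.
			Parameters: pulumi.StringMap{
				"inputSubscription": pulumi.String("projects/my-project/subscriptions/my-sub"),
			},
			Environment: &dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs{
				MaxWorkers:   pulumi.Int(5),
				TempLocation: pulumi.String("gs://my-bucket/tmp_dir"),
			},
		},
	},
}
```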

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs struct {
	// Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
	ContainerSpecGcsPath pulumi.StringPtrInput `pulumi:"containerSpecGcsPath"`
	// The runtime environment for the Flex Template job.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexTemplateRuntimeEnvironment
	// Structure is documented below.
	Environment PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput `pulumi:"environment"`
	// The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.
	JobName pulumi.StringInput `pulumi:"jobName"`
	// Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	LaunchOptions pulumi.StringMapInput `pulumi:"launchOptions"`
	// The parameters for the Flex Template. Example: {"numWorkers":"5"}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	Parameters pulumi.StringMapInput `pulumi:"parameters"`
	// Use this to pass transform name mappings for streaming update jobs. Example: {"oldTransformName":"newTransformName",...}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	TransformNameMappings pulumi.StringMapInput `pulumi:"transformNameMappings"`
	// Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.
	Update pulumi.BoolPtrInput `pulumi:"update"`
}

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment struct {
	// Additional experiment flags for the job.
	AdditionalExperiments []string `pulumi:"additionalExperiments"`
	// Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	AdditionalUserLabels map[string]string `pulumi:"additionalUserLabels"`
	// Whether to enable Streaming Engine for the job.
	EnableStreamingEngine *bool `pulumi:"enableStreamingEngine"`
	// Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal
	// Possible values are: `FLEXRS_UNSPECIFIED`, `FLEXRS_SPEED_OPTIMIZED`, `FLEXRS_COST_OPTIMIZED`.
	FlexrsGoal *string `pulumi:"flexrsGoal"`
	// Configuration for VM IPs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration
	// Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.
	IpConfiguration *string `pulumi:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
	KmsKeyName *string `pulumi:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	MachineType *string `pulumi:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	MaxWorkers *int `pulumi:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	Network *string `pulumi:"network"`
	// The initial number of Compute Engine instances for the job.
	NumWorkers *int `pulumi:"numWorkers"`
	// The email address of the service account to run the job as.
	ServiceAccountEmail *string `pulumi:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	Subnetwork *string `pulumi:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation *string `pulumi:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.
	WorkerRegion *string `pulumi:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	WorkerZone *string `pulumi:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
	Zone *string `pulumi:"zone"`
}

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs struct {
	// Additional experiment flags for the job.
	AdditionalExperiments pulumi.StringArrayInput `pulumi:"additionalExperiments"`
	// Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	AdditionalUserLabels pulumi.StringMapInput `pulumi:"additionalUserLabels"`
	// Whether to enable Streaming Engine for the job.
	EnableStreamingEngine pulumi.BoolPtrInput `pulumi:"enableStreamingEngine"`
	// Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal
	// Possible values are: `FLEXRS_UNSPECIFIED`, `FLEXRS_SPEED_OPTIMIZED`, `FLEXRS_COST_OPTIMIZED`.
	FlexrsGoal pulumi.StringPtrInput `pulumi:"flexrsGoal"`
	// Configuration for VM IPs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration
	// Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.
	IpConfiguration pulumi.StringPtrInput `pulumi:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
	KmsKeyName pulumi.StringPtrInput `pulumi:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	MachineType pulumi.StringPtrInput `pulumi:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	MaxWorkers pulumi.IntPtrInput `pulumi:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	Network pulumi.StringPtrInput `pulumi:"network"`
	// The initial number of Compute Engine instances for the job.
	NumWorkers pulumi.IntPtrInput `pulumi:"numWorkers"`
	// The email address of the service account to run the job as.
	ServiceAccountEmail pulumi.StringPtrInput `pulumi:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	Subnetwork pulumi.StringPtrInput `pulumi:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation pulumi.StringPtrInput `pulumi:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.
	WorkerRegion pulumi.StringPtrInput `pulumi:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	WorkerZone pulumi.StringPtrInput `pulumi:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
	Zone pulumi.StringPtrInput `pulumi:"zone"`
}

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput() PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput
}

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs and PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentInput` via:

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs{...}

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) AdditionalExperiments added in v6.67.0

Additional experiment flags for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) AdditionalUserLabels added in v6.67.0

Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) EnableStreamingEngine added in v6.67.0

Whether to enable Streaming Engine for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) FlexrsGoal added in v6.67.0

Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal Possible values are: `FLEXRS_UNSPECIFIED`, `FLEXRS_SPEED_OPTIMIZED`, `FLEXRS_COST_OPTIMIZED`.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) IpConfiguration added in v6.67.0

Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) KmsKeyName added in v6.67.0

Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) MachineType added in v6.67.0

The machine type to use for the job. Defaults to the value from the template if not specified.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) MaxWorkers added in v6.67.0

The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) Network added in v6.67.0

Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) NumWorkers added in v6.67.0

The initial number of Compute Engine instances for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ServiceAccountEmail added in v6.67.0

The email address of the service account to run the job as.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) Subnetwork added in v6.67.0

Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) TempLocation added in v6.67.0

The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) WorkerRegion added in v6.67.0

The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) WorkerZone added in v6.67.0

The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutput) Zone added in v6.67.0

The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput() PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput
}

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs, PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtr and PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput` via:

        PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs{...}

or:

        nil
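As a minimal sketch of both construction paths (the machine type and worker count are placeholder values; the import paths are the usual ones for pulumi-gcp v6 and the Pulumi v3 SDK):

	package main

	import (
		"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataflow"
		"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	)

	func main() {
		var env dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrInput

		// An Args literal satisfies the PtrInput interface directly.
		env = dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentArgs{
			MachineType: pulumi.String("n1-standard-2"),
			MaxWorkers:  pulumi.Int(10),
		}

		// When no environment overrides are needed, nil is also accepted.
		env = nil
		_ = env
	}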

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) AdditionalExperiments added in v6.67.0

Additional experiment flags for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) AdditionalUserLabels added in v6.67.0

Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) EnableStreamingEngine added in v6.67.0

Whether to enable Streaming Engine for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) FlexrsGoal added in v6.67.0

Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal Possible values are: `FLEXRS_UNSPECIFIED`, `FLEXRS_SPEED_OPTIMIZED`, `FLEXRS_COST_OPTIMIZED`.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) IpConfiguration added in v6.67.0

Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) KmsKeyName added in v6.67.0

Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) MachineType added in v6.67.0

The machine type to use for the job. Defaults to the value from the template if not specified.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) MaxWorkers added in v6.67.0

The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) Network added in v6.67.0

Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) NumWorkers added in v6.67.0

The initial number of Compute Engine instances for the job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) ServiceAccountEmail added in v6.67.0

The email address of the service account to run the job as.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) Subnetwork added in v6.67.0

Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) TempLocation added in v6.67.0

The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) WorkerRegion added in v6.67.0

The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, the default is the control plane's region.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) WorkerZone added in v6.67.0

The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentPtrOutput) Zone added in v6.67.0

The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput() PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput
}

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs and PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterInput` via:

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{...}
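For example, a sketch of a launch parameter wired from the fields documented below (same imports as the earlier sketch; the bucket, job name, and parameter values are placeholders, and the Parameters field is assumed to take plain string key/value pairs):

	launchParam := dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{
		// Cloud Storage path to a file holding a JSON-serialized ContainerSpec.
		ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/spec.json"),
		JobName:              pulumi.String("example-flex-job"),
		// Template parameters are passed as string key/value pairs.
		Parameters: pulumi.StringMap{
			"numWorkers": pulumi.String("5"),
		},
	}
	_ = launchParam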

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ContainerSpecGcsPath added in v6.67.0

Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) Environment added in v6.67.0

The runtime environment for the Flex Template job. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexTemplateRuntimeEnvironment Structure is documented below.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) JobName added in v6.67.0

The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) LaunchOptions added in v6.67.0

Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) Parameters added in v6.67.0

The parameters for the Flex Template, as an object containing a list of "key": value pairs. Example: {"numWorkers":"5"}.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) TransformNameMappings added in v6.67.0

Use this to pass transform name mappings for streaming update jobs, as an object containing a list of "key": value pairs. Example: {"oldTransformName":"newTransformName",...}.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutput) Update added in v6.67.0

Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput() PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput
}

PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs, PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtr and PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrInput` via:

        PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{...}

or:

        nil
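Besides a plain Args literal or nil, the generated Ptr helper mentioned above wraps a pointer to Args. A brief sketch, with the same imports and placeholder values as the earlier sketches:

	lp := dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtr(
		&dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{
			ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/spec.json"),
			JobName:              pulumi.String("example-flex-job"),
		},
	)
	_ = lp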

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ContainerSpecGcsPath added in v6.67.0

Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) Environment added in v6.67.0

The runtime environment for the Flex Template job. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexTemplateRuntimeEnvironment Structure is documented below.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) JobName added in v6.67.0

The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) LaunchOptions added in v6.67.0

Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) Parameters added in v6.67.0

The parameters for the Flex Template, as an object containing a list of "key": value pairs. Example: {"numWorkers":"5"}.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) TransformNameMappings added in v6.67.0

Use this to pass transform name mappings for streaming update jobs, as an object containing a list of "key": value pairs. Example: {"oldTransformName":"newTransformName",...}.

func (PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterPtrOutput) Update added in v6.67.0

Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.

type PipelineWorkloadDataflowFlexTemplateRequestOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) LaunchParameter added in v6.67.0

Parameter to launch a job from a Flex Template. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplateparameter Structure is documented below.

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) Location added in v6.67.0

The regional endpoint to which to direct the request. For example, us-central1, us-west1.

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ProjectId added in v6.67.0

The ID of the Cloud Platform project that the job belongs to.

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestOutput added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestOutput() PipelineWorkloadDataflowFlexTemplateRequestOutput

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestOutput

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput() PipelineWorkloadDataflowFlexTemplateRequestPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestOutput) ValidateOnly added in v6.67.0

If true, the request is validated but not actually executed. Defaults to false.
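A hedged sketch of a validate-only request, following the field names documented above (project, location, bucket, and job name are placeholders; imports as in the earlier sketches):

	req := dataflow.PipelineWorkloadDataflowFlexTemplateRequestArgs{
		ProjectId: pulumi.String("my-project"),
		Location:  pulumi.String("us-central1"),
		LaunchParameter: &dataflow.PipelineWorkloadDataflowFlexTemplateRequestLaunchParameterArgs{
			ContainerSpecGcsPath: pulumi.String("gs://my-bucket/templates/spec.json"),
			JobName:              pulumi.String("example-flex-job"),
		},
		// Validate the request without actually launching a job.
		ValidateOnly: pulumi.Bool(true),
	}
	_ = req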

type PipelineWorkloadDataflowFlexTemplateRequestPtrInput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput() PipelineWorkloadDataflowFlexTemplateRequestPtrOutput
	ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext(context.Context) PipelineWorkloadDataflowFlexTemplateRequestPtrOutput
}

PipelineWorkloadDataflowFlexTemplateRequestPtrInput is an input type that accepts PipelineWorkloadDataflowFlexTemplateRequestArgs, PipelineWorkloadDataflowFlexTemplateRequestPtr and PipelineWorkloadDataflowFlexTemplateRequestPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowFlexTemplateRequestPtrInput` via:

        PipelineWorkloadDataflowFlexTemplateRequestArgs{...}

or:

        nil

type PipelineWorkloadDataflowFlexTemplateRequestPtrOutput added in v6.67.0

type PipelineWorkloadDataflowFlexTemplateRequestPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) LaunchParameter added in v6.67.0

Parameter to launch a job from a Flex Template. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplateparameter Structure is documented below.

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) Location added in v6.67.0

The regional endpoint to which to direct the request. For example, us-central1, us-west1.

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ProjectId added in v6.67.0

The ID of the Cloud Platform project that the job belongs to.

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ToPipelineWorkloadDataflowFlexTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowFlexTemplateRequestPtrOutput

func (PipelineWorkloadDataflowFlexTemplateRequestPtrOutput) ValidateOnly added in v6.67.0

If true, the request is validated but not actually executed. Defaults to false.

type PipelineWorkloadDataflowLaunchTemplateRequest added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequest struct {
	// A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.
	GcsPath *string `pulumi:"gcsPath"`
	// The parameters of the template to launch. This should be part of the body of the POST request.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplateparameters
	// Structure is documented below.
	LaunchParameters *PipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters `pulumi:"launchParameters"`
	// The regional endpoint to which to direct the request.
	Location *string `pulumi:"location"`
	// The ID of the Cloud Platform project that the job belongs to.
	ProjectId string `pulumi:"projectId"`
	// (Optional) If true, the request is validated but not actually executed. Defaults to false.
	ValidateOnly *bool `pulumi:"validateOnly"`
}

type PipelineWorkloadDataflowLaunchTemplateRequestArgs added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestArgs struct {
	// A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.
	GcsPath pulumi.StringPtrInput `pulumi:"gcsPath"`
	// The parameters of the template to launch. This should be part of the body of the POST request.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplateparameters
	// Structure is documented below.
	LaunchParameters PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrInput `pulumi:"launchParameters"`
	// The regional endpoint to which to direct the request.
	Location pulumi.StringPtrInput `pulumi:"location"`
	// The ID of the Cloud Platform project that the job belongs to.
	ProjectId pulumi.StringInput `pulumi:"projectId"`
	// (Optional) If true, the request is validated but not actually executed. Defaults to false.
	ValidateOnly pulumi.BoolPtrInput `pulumi:"validateOnly"`
}
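As a sketch, launching a classic template through these Args (project and bucket values are placeholders; gs://dataflow-templates/latest/Word_Count is a commonly cited Google-provided template path, assumed here for illustration):

	req := dataflow.PipelineWorkloadDataflowLaunchTemplateRequestArgs{
		ProjectId: pulumi.String("my-project"),
		Location:  pulumi.String("us-central1"),
		// Classic templates are referenced by a gs:// path.
		GcsPath: pulumi.String("gs://dataflow-templates/latest/Word_Count"),
		LaunchParameters: &dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{
			JobName: pulumi.String("example-template-job"),
			Parameters: pulumi.StringMap{
				"inputFile": pulumi.String("gs://my-bucket/input.txt"),
				"output":    pulumi.String("gs://my-bucket/output"),
			},
		},
	}
	_ = req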

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestOutput added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestOutput() PipelineWorkloadDataflowLaunchTemplateRequestOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput() PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput

type PipelineWorkloadDataflowLaunchTemplateRequestInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestOutput() PipelineWorkloadDataflowLaunchTemplateRequestOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestArgs and PipelineWorkloadDataflowLaunchTemplateRequestOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestInput` via:

PipelineWorkloadDataflowLaunchTemplateRequestArgs{...}

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters struct {
	// The runtime environment for the job.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#RuntimeEnvironment
	// Structure is documented below.
	Environment *PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment `pulumi:"environment"`
	// The job name to use for the created job.
	JobName string `pulumi:"jobName"`
	// The runtime parameters to pass to the job.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	Parameters map[string]string `pulumi:"parameters"`
	// Map from transform name prefixes in the job being replaced to the corresponding name prefixes in the new job. Only applicable when updating a pipeline.
	// An object containing a list of "key": value pairs, e.g. { "oldTransformName": "newTransformName" }.
	TransformNameMapping map[string]string `pulumi:"transformNameMapping"`
	// If set, replaces the existing pipeline that has the name specified by jobName with this pipeline, preserving state.
	Update *bool `pulumi:"update"`
}

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs struct {
	// The runtime environment for the job.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#RuntimeEnvironment
	// Structure is documented below.
	Environment PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrInput `pulumi:"environment"`
	// The job name to use for the created job.
	JobName pulumi.StringInput `pulumi:"jobName"`
	// The runtime parameters to pass to the job.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	Parameters pulumi.StringMapInput `pulumi:"parameters"`
	// Map from transform name prefixes in the job being replaced to the corresponding name prefixes in the new job. Only applicable when updating a pipeline.
	// An object containing a list of "key": value pairs, e.g. { "oldTransformName": "newTransformName" }.
	TransformNameMapping pulumi.StringMapInput `pulumi:"transformNameMapping"`
	// If set, replaces the existing pipeline that has the name specified by jobName with this pipeline, preserving state.
	Update pulumi.BoolPtrInput `pulumi:"update"`
}
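For a streaming update, a minimal sketch combining Update with TransformNameMapping (job and transform names are placeholders; imports as in the earlier sketches):

	params := dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{
		// When updating, the job name must match the running job.
		JobName: pulumi.String("example-template-job"),
		// Replace the running pipeline of the same name, preserving state.
		Update: pulumi.Bool(true),
		// Map old transform name prefixes to their replacements in the new job.
		TransformNameMapping: pulumi.StringMap{
			"oldTransformName": pulumi.String("newTransformName"),
		},
	}
	_ = params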

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment struct {
	// Additional experiment flags for the job.
	AdditionalExperiments []string `pulumi:"additionalExperiments"`
	// Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	AdditionalUserLabels map[string]string `pulumi:"additionalUserLabels"`
	// Whether to bypass the safety checks for the job's temporary directory. Use with caution.
	BypassTempDirValidation *bool `pulumi:"bypassTempDirValidation"`
	// Whether to enable Streaming Engine for the job.
	EnableStreamingEngine *bool `pulumi:"enableStreamingEngine"`
	// Configuration for VM IPs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration
	// Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.
	IpConfiguration *string `pulumi:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY.
	KmsKeyName *string `pulumi:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	MachineType *string `pulumi:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	MaxWorkers *int `pulumi:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	Network *string `pulumi:"network"`
	// The initial number of Compute Engine instances for the job.
	NumWorkers *int `pulumi:"numWorkers"`
	// The email address of the service account to run the job as.
	ServiceAccountEmail *string `pulumi:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	Subnetwork *string `pulumi:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation *string `pulumi:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, the default is the control plane's region.
	WorkerRegion *string `pulumi:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	WorkerZone *string `pulumi:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
	Zone *string `pulumi:"zone"`
}

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs struct {
	// Additional experiment flags for the job.
	AdditionalExperiments pulumi.StringArrayInput `pulumi:"additionalExperiments"`
	// Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	AdditionalUserLabels pulumi.StringMapInput `pulumi:"additionalUserLabels"`
	// Whether to bypass the safety checks for the job's temporary directory. Use with caution.
	BypassTempDirValidation pulumi.BoolPtrInput `pulumi:"bypassTempDirValidation"`
	// Whether to enable Streaming Engine for the job.
	EnableStreamingEngine pulumi.BoolPtrInput `pulumi:"enableStreamingEngine"`
	// Configuration for VM IPs.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration
	// Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.
	IpConfiguration pulumi.StringPtrInput `pulumi:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY.
	KmsKeyName pulumi.StringPtrInput `pulumi:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	MachineType pulumi.StringPtrInput `pulumi:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	MaxWorkers pulumi.IntPtrInput `pulumi:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	Network pulumi.StringPtrInput `pulumi:"network"`
	// The initial number of Compute Engine instances for the job.
	NumWorkers pulumi.IntPtrInput `pulumi:"numWorkers"`
	// The email address of the service account to run the job as.
	ServiceAccountEmail pulumi.StringPtrInput `pulumi:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	Subnetwork pulumi.StringPtrInput `pulumi:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	TempLocation pulumi.StringPtrInput `pulumi:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, the default is the control plane's region.
	WorkerRegion pulumi.StringPtrInput `pulumi:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	WorkerZone pulumi.StringPtrInput `pulumi:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
	Zone pulumi.StringPtrInput `pulumi:"zone"`
}
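A sketch of worker placement and networking overrides via these Args (bucket, region, and subnetwork values are placeholders; workerRegion and workerZone are mutually exclusive, so only one is set):

	env := dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs{
		NumWorkers:   pulumi.Int(2),
		MaxWorkers:   pulumi.Int(10),
		TempLocation: pulumi.String("gs://my-bucket/tmp"),
		WorkerRegion: pulumi.String("us-west1"),
		// Keep workers off public IPs and pin them to a specific subnetwork.
		IpConfiguration: pulumi.String("WORKER_IP_PRIVATE"),
		Subnetwork:      pulumi.String("regions/us-west1/subnetworks/my-subnet"),
	}
	_ = env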

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutputWithContext added in v6.67.0

func (i PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput() PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs and PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentInput` via:

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs{...}

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) AdditionalExperiments added in v6.67.0

Additional experiment flags for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) AdditionalUserLabels added in v6.67.0

Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) BypassTempDirValidation added in v6.67.0

Whether to bypass the safety checks for the job's temporary directory. Use with caution.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) EnableStreamingEngine added in v6.67.0

Whether to enable Streaming Engine for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) IpConfiguration added in v6.67.0

Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) KmsKeyName added in v6.67.0

Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) MachineType added in v6.67.0

The machine type to use for the job. Defaults to the value from the template if not specified.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) MaxWorkers added in v6.67.0

The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) Network added in v6.67.0

Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) NumWorkers added in v6.67.0

The initial number of Compute Engine instances for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ServiceAccountEmail added in v6.67.0

The email address of the service account to run the job as.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) Subnetwork added in v6.67.0

Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) TempLocation added in v6.67.0

The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) WorkerRegion added in v6.67.0

The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, the default is the control plane's region.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) WorkerZone added in v6.67.0

The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutput) Zone added in v6.67.0

The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput() PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs, PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtr and PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrInput` via:

        PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentArgs{...}

or:

        nil

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) AdditionalExperiments added in v6.67.0

Additional experiment flags for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) AdditionalUserLabels added in v6.67.0

Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) BypassTempDirValidation added in v6.67.0

Whether to bypass the safety checks for the job's temporary directory. Use with caution.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) EnableStreamingEngine added in v6.67.0

Whether to enable Streaming Engine for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) IpConfiguration added in v6.67.0

Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values are: `WORKER_IP_UNSPECIFIED`, `WORKER_IP_PUBLIC`, `WORKER_IP_PRIVATE`.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) KmsKeyName added in v6.67.0

Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) MachineType added in v6.67.0

The machine type to use for the job. Defaults to the value from the template if not specified.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) MaxWorkers added in v6.67.0

The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) Network added in v6.67.0

Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) NumWorkers added in v6.67.0

The initial number of Compute Engine instances for the job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) ServiceAccountEmail added in v6.67.0

The email address of the service account to run the job as.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) Subnetwork added in v6.67.0

Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) TempLocation added in v6.67.0

The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutputWithContext added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) WorkerRegion added in v6.67.0

The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, the default is the control plane's region.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) WorkerZone added in v6.67.0

The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentPtrOutput) Zone added in v6.67.0

The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput() PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs and PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersInput` via:

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{...}

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) Environment added in v6.67.0

The runtime environment for the job. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#RuntimeEnvironment Structure is documented below.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) JobName added in v6.67.0

The job name to use for the created job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) Parameters added in v6.67.0

The runtime parameters to pass to the job. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) TransformNameMapping added in v6.67.0

Map from transform name prefixes in the job being replaced to the corresponding name prefixes in the new job. Only applicable when updating a pipeline. An object containing a list of "key": value pairs, e.g. { "oldTransformName": "newTransformName" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutput) Update added in v6.67.0

If set, replaces the existing pipeline that has the name specified by jobName with this pipeline, preserving state.

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput() PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs, PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtr and PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrInput` via:

        PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{...}

or:

        nil

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) Environment added in v6.67.0

The runtime environment for the job. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#RuntimeEnvironment Structure is documented below.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) JobName added in v6.67.0

The job name to use for the created job.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) Parameters added in v6.67.0

The runtime parameters to pass to the job. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) TransformNameMapping added in v6.67.0

Map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. Only applicable when updating a pipeline. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.

func (PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersPtrOutput) Update added in v6.67.0

If set, the existing pipeline matching the name given by jobName is replaced with this pipeline, preserving state.
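
As a hedged sketch of consuming these getters (lp stands in for any value of this Ptr output type; the *string element seen by ApplyT follows from JobName returning a string pointer through the Ptr output):

	// Unwrap the job name with an explicit nil check for the unset case.
	jobName := lp.JobName().ApplyT(func(name *string) string {
		if name == nil {
			return "" // launch parameters, or the job name, were not set
		}
		return *name
	}).(pulumi.StringOutput)
	_ = jobName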

type PipelineWorkloadDataflowLaunchTemplateRequestOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) GcsPath added in v6.67.0

A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) LaunchParameters added in v6.67.0

The parameters of the template to launch. This should be part of the body of the POST request. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplateparameters Structure is documented below.

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) Location added in v6.67.0

The regional endpoint to which to direct the request.

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ProjectId added in v6.67.0

The ID of the Cloud Platform project that the job belongs to.

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestOutput added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestOutput() PipelineWorkloadDataflowLaunchTemplateRequestOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput() PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestOutput) ValidateOnly added in v6.67.0

(Optional) If true, the request is validated but not actually executed.

type PipelineWorkloadDataflowLaunchTemplateRequestPtrInput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput() PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput
	ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext(context.Context) PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput
}

PipelineWorkloadDataflowLaunchTemplateRequestPtrInput is an input type that accepts PipelineWorkloadDataflowLaunchTemplateRequestArgs, PipelineWorkloadDataflowLaunchTemplateRequestPtr and PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput values. You can construct a concrete instance of `PipelineWorkloadDataflowLaunchTemplateRequestPtrInput` via:

        PipelineWorkloadDataflowLaunchTemplateRequestArgs{...}

or:

        nil
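
A sketch of assembling the request literal, under the same Args-as-PtrInput convention; the project, region, and template path are placeholders, and ValidateOnly accepting a pulumi.Bool is an assumption:

	req := dataflow.PipelineWorkloadDataflowLaunchTemplateRequestArgs{
		ProjectId: pulumi.String("my-project"),
		GcsPath:   pulumi.String("gs://dataflow-templates/latest/Word_Count"), // must begin with gs://
		Location:  pulumi.String("us-central1"),
		// A nested Args literal doubles as the LaunchParameters PtrInput.
		LaunchParameters: dataflow.PipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersArgs{
			JobName: pulumi.String("wordcount-nightly"),
		},
		ValidateOnly: pulumi.Bool(true), // dry-run: validate the request without launching
	}
	_ = req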

type PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput added in v6.67.0

type PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) GcsPath added in v6.67.0

A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) LaunchParameters added in v6.67.0

The parameters of the template to launch. This should be part of the body of the POST request. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplateparameters Structure is documented below.

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) Location added in v6.67.0

The regional endpoint to which to direct the request.

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ProjectId added in v6.67.0

The ID of the Cloud Platform project that the job belongs to.

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutput added in v6.67.0

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ToPipelineWorkloadDataflowLaunchTemplateRequestPtrOutputWithContext(ctx context.Context) PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput

func (PipelineWorkloadDataflowLaunchTemplateRequestPtrOutput) ValidateOnly added in v6.67.0

(Optional) If true, the request is validated but not actually executed.

type PipelineWorkloadInput added in v6.67.0

type PipelineWorkloadInput interface {
	pulumi.Input

	ToPipelineWorkloadOutput() PipelineWorkloadOutput
	ToPipelineWorkloadOutputWithContext(context.Context) PipelineWorkloadOutput
}

PipelineWorkloadInput is an input type that accepts PipelineWorkloadArgs and PipelineWorkloadOutput values. You can construct a concrete instance of `PipelineWorkloadInput` via:

        PipelineWorkloadArgs{...}
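
A minimal sketch of a concrete workload value, wiring in the standard launch request described by the DataflowLaunchTemplateRequest getter below; the request contents are placeholders:

	workload := dataflow.PipelineWorkloadArgs{
		DataflowLaunchTemplateRequest: dataflow.PipelineWorkloadDataflowLaunchTemplateRequestArgs{
			ProjectId: pulumi.String("my-project"),
			GcsPath:   pulumi.String("gs://dataflow-templates/latest/Word_Count"),
		},
	}
	_ = workload // usable wherever PipelineWorkloadInput is expected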

type PipelineWorkloadOutput added in v6.67.0

type PipelineWorkloadOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadOutput) DataflowFlexTemplateRequest added in v6.67.0

Template information and additional parameters needed to launch a Dataflow job using the flex launch API. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplaterequest Structure is documented below.

func (PipelineWorkloadOutput) DataflowLaunchTemplateRequest added in v6.67.0

Template information and additional parameters needed to launch a Dataflow job using the standard launch API. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplaterequest Structure is documented below.

func (PipelineWorkloadOutput) ElementType added in v6.67.0

func (PipelineWorkloadOutput) ElementType() reflect.Type

func (PipelineWorkloadOutput) ToOutput added in v6.67.0

func (PipelineWorkloadOutput) ToPipelineWorkloadOutput added in v6.67.0

func (o PipelineWorkloadOutput) ToPipelineWorkloadOutput() PipelineWorkloadOutput

func (PipelineWorkloadOutput) ToPipelineWorkloadOutputWithContext added in v6.67.0

func (o PipelineWorkloadOutput) ToPipelineWorkloadOutputWithContext(ctx context.Context) PipelineWorkloadOutput

func (PipelineWorkloadOutput) ToPipelineWorkloadPtrOutput added in v6.67.0

func (o PipelineWorkloadOutput) ToPipelineWorkloadPtrOutput() PipelineWorkloadPtrOutput

func (PipelineWorkloadOutput) ToPipelineWorkloadPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadOutput) ToPipelineWorkloadPtrOutputWithContext(ctx context.Context) PipelineWorkloadPtrOutput

type PipelineWorkloadPtrInput added in v6.67.0

type PipelineWorkloadPtrInput interface {
	pulumi.Input

	ToPipelineWorkloadPtrOutput() PipelineWorkloadPtrOutput
	ToPipelineWorkloadPtrOutputWithContext(context.Context) PipelineWorkloadPtrOutput
}

PipelineWorkloadPtrInput is an input type that accepts PipelineWorkloadArgs, PipelineWorkloadPtr and PipelineWorkloadPtrOutput values. You can construct a concrete instance of `PipelineWorkloadPtrInput` via:

        PipelineWorkloadArgs{...}

or:

        nil

func PipelineWorkloadPtr added in v6.67.0

func PipelineWorkloadPtr(v *PipelineWorkloadArgs) PipelineWorkloadPtrInput
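
A short usage sketch: wrapping an Args value with PipelineWorkloadPtr yields the PtrInput explicitly, though per the construction notes above an Args literal alone is normally accepted in the same places. The request fields shown are placeholders.

	wp := dataflow.PipelineWorkloadPtr(&dataflow.PipelineWorkloadArgs{
		DataflowLaunchTemplateRequest: dataflow.PipelineWorkloadDataflowLaunchTemplateRequestArgs{
			ProjectId: pulumi.String("my-project"),
			GcsPath:   pulumi.String("gs://dataflow-templates/latest/Word_Count"),
		},
	})
	_ = wp // satisfies PipelineWorkloadPtrInput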

type PipelineWorkloadPtrOutput added in v6.67.0

type PipelineWorkloadPtrOutput struct{ *pulumi.OutputState }

func (PipelineWorkloadPtrOutput) DataflowFlexTemplateRequest added in v6.67.0

Template information and additional parameters needed to launch a Dataflow job using the flex launch API. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchflextemplaterequest Structure is documented below.

func (PipelineWorkloadPtrOutput) DataflowLaunchTemplateRequest added in v6.67.0

Template information and additional parameters needed to launch a Dataflow job using the standard launch API. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#launchtemplaterequest Structure is documented below.

func (PipelineWorkloadPtrOutput) Elem added in v6.67.0

func (PipelineWorkloadPtrOutput) ElementType added in v6.67.0

func (PipelineWorkloadPtrOutput) ElementType() reflect.Type

func (PipelineWorkloadPtrOutput) ToOutput added in v6.67.0

func (PipelineWorkloadPtrOutput) ToPipelineWorkloadPtrOutput added in v6.67.0

func (o PipelineWorkloadPtrOutput) ToPipelineWorkloadPtrOutput() PipelineWorkloadPtrOutput

func (PipelineWorkloadPtrOutput) ToPipelineWorkloadPtrOutputWithContext added in v6.67.0

func (o PipelineWorkloadPtrOutput) ToPipelineWorkloadPtrOutputWithContext(ctx context.Context) PipelineWorkloadPtrOutput
