v1alpha1


Documentation

Overview

+kubebuilder:object:generate=true +groupName=data.mongodbatlas.crossplane.io +versionName=v1alpha1

Index

Constants

const (
	CRDGroup   = "data.mongodbatlas.crossplane.io"
	CRDVersion = "v1alpha1"
)

Package type metadata.

Variables

var (
	// CRDGroupVersion is the API Group Version used to register the objects
	CRDGroupVersion = schema.GroupVersion{Group: CRDGroup, Version: CRDVersion}

	// SchemeBuilder is used to add go types to the GroupVersionKind scheme
	SchemeBuilder = &scheme.Builder{GroupVersion: CRDGroupVersion}

	// AddToScheme adds the types in this group-version to the given scheme.
	AddToScheme = SchemeBuilder.AddToScheme
)
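
The generated scheme helpers above are typically consumed when wiring a Kubernetes client or controller. A minimal usage sketch, not part of this package, is shown below; the import path is an assumption based on the group name and may differ for the module you actually depend on.

package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/runtime"

	// Assumed import path for this package; adjust to the module you actually use.
	v1alpha1 "github.com/crossplane-contrib/provider-jet-mongodbatlas/apis/data/v1alpha1"
)

func main() {
	// Register Lake, LakePipeline, and their list types under
	// data.mongodbatlas.crossplane.io/v1alpha1.
	s := runtime.NewScheme()
	if err := v1alpha1.AddToScheme(s); err != nil {
		panic(err)
	}
	fmt.Println(v1alpha1.CRDGroupVersion) // data.mongodbatlas.crossplane.io/v1alpha1
}
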
var (
	Lake_Kind             = "Lake"
	Lake_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: Lake_Kind}.String()
	Lake_KindAPIVersion   = Lake_Kind + "." + CRDGroupVersion.String()
	Lake_GroupVersionKind = CRDGroupVersion.WithKind(Lake_Kind)
)

Lake type metadata.

var (
	LakePipeline_Kind             = "LakePipeline"
	LakePipeline_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: LakePipeline_Kind}.String()
	LakePipeline_KindAPIVersion   = LakePipeline_Kind + "." + CRDGroupVersion.String()
	LakePipeline_GroupVersionKind = CRDGroupVersion.WithKind(LakePipeline_Kind)
)

LakePipeline type metadata.

Functions

This section is empty.

Types

type AwsObservation

type AwsObservation struct {

	// Unique identifier associated with the IAM Role that Data Lake assumes when accessing the data stores.
	ExternalID *string `json:"externalId,omitempty" tf:"external_id,omitempty"`

	// Amazon Resource Name (ARN) of the IAM Role that Data Lake assumes when accessing S3 Bucket data stores. The IAM Role must support the following actions against each S3 bucket:
	IAMAssumedRoleArn *string `json:"iamAssumedRoleArn,omitempty" tf:"iam_assumed_role_arn,omitempty"`

	// Amazon Resource Name (ARN) of the user that Data Lake assumes when accessing S3 Bucket data stores.
	IAMUserArn *string `json:"iamUserArn,omitempty" tf:"iam_user_arn,omitempty"`

	// Unique identifier of the role that Data Lake can use to access the data stores. If necessary, use the Atlas UI or API to retrieve the role ID. You must also specify the aws.0.test_s3_bucket.
	RoleID *string `json:"roleId,omitempty" tf:"role_id,omitempty"`

	// Name of the S3 data bucket that the provided role ID is authorized to access. You must also specify the aws.0.role_id.
	TestS3Bucket *string `json:"testS3Bucket,omitempty" tf:"test_s3_bucket,omitempty"`
}

func (*AwsObservation) DeepCopy

func (in *AwsObservation) DeepCopy() *AwsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsObservation.

func (*AwsObservation) DeepCopyInto

func (in *AwsObservation) DeepCopyInto(out *AwsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type AwsParameters

type AwsParameters struct {

	// Unique identifier of the role that Data Lake can use to access the data stores. If necessary, use the Atlas UI or API to retrieve the role ID. You must also specify the aws.0.test_s3_bucket.
	// +kubebuilder:validation:Required
	RoleID *string `json:"roleId" tf:"role_id,omitempty"`

	// Name of the S3 data bucket that the provided role ID is authorized to access. You must also specify the aws.0.role_id.
	// +kubebuilder:validation:Required
	TestS3Bucket *string `json:"testS3Bucket" tf:"test_s3_bucket,omitempty"`
}
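
Both fields above carry +kubebuilder:validation:Required, so any actively managed Lake must set them. A minimal sketch of populating the block in Go, with placeholder values and an assumed import path:

package main

import (
	"fmt"

	"k8s.io/utils/ptr"

	// Assumed import path; adjust to the module you actually use.
	v1alpha1 "github.com/crossplane-contrib/provider-jet-mongodbatlas/apis/data/v1alpha1"
)

func main() {
	aws := v1alpha1.AwsParameters{
		// Placeholder identifiers, not real Atlas or AWS values.
		RoleID:       ptr.To("0000000000000000000000aa"),
		TestS3Bucket: ptr.To("example-data-lake-bucket"),
	}
	fmt.Println(*aws.RoleID, *aws.TestS3Bucket)
}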

func (*AwsParameters) DeepCopy

func (in *AwsParameters) DeepCopy() *AwsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsParameters.

func (*AwsParameters) DeepCopyInto

func (in *AwsParameters) DeepCopyInto(out *AwsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type CollectionsObservation

type CollectionsObservation struct {

	// Array of objects where each object represents a data store from the stores array to map to the collection.
	DataSources []DataSourcesObservation `json:"dataSources,omitempty" tf:"data_sources,omitempty"`

	// Name of the data store.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`
}

func (*CollectionsObservation) DeepCopy

func (in *CollectionsObservation) DeepCopy() *CollectionsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CollectionsObservation.

func (*CollectionsObservation) DeepCopyInto

func (in *CollectionsObservation) DeepCopyInto(out *CollectionsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type CollectionsParameters

type CollectionsParameters struct {
}

func (*CollectionsParameters) DeepCopy

func (in *CollectionsParameters) DeepCopy() *CollectionsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CollectionsParameters.

func (*CollectionsParameters) DeepCopyInto

func (in *CollectionsParameters) DeepCopyInto(out *CollectionsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataProcessRegionObservation

type DataProcessRegionObservation struct {

	// Name of the cloud service provider. Atlas Data Lake only supports AWS.
	CloudProvider *string `json:"cloudProvider,omitempty" tf:"cloud_provider,omitempty"`

	// Name of the AWS region in which the S3 bucket is hosted.
	Region *string `json:"region,omitempty" tf:"region,omitempty"`
}

func (*DataProcessRegionObservation) DeepCopy

func (in *DataProcessRegionObservation) DeepCopy() *DataProcessRegionObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProcessRegionObservation.

func (*DataProcessRegionObservation) DeepCopyInto

func (in *DataProcessRegionObservation) DeepCopyInto(out *DataProcessRegionObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataProcessRegionParameters

type DataProcessRegionParameters struct {

	// Name of the cloud service provider. Atlas Data Lake only supports AWS.
	// +kubebuilder:validation:Required
	CloudProvider *string `json:"cloudProvider" tf:"cloud_provider,omitempty"`

	// Name of the AWS region in which the S3 bucket is hosted.
	// +kubebuilder:validation:Required
	Region *string `json:"region" tf:"region,omitempty"`
}

func (*DataProcessRegionParameters) DeepCopy

func (in *DataProcessRegionParameters) DeepCopy() *DataProcessRegionParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProcessRegionParameters.

func (*DataProcessRegionParameters) DeepCopyInto

func (in *DataProcessRegionParameters) DeepCopyInto(out *DataProcessRegionParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataSourcesObservation

type DataSourcesObservation struct {

	// Default format that Data Lake assumes if it encounters a file without an extension while searching the storeName.
	DefaultFormat *string `json:"defaultFormat,omitempty" tf:"default_format,omitempty"`

	// Controls how Atlas Data Lake searches for and parses files in the storeName before mapping them to the <collection>.
	Path *string `json:"path,omitempty" tf:"path,omitempty"`

	// Name of a data store to map to the <collection>. Must match the name of an object in the stores array.
	StoreName *string `json:"storeName,omitempty" tf:"store_name,omitempty"`
}

func (*DataSourcesObservation) DeepCopy

func (in *DataSourcesObservation) DeepCopy() *DataSourcesObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourcesObservation.

func (*DataSourcesObservation) DeepCopyInto

func (in *DataSourcesObservation) DeepCopyInto(out *DataSourcesObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type DataSourcesParameters

type DataSourcesParameters struct {
}

func (*DataSourcesParameters) DeepCopy

func (in *DataSourcesParameters) DeepCopy() *DataSourcesParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourcesParameters.

func (*DataSourcesParameters) DeepCopyInto

func (in *DataSourcesParameters) DeepCopyInto(out *DataSourcesParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type IngestionSchedulesObservation

type IngestionSchedulesObservation struct {

	// Number that indicates the frequency interval for a set of snapshots.
	FrequencyInterval *float64 `json:"frequencyInterval,omitempty" tf:"frequency_interval,omitempty"`

	// Human-readable label that identifies how often this snapshot triggers.
	FrequencyType *string `json:"frequencyType,omitempty" tf:"frequency_type,omitempty"`

	// Unique 24-hexadecimal digit string that identifies the snapshot.
	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// Unit of time in which MongoDB Atlas measures snapshot retention.
	RetentionUnit *string `json:"retentionUnit,omitempty" tf:"retention_unit,omitempty"`

	// Duration in days, weeks, or months that MongoDB Atlas retains the snapshot.
	RetentionValue *float64 `json:"retentionValue,omitempty" tf:"retention_value,omitempty"`
}

func (*IngestionSchedulesObservation) DeepCopy

func (in *IngestionSchedulesObservation) DeepCopy() *IngestionSchedulesObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new IngestionSchedulesObservation.

func (*IngestionSchedulesObservation) DeepCopyInto

func (in *IngestionSchedulesObservation) DeepCopyInto(out *IngestionSchedulesObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type IngestionSchedulesParameters

type IngestionSchedulesParameters struct {
}

func (*IngestionSchedulesParameters) DeepCopy

func (in *IngestionSchedulesParameters) DeepCopy() *IngestionSchedulesParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new IngestionSchedulesParameters.

func (*IngestionSchedulesParameters) DeepCopyInto

func (in *IngestionSchedulesParameters) DeepCopyInto(out *IngestionSchedulesParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type Lake

type Lake struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.aws)",message="aws is a required parameter"
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.projectId)",message="projectId is a required parameter"
	Spec   LakeSpec   `json:"spec"`
	Status LakeStatus `json:"status,omitempty"`
}

Lake is the Schema for the Lakes API. Provides a Data Lake resource. +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:subresource:status +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,mongodbatlas}
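
A minimal sketch of constructing a Lake in Go that satisfies the two XValidation rules above (forProvider.projectId and forProvider.aws must be set unless managementPolicy is ObserveOnly). All identifiers are placeholders and the import paths are assumptions; in practice the object is usually authored as YAML and applied to the cluster.

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"

	// Assumed import path; adjust to the module you actually use.
	v1alpha1 "github.com/crossplane-contrib/provider-jet-mongodbatlas/apis/data/v1alpha1"
)

// exampleLake builds a Lake whose spec passes the CEL validation rules above.
func exampleLake() *v1alpha1.Lake {
	return &v1alpha1.Lake{
		ObjectMeta: metav1.ObjectMeta{Name: "example-lake"},
		Spec: v1alpha1.LakeSpec{
			ForProvider: v1alpha1.LakeParameters{
				// Placeholder Atlas project ID.
				ProjectID: ptr.To("5f1f0b0b0b0b0b0b0b0b0b0b"),
				// Placeholder IAM role ID and test bucket, as in the AwsParameters sketch above.
				Aws: []v1alpha1.AwsParameters{{
					RoleID:       ptr.To("0000000000000000000000aa"),
					TestS3Bucket: ptr.To("example-data-lake-bucket"),
				}},
			},
		},
	}
}

func main() { _ = exampleLake() }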

func (*Lake) DeepCopy

func (in *Lake) DeepCopy() *Lake

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Lake.

func (*Lake) DeepCopyInto

func (in *Lake) DeepCopyInto(out *Lake)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Lake) DeepCopyObject

func (in *Lake) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Lake) GetCondition

func (mg *Lake) GetCondition(ct xpv1.ConditionType) xpv1.Condition

GetCondition of this Lake.

func (*Lake) GetConnectionDetailsMapping

func (tr *Lake) GetConnectionDetailsMapping() map[string]string

GetConnectionDetailsMapping for this Lake

func (*Lake) GetDeletionPolicy

func (mg *Lake) GetDeletionPolicy() xpv1.DeletionPolicy

GetDeletionPolicy of this Lake.

func (*Lake) GetID

func (tr *Lake) GetID() string

GetID returns the ID of the underlying Terraform resource of this Lake

func (*Lake) GetManagementPolicy

func (mg *Lake) GetManagementPolicy() xpv1.ManagementPolicy

GetManagementPolicy of this Lake.

func (*Lake) GetObservation

func (tr *Lake) GetObservation() (map[string]any, error)

GetObservation of this Lake

func (*Lake) GetParameters

func (tr *Lake) GetParameters() (map[string]any, error)

GetParameters of this Lake

func (*Lake) GetProviderConfigReference

func (mg *Lake) GetProviderConfigReference() *xpv1.Reference

GetProviderConfigReference of this Lake.

func (*Lake) GetProviderReference

func (mg *Lake) GetProviderReference() *xpv1.Reference

GetProviderReference of this Lake. Deprecated: Use GetProviderConfigReference.

func (*Lake) GetPublishConnectionDetailsTo

func (mg *Lake) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo

GetPublishConnectionDetailsTo of this Lake.

func (*Lake) GetTerraformResourceType

func (mg *Lake) GetTerraformResourceType() string

GetTerraformResourceType returns the Terraform resource type for this Lake

func (*Lake) GetTerraformSchemaVersion

func (tr *Lake) GetTerraformSchemaVersion() int

GetTerraformSchemaVersion returns the associated Terraform schema version

func (*Lake) GetWriteConnectionSecretToReference

func (mg *Lake) GetWriteConnectionSecretToReference() *xpv1.SecretReference

GetWriteConnectionSecretToReference of this Lake.

func (*Lake) LateInitialize

func (tr *Lake) LateInitialize(attrs []byte) (bool, error)

LateInitialize this Lake using its observed tfState. It returns true if there are any spec changes for the resource.

func (*Lake) SetConditions

func (mg *Lake) SetConditions(c ...xpv1.Condition)

SetConditions of this Lake.

func (*Lake) SetDeletionPolicy

func (mg *Lake) SetDeletionPolicy(r xpv1.DeletionPolicy)

SetDeletionPolicy of this Lake.

func (*Lake) SetManagementPolicy

func (mg *Lake) SetManagementPolicy(r xpv1.ManagementPolicy)

SetManagementPolicy of this Lake.

func (*Lake) SetObservation

func (tr *Lake) SetObservation(obs map[string]any) error

SetObservation for this Lake

func (*Lake) SetParameters

func (tr *Lake) SetParameters(params map[string]any) error

SetParameters for this Lake

func (*Lake) SetProviderConfigReference

func (mg *Lake) SetProviderConfigReference(r *xpv1.Reference)

SetProviderConfigReference of this Lake.

func (*Lake) SetProviderReference

func (mg *Lake) SetProviderReference(r *xpv1.Reference)

SetProviderReference of this Lake. Deprecated: Use SetProviderConfigReference.

func (*Lake) SetPublishConnectionDetailsTo

func (mg *Lake) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)

SetPublishConnectionDetailsTo of this Lake.

func (*Lake) SetWriteConnectionSecretToReference

func (mg *Lake) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)

SetWriteConnectionSecretToReference of this Lake.

type LakeList

type LakeList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []Lake `json:"items"`
}

LakeList contains a list of Lakes

func (*LakeList) DeepCopy

func (in *LakeList) DeepCopy() *LakeList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakeList.

func (*LakeList) DeepCopyInto

func (in *LakeList) DeepCopyInto(out *LakeList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LakeList) DeepCopyObject

func (in *LakeList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LakeList) GetItems

func (l *LakeList) GetItems() []resource.Managed

GetItems of this LakeList.

type LakeObservation

type LakeObservation struct {

	// AWS provider of the cloud service where Data Lake can access the S3 Bucket.
	Aws []AwsObservation `json:"aws,omitempty" tf:"aws,omitempty"`

	// The cloud provider region to which Atlas Data Lake routes client connections for data processing. Set to null to direct Atlas Data Lake to route client connections to the region nearest to the client based on DNS resolution.
	DataProcessRegion []DataProcessRegionObservation `json:"dataProcessRegion,omitempty" tf:"data_process_region,omitempty"`

	// The list of hostnames assigned to the Atlas Data Lake. Each string in the array is a hostname assigned to the Atlas Data Lake.
	Hostnames []*string `json:"hostnames,omitempty" tf:"hostnames,omitempty"`

	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// The unique ID for the project to create a data lake.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// Current state of the Atlas Data Lake.
	State *string `json:"state,omitempty" tf:"state,omitempty"`

	// Configuration details for mapping each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see databases. An empty object indicates that the Data Lake has no mapping configuration for any data store.
	StorageDatabases []StorageDatabasesObservation `json:"storageDatabases,omitempty" tf:"storage_databases,omitempty"`

	// Each object in the array represents a data store. Data Lake uses the storage.databases configuration details to map data in each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see stores. An empty object indicates that the Data Lake has no configured data stores.
	StorageStores []StorageStoresObservation `json:"storageStores,omitempty" tf:"storage_stores,omitempty"`
}

func (*LakeObservation) DeepCopy

func (in *LakeObservation) DeepCopy() *LakeObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakeObservation.

func (*LakeObservation) DeepCopyInto

func (in *LakeObservation) DeepCopyInto(out *LakeObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakeParameters

type LakeParameters struct {

	// AWS provider of the cloud service where Data Lake can access the S3 Bucket.
	// +kubebuilder:validation:Optional
	Aws []AwsParameters `json:"aws,omitempty" tf:"aws,omitempty"`

	// The cloud provider region to which Atlas Data Lake routes client connections for data processing. Set to null to direct Atlas Data Lake to route client connections to the region nearest to the client based on DNS resolution.
	// +kubebuilder:validation:Optional
	DataProcessRegion []DataProcessRegionParameters `json:"dataProcessRegion,omitempty" tf:"data_process_region,omitempty"`

	// The unique ID for the project to create a data lake.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`
}

func (*LakeParameters) DeepCopy

func (in *LakeParameters) DeepCopy() *LakeParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakeParameters.

func (*LakeParameters) DeepCopyInto

func (in *LakeParameters) DeepCopyInto(out *LakeParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakePipeline

type LakePipeline struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.projectId)",message="projectId is a required parameter"
	Spec   LakePipelineSpec   `json:"spec"`
	Status LakePipelineStatus `json:"status,omitempty"`
}

LakePipeline is the Schema for the LakePipelines API. Provides a Data Lake Pipeline resource. +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:subresource:status +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,mongodbatlas}
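
A minimal sketch of a LakePipeline spec in Go, wiring a cluster source to a Data Lake sink with one excluded field. The type, provider, region, and identifier strings are placeholders rather than values this package prescribes, and the import paths are assumptions as above.

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"

	// Assumed import path; adjust to the module you actually use.
	v1alpha1 "github.com/crossplane-contrib/provider-jet-mongodbatlas/apis/data/v1alpha1"
)

func examplePipeline() *v1alpha1.LakePipeline {
	return &v1alpha1.LakePipeline{
		ObjectMeta: metav1.ObjectMeta{Name: "example-pipeline"},
		Spec: v1alpha1.LakePipelineSpec{
			ForProvider: v1alpha1.LakePipelineParameters{
				// Placeholder Atlas project ID; required unless managementPolicy is ObserveOnly.
				ProjectID: ptr.To("5f1f0b0b0b0b0b0b0b0b0b0b"),
				Source: []v1alpha1.SourceParameters{{
					// Placeholder source: a collection on an existing cluster.
					Type:           ptr.To("ON_DEMAND_CPS"),
					ClusterName:    ptr.To("example-cluster"),
					DatabaseName:   ptr.To("sample_airbnb"),
					CollectionName: ptr.To("listingsAndReviews"),
				}},
				Sink: []v1alpha1.SinkParameters{{
					// Placeholder sink: a Data Lake destination partitioned by one field.
					Type:     ptr.To("DLS"),
					Provider: ptr.To("AWS"),
					Region:   ptr.To("US_EAST_1"),
					PartitionFields: []v1alpha1.PartitionFieldsParameters{{
						FieldName: ptr.To("name"),
						Order:     ptr.To(0.0),
					}},
				}},
				Transformations: []v1alpha1.TransformationsParameters{{
					// Exclude one field from ingested documents.
					Field: ptr.To("address"),
					Type:  ptr.To("EXCLUDE"),
				}},
			},
		},
	}
}

func main() { _ = examplePipeline() }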

func (*LakePipeline) DeepCopy

func (in *LakePipeline) DeepCopy() *LakePipeline

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipeline.

func (*LakePipeline) DeepCopyInto

func (in *LakePipeline) DeepCopyInto(out *LakePipeline)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LakePipeline) DeepCopyObject

func (in *LakePipeline) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LakePipeline) GetCondition

func (mg *LakePipeline) GetCondition(ct xpv1.ConditionType) xpv1.Condition

GetCondition of this LakePipeline.

func (*LakePipeline) GetConnectionDetailsMapping

func (tr *LakePipeline) GetConnectionDetailsMapping() map[string]string

GetConnectionDetailsMapping for this LakePipeline

func (*LakePipeline) GetDeletionPolicy

func (mg *LakePipeline) GetDeletionPolicy() xpv1.DeletionPolicy

GetDeletionPolicy of this LakePipeline.

func (*LakePipeline) GetID

func (tr *LakePipeline) GetID() string

GetID returns the ID of the underlying Terraform resource of this LakePipeline

func (*LakePipeline) GetManagementPolicy

func (mg *LakePipeline) GetManagementPolicy() xpv1.ManagementPolicy

GetManagementPolicy of this LakePipeline.

func (*LakePipeline) GetObservation

func (tr *LakePipeline) GetObservation() (map[string]any, error)

GetObservation of this LakePipeline

func (*LakePipeline) GetParameters

func (tr *LakePipeline) GetParameters() (map[string]any, error)

GetParameters of this LakePipeline

func (*LakePipeline) GetProviderConfigReference

func (mg *LakePipeline) GetProviderConfigReference() *xpv1.Reference

GetProviderConfigReference of this LakePipeline.

func (*LakePipeline) GetProviderReference

func (mg *LakePipeline) GetProviderReference() *xpv1.Reference

GetProviderReference of this LakePipeline. Deprecated: Use GetProviderConfigReference.

func (*LakePipeline) GetPublishConnectionDetailsTo

func (mg *LakePipeline) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo

GetPublishConnectionDetailsTo of this LakePipeline.

func (*LakePipeline) GetTerraformResourceType

func (mg *LakePipeline) GetTerraformResourceType() string

GetTerraformResourceType returns the Terraform resource type for this LakePipeline

func (*LakePipeline) GetTerraformSchemaVersion

func (tr *LakePipeline) GetTerraformSchemaVersion() int

GetTerraformSchemaVersion returns the associated Terraform schema version

func (*LakePipeline) GetWriteConnectionSecretToReference

func (mg *LakePipeline) GetWriteConnectionSecretToReference() *xpv1.SecretReference

GetWriteConnectionSecretToReference of this LakePipeline.

func (*LakePipeline) LateInitialize

func (tr *LakePipeline) LateInitialize(attrs []byte) (bool, error)

LateInitialize this LakePipeline using its observed tfState. It returns true if there are any spec changes for the resource.

func (*LakePipeline) SetConditions

func (mg *LakePipeline) SetConditions(c ...xpv1.Condition)

SetConditions of this LakePipeline.

func (*LakePipeline) SetDeletionPolicy

func (mg *LakePipeline) SetDeletionPolicy(r xpv1.DeletionPolicy)

SetDeletionPolicy of this LakePipeline.

func (*LakePipeline) SetManagementPolicy

func (mg *LakePipeline) SetManagementPolicy(r xpv1.ManagementPolicy)

SetManagementPolicy of this LakePipeline.

func (*LakePipeline) SetObservation

func (tr *LakePipeline) SetObservation(obs map[string]any) error

SetObservation for this LakePipeline

func (*LakePipeline) SetParameters

func (tr *LakePipeline) SetParameters(params map[string]any) error

SetParameters for this LakePipeline

func (*LakePipeline) SetProviderConfigReference

func (mg *LakePipeline) SetProviderConfigReference(r *xpv1.Reference)

SetProviderConfigReference of this LakePipeline.

func (*LakePipeline) SetProviderReference

func (mg *LakePipeline) SetProviderReference(r *xpv1.Reference)

SetProviderReference of this LakePipeline. Deprecated: Use SetProviderConfigReference.

func (*LakePipeline) SetPublishConnectionDetailsTo

func (mg *LakePipeline) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)

SetPublishConnectionDetailsTo of this LakePipeline.

func (*LakePipeline) SetWriteConnectionSecretToReference

func (mg *LakePipeline) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)

SetWriteConnectionSecretToReference of this LakePipeline.

type LakePipelineList

type LakePipelineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []LakePipeline `json:"items"`
}

LakePipelineList contains a list of LakePipelines

func (*LakePipelineList) DeepCopy

func (in *LakePipelineList) DeepCopy() *LakePipelineList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipelineList.

func (*LakePipelineList) DeepCopyInto

func (in *LakePipelineList) DeepCopyInto(out *LakePipelineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LakePipelineList) DeepCopyObject

func (in *LakePipelineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LakePipelineList) GetItems

func (l *LakePipelineList) GetItems() []resource.Managed

GetItems of this LakePipelineList.

type LakePipelineObservation

type LakePipelineObservation struct {

	// Timestamp that indicates when the Data Lake Pipeline was created.
	CreatedDate *string `json:"createdDate,omitempty" tf:"created_date,omitempty"`

	// Unique 24-hexadecimal digit string that identifies the Data Lake Pipeline.
	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// List of backup schedule policy items that you can use as a Data Lake Pipeline source.
	IngestionSchedules []IngestionSchedulesObservation `json:"ingestionSchedules,omitempty" tf:"ingestion_schedules,omitempty"`

	// Timestamp that indicates the last time that the Data Lake Pipeline was updated.
	LastUpdatedDate *string `json:"lastUpdatedDate,omitempty" tf:"last_updated_date,omitempty"`

	// The unique ID for the project to create a data lake pipeline.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	Sink []SinkObservation `json:"sink,omitempty" tf:"sink,omitempty"`

	// List of backup snapshots that you can use to trigger an on demand pipeline run.
	Snapshots []SnapshotsObservation `json:"snapshots,omitempty" tf:"snapshots,omitempty"`

	Source []SourceObservation `json:"source,omitempty" tf:"source,omitempty"`

	// State of this Data Lake Pipeline.
	State *string `json:"state,omitempty" tf:"state,omitempty"`

	// Fields to be excluded for this Data Lake Pipeline.
	Transformations []TransformationsObservation `json:"transformations,omitempty" tf:"transformations,omitempty"`
}

func (*LakePipelineObservation) DeepCopy

func (in *LakePipelineObservation) DeepCopy() *LakePipelineObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipelineObservation.

func (*LakePipelineObservation) DeepCopyInto

func (in *LakePipelineObservation) DeepCopyInto(out *LakePipelineObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakePipelineParameters

type LakePipelineParameters struct {

	// The unique ID for the project to create a data lake pipeline.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// +kubebuilder:validation:Optional
	Sink []SinkParameters `json:"sink,omitempty" tf:"sink,omitempty"`

	// +kubebuilder:validation:Optional
	Source []SourceParameters `json:"source,omitempty" tf:"source,omitempty"`

	// Fields to be excluded for this Data Lake Pipeline.
	// +kubebuilder:validation:Optional
	Transformations []TransformationsParameters `json:"transformations,omitempty" tf:"transformations,omitempty"`
}

func (*LakePipelineParameters) DeepCopy

func (in *LakePipelineParameters) DeepCopy() *LakePipelineParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipelineParameters.

func (*LakePipelineParameters) DeepCopyInto

func (in *LakePipelineParameters) DeepCopyInto(out *LakePipelineParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakePipelineSpec

type LakePipelineSpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     LakePipelineParameters `json:"forProvider"`
}

LakePipelineSpec defines the desired state of LakePipeline

func (*LakePipelineSpec) DeepCopy

func (in *LakePipelineSpec) DeepCopy() *LakePipelineSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipelineSpec.

func (*LakePipelineSpec) DeepCopyInto

func (in *LakePipelineSpec) DeepCopyInto(out *LakePipelineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakePipelineStatus

type LakePipelineStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        LakePipelineObservation `json:"atProvider,omitempty"`
}

LakePipelineStatus defines the observed state of LakePipeline.

func (*LakePipelineStatus) DeepCopy

func (in *LakePipelineStatus) DeepCopy() *LakePipelineStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakePipelineStatus.

func (*LakePipelineStatus) DeepCopyInto

func (in *LakePipelineStatus) DeepCopyInto(out *LakePipelineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakeSpec

type LakeSpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     LakeParameters `json:"forProvider"`
}

LakeSpec defines the desired state of Lake

func (*LakeSpec) DeepCopy

func (in *LakeSpec) DeepCopy() *LakeSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakeSpec.

func (*LakeSpec) DeepCopyInto

func (in *LakeSpec) DeepCopyInto(out *LakeSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type LakeStatus

type LakeStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        LakeObservation `json:"atProvider,omitempty"`
}

LakeStatus defines the observed state of Lake.

func (*LakeStatus) DeepCopy

func (in *LakeStatus) DeepCopy() *LakeStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LakeStatus.

func (*LakeStatus) DeepCopyInto

func (in *LakeStatus) DeepCopyInto(out *LakeStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type PartitionFieldsObservation

type PartitionFieldsObservation struct {

	// Human-readable label that identifies the field name used to partition data.
	FieldName *string `json:"fieldName,omitempty" tf:"field_name,omitempty"`

	// Sequence in which MongoDB Atlas slices the collection data to create partitions. The resource expresses this sequence starting with zero.
	Order *float64 `json:"order,omitempty" tf:"order,omitempty"`
}

func (*PartitionFieldsObservation) DeepCopy

func (in *PartitionFieldsObservation) DeepCopy() *PartitionFieldsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionFieldsObservation.

func (*PartitionFieldsObservation) DeepCopyInto

func (in *PartitionFieldsObservation) DeepCopyInto(out *PartitionFieldsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type PartitionFieldsParameters

type PartitionFieldsParameters struct {

	// Human-readable label that identifies the field name used to partition data.
	// +kubebuilder:validation:Required
	FieldName *string `json:"fieldName" tf:"field_name,omitempty"`

	// Sequence in which MongoDB Atlas slices the collection data to create partitions. The resource expresses this sequence starting with zero.
	// +kubebuilder:validation:Required
	Order *float64 `json:"order" tf:"order,omitempty"`
}

func (*PartitionFieldsParameters) DeepCopy

func (in *PartitionFieldsParameters) DeepCopy() *PartitionFieldsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionFieldsParameters.

func (*PartitionFieldsParameters) DeepCopyInto

func (in *PartitionFieldsParameters) DeepCopyInto(out *PartitionFieldsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SinkObservation

type SinkObservation struct {

	// Ordered fields used to physically organize data in the destination.
	PartitionFields []PartitionFieldsObservation `json:"partitionFields,omitempty" tf:"partition_fields,omitempty"`

	// Human-readable label that identifies the cloud provider that stores this snapshot.
	Provider *string `json:"provider,omitempty" tf:"provider,omitempty"`

	// Target cloud provider region for this Data Lake Pipeline. Supported cloud provider regions.
	Region *string `json:"region,omitempty" tf:"region,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*SinkObservation) DeepCopy

func (in *SinkObservation) DeepCopy() *SinkObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SinkObservation.

func (*SinkObservation) DeepCopyInto

func (in *SinkObservation) DeepCopyInto(out *SinkObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SinkParameters

type SinkParameters struct {

	// Ordered fields used to physically organize data in the destination.
	// +kubebuilder:validation:Optional
	PartitionFields []PartitionFieldsParameters `json:"partitionFields,omitempty" tf:"partition_fields,omitempty"`

	// Human-readable label that identifies the cloud provider that stores this snapshot.
	// +kubebuilder:validation:Optional
	Provider *string `json:"provider,omitempty" tf:"provider,omitempty"`

	// Target cloud provider region for this Data Lake Pipeline. Supported cloud provider regions.
	// +kubebuilder:validation:Optional
	Region *string `json:"region,omitempty" tf:"region,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	// +kubebuilder:validation:Optional
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*SinkParameters) DeepCopy

func (in *SinkParameters) DeepCopy() *SinkParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SinkParameters.

func (*SinkParameters) DeepCopyInto

func (in *SinkParameters) DeepCopyInto(out *SinkParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SnapshotsObservation

type SnapshotsObservation struct {

	// List that identifies the regions to which MongoDB Atlas copies the snapshot.
	CopyRegion *string `json:"copyRegion,omitempty" tf:"copy_region,omitempty"`

	// Date and time when MongoDB Atlas took the snapshot.
	CreatedAt *string `json:"createdAt,omitempty" tf:"created_at,omitempty"`

	// Date and time when MongoDB Atlas deletes the snapshot.
	ExpiresAt *string `json:"expiresAt,omitempty" tf:"expires_at,omitempty"`

	FrequencyYype *string `json:"frequencyYype,omitempty" tf:"frequency_yype,omitempty"`

	// Unique 24-hexadecimal digit string that identifies the snapshot.
	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// Unique string that identifies the Amazon Web Services (AWS) Key Management Service (KMS) Customer Master Key (CMK) used to encrypt the snapshot.
	MasterKey *string `json:"masterKey,omitempty" tf:"master_key,omitempty"`

	// Version of the MongoDB host that this snapshot backs up.
	MongodVersion *string `json:"mongodVersion,omitempty" tf:"mongod_version,omitempty"`

	// List that contains unique identifiers for the policy items.
	Policies []*string `json:"policies,omitempty" tf:"policies,omitempty"`

	// Human-readable label that identifies the cloud provider that stores this snapshot.
	Provider *string `json:"provider,omitempty" tf:"provider,omitempty"`

	// Human-readable label that identifies the replica set from which MongoDB Atlas took this snapshot.
	ReplicaSetName *string `json:"replicaSetName,omitempty" tf:"replica_set_name,omitempty"`

	// Size of the backup snapshot.
	Size *float64 `json:"size,omitempty" tf:"size,omitempty"`

	// Human-readable label that identifies when this snapshot triggers.
	SnapshotType *string `json:"snapshotType,omitempty" tf:"snapshot_type,omitempty"`

	// Human-readable label that indicates the stage of the backup process for this snapshot.
	Status *string `json:"status,omitempty" tf:"status,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*SnapshotsObservation) DeepCopy

func (in *SnapshotsObservation) DeepCopy() *SnapshotsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SnapshotsObservation.

func (*SnapshotsObservation) DeepCopyInto

func (in *SnapshotsObservation) DeepCopyInto(out *SnapshotsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SnapshotsParameters

type SnapshotsParameters struct {
}

func (*SnapshotsParameters) DeepCopy

func (in *SnapshotsParameters) DeepCopy() *SnapshotsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SnapshotsParameters.

func (*SnapshotsParameters) DeepCopyInto

func (in *SnapshotsParameters) DeepCopyInto(out *SnapshotsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SourceObservation

type SourceObservation struct {

	// Human-readable name that identifies the cluster.
	ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"`

	// Human-readable name that identifies the collection.
	CollectionName *string `json:"collectionName,omitempty" tf:"collection_name,omitempty"`

	// Human-readable name that identifies the database.
	DatabaseName *string `json:"databaseName,omitempty" tf:"database_name,omitempty"`

	// Unique 24-hexadecimal digit string that identifies the Data Lake Pipeline.
	PolicyItemID *string `json:"policyItemId,omitempty" tf:"policy_item_id,omitempty"`

	// The unique ID for the project to create a data lake pipeline.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*SourceObservation) DeepCopy

func (in *SourceObservation) DeepCopy() *SourceObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceObservation.

func (*SourceObservation) DeepCopyInto

func (in *SourceObservation) DeepCopyInto(out *SourceObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type SourceParameters

type SourceParameters struct {

	// Human-readable name that identifies the cluster.
	// +kubebuilder:validation:Optional
	ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"`

	// Human-readable name that identifies the collection.
	// +kubebuilder:validation:Optional
	CollectionName *string `json:"collectionName,omitempty" tf:"collection_name,omitempty"`

	// Human-readable name that identifies the database.
	// +kubebuilder:validation:Optional
	DatabaseName *string `json:"databaseName,omitempty" tf:"database_name,omitempty"`

	// Unique 24-hexadecimal digit string that identifies the Data Lake Pipeline.
	// +kubebuilder:validation:Optional
	PolicyItemID *string `json:"policyItemId,omitempty" tf:"policy_item_id,omitempty"`

	// The unique ID for the project to create a data lake pipeline.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	// +kubebuilder:validation:Optional
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*SourceParameters) DeepCopy

func (in *SourceParameters) DeepCopy() *SourceParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceParameters.

func (*SourceParameters) DeepCopyInto

func (in *SourceParameters) DeepCopyInto(out *SourceParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type StorageDatabasesObservation

type StorageDatabasesObservation struct {

	// Array of objects where each object represents a collection and the data sources that map to a data store in the stores array.
	Collections []CollectionsObservation `json:"collections,omitempty" tf:"collections,omitempty"`

	MaxWildcardCollections *float64 `json:"maxWildcardCollections,omitempty" tf:"max_wildcard_collections,omitempty"`

	// Name of the data store.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// Array of objects where each object represents an aggregation pipeline on a collection. To learn more about views, see Views.
	Views []ViewsObservation `json:"views,omitempty" tf:"views,omitempty"`
}

func (*StorageDatabasesObservation) DeepCopy

func (in *StorageDatabasesObservation) DeepCopy() *StorageDatabasesObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageDatabasesObservation.

func (*StorageDatabasesObservation) DeepCopyInto

func (in *StorageDatabasesObservation) DeepCopyInto(out *StorageDatabasesObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type StorageDatabasesParameters

type StorageDatabasesParameters struct {
}

func (*StorageDatabasesParameters) DeepCopy

func (in *StorageDatabasesParameters) DeepCopy() *StorageDatabasesParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageDatabasesParameters.

func (*StorageDatabasesParameters) DeepCopyInto

func (in *StorageDatabasesParameters) DeepCopyInto(out *StorageDatabasesParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type StorageStoresObservation

type StorageStoresObservation struct {
	AdditionalStorageClasses []*string `json:"additionalStorageClasses,omitempty" tf:"additional_storage_classes,omitempty"`

	// Name of the AWS S3 bucket.
	Bucket *string `json:"bucket,omitempty" tf:"bucket,omitempty"`

	// The delimiter that separates storage_databases.#.collections.#.data_sources.#.path segments in the data store.
	Delimiter *string `json:"delimiter,omitempty" tf:"delimiter,omitempty"`

	// Determines whether or not to use S3 tags on the files in the given path as additional partition attributes.
	IncludeTags *bool `json:"includeTags,omitempty" tf:"include_tags,omitempty"`

	// Name of the data store.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// Prefix Data Lake applies when searching for files in the S3 bucket.
	Prefix *string `json:"prefix,omitempty" tf:"prefix,omitempty"`

	// Defines where the data is stored.
	Provider *string `json:"provider,omitempty" tf:"provider,omitempty"`

	// Name of the AWS region in which the S3 bucket is hosted.
	Region *string `json:"region,omitempty" tf:"region,omitempty"`
}

func (*StorageStoresObservation) DeepCopy

func (in *StorageStoresObservation) DeepCopy() *StorageStoresObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageStoresObservation.

func (*StorageStoresObservation) DeepCopyInto

func (in *StorageStoresObservation) DeepCopyInto(out *StorageStoresObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type StorageStoresParameters

type StorageStoresParameters struct {
}

func (*StorageStoresParameters) DeepCopy

func (in *StorageStoresParameters) DeepCopy() *StorageStoresParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageStoresParameters.

func (*StorageStoresParameters) DeepCopyInto

func (in *StorageStoresParameters) DeepCopyInto(out *StorageStoresParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TransformationsObservation

type TransformationsObservation struct {

	// Key in the document.
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*TransformationsObservation) DeepCopy

func (in *TransformationsObservation) DeepCopy() *TransformationsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TransformationsObservation.

func (*TransformationsObservation) DeepCopyInto

func (in *TransformationsObservation) DeepCopyInto(out *TransformationsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type TransformationsParameters

type TransformationsParameters struct {

	// Key in the document.
	// +kubebuilder:validation:Optional
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// Type of ingestion destination of this Data Lake Pipeline.
	// +kubebuilder:validation:Optional
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}

func (*TransformationsParameters) DeepCopy

func (in *TransformationsParameters) DeepCopy() *TransformationsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TransformationsParameters.

func (*TransformationsParameters) DeepCopyInto

func (in *TransformationsParameters) DeepCopyInto(out *TransformationsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ViewsObservation

type ViewsObservation struct {

	// Name of the data store.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// Aggregation pipeline stage(s) to apply to the source collection.
	Pipeline *string `json:"pipeline,omitempty" tf:"pipeline,omitempty"`

	// Name of the source collection for the view.
	Source *string `json:"source,omitempty" tf:"source,omitempty"`
}

func (*ViewsObservation) DeepCopy

func (in *ViewsObservation) DeepCopy() *ViewsObservation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewsObservation.

func (*ViewsObservation) DeepCopyInto

func (in *ViewsObservation) DeepCopyInto(out *ViewsObservation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

type ViewsParameters

type ViewsParameters struct {
}

func (*ViewsParameters) DeepCopy

func (in *ViewsParameters) DeepCopy() *ViewsParameters

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewsParameters.

func (*ViewsParameters) DeepCopyInto

func (in *ViewsParameters) DeepCopyInto(out *ViewsParameters)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
