Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	DataSourceParameter_Type_name = map[int32]string{
		0: "TYPE_UNSPECIFIED",
		1: "STRING",
		2: "INTEGER",
		3: "DOUBLE",
		4: "BOOLEAN",
		5: "RECORD",
		6: "PLUS_PAGE",
	}
	DataSourceParameter_Type_value = map[string]int32{
		"TYPE_UNSPECIFIED": 0,
		"STRING":           1,
		"INTEGER":          2,
		"DOUBLE":           3,
		"BOOLEAN":          4,
		"RECORD":           5,
		"PLUS_PAGE":        6,
	}
)

Enum value maps for DataSourceParameter_Type.

View Source
var (
	DataSource_AuthorizationType_name = map[int32]string{
		0: "AUTHORIZATION_TYPE_UNSPECIFIED",
		1: "AUTHORIZATION_CODE",
		2: "GOOGLE_PLUS_AUTHORIZATION_CODE",
		3: "FIRST_PARTY_OAUTH",
	}
	DataSource_AuthorizationType_value = map[string]int32{
		"AUTHORIZATION_TYPE_UNSPECIFIED": 0,
		"AUTHORIZATION_CODE":             1,
		"GOOGLE_PLUS_AUTHORIZATION_CODE": 2,
		"FIRST_PARTY_OAUTH":              3,
	}
)

Enum value maps for DataSource_AuthorizationType.

View Source
var (
	DataSource_DataRefreshType_name = map[int32]string{
		0: "DATA_REFRESH_TYPE_UNSPECIFIED",
		1: "SLIDING_WINDOW",
		2: "CUSTOM_SLIDING_WINDOW",
	}
	DataSource_DataRefreshType_value = map[string]int32{
		"DATA_REFRESH_TYPE_UNSPECIFIED": 0,
		"SLIDING_WINDOW":                1,
		"CUSTOM_SLIDING_WINDOW":         2,
	}
)

Enum value maps for DataSource_DataRefreshType.

View Source
var (
	ListTransferRunsRequest_RunAttempt_name = map[int32]string{
		0: "RUN_ATTEMPT_UNSPECIFIED",
		1: "LATEST",
	}
	ListTransferRunsRequest_RunAttempt_value = map[string]int32{
		"RUN_ATTEMPT_UNSPECIFIED": 0,
		"LATEST":                  1,
	}
)

Enum value maps for ListTransferRunsRequest_RunAttempt.

View Source
var (
	TransferType_name = map[int32]string{
		0: "TRANSFER_TYPE_UNSPECIFIED",
		1: "BATCH",
		2: "STREAMING",
	}
	TransferType_value = map[string]int32{
		"TRANSFER_TYPE_UNSPECIFIED": 0,
		"BATCH":                     1,
		"STREAMING":                 2,
	}
)

Enum value maps for TransferType.

View Source
var (
	TransferState_name = map[int32]string{
		0: "TRANSFER_STATE_UNSPECIFIED",
		2: "PENDING",
		3: "RUNNING",
		4: "SUCCEEDED",
		5: "FAILED",
		6: "CANCELLED",
	}
	TransferState_value = map[string]int32{
		"TRANSFER_STATE_UNSPECIFIED": 0,
		"PENDING":                    2,
		"RUNNING":                    3,
		"SUCCEEDED":                  4,
		"FAILED":                     5,
		"CANCELLED":                  6,
	}
)

Enum value maps for TransferState.

View Source
var (
	TransferMessage_MessageSeverity_name = map[int32]string{
		0: "MESSAGE_SEVERITY_UNSPECIFIED",
		1: "INFO",
		2: "WARNING",
		3: "ERROR",
	}
	TransferMessage_MessageSeverity_value = map[string]int32{
		"MESSAGE_SEVERITY_UNSPECIFIED": 0,
		"INFO":                         1,
		"WARNING":                      2,
		"ERROR":                        3,
	}
)

Enum value maps for TransferMessage_MessageSeverity.

View Source
var File_google_cloud_bigquery_datatransfer_v1_datatransfer_proto protoreflect.FileDescriptor
View Source
var File_google_cloud_bigquery_datatransfer_v1_transfer_proto protoreflect.FileDescriptor
View Source
var ImportedDataInfo_Encoding_name = map[int32]string{
	0: "ENCODING_UNSPECIFIED",
	1: "ISO_8859_1",
	2: "UTF8",
}
View Source
var ImportedDataInfo_Encoding_value = map[string]int32{
	"ENCODING_UNSPECIFIED": 0,
	"ISO_8859_1":           1,
	"UTF8":                 2,
}
View Source
var ImportedDataInfo_FieldSchema_Type_name = map[int32]string{
	0:  "TYPE_UNSPECIFIED",
	1:  "STRING",
	2:  "INTEGER",
	3:  "FLOAT",
	4:  "RECORD",
	5:  "BYTES",
	6:  "BOOLEAN",
	7:  "TIMESTAMP",
	8:  "DATE",
	9:  "TIME",
	10: "DATETIME",
	11: "NUMERIC",
	12: "GEOGRAPHY",
}
View Source
var ImportedDataInfo_FieldSchema_Type_value = map[string]int32{
	"TYPE_UNSPECIFIED": 0,
	"STRING":           1,
	"INTEGER":          2,
	"FLOAT":            3,
	"RECORD":           4,
	"BYTES":            5,
	"BOOLEAN":          6,
	"TIMESTAMP":        7,
	"DATE":             8,
	"TIME":             9,
	"DATETIME":         10,
	"NUMERIC":          11,
	"GEOGRAPHY":        12,
}
View Source
var ImportedDataInfo_Format_name = map[int32]string{
	0: "FORMAT_UNSPECIFIED",
	1: "CSV",
	2: "JSON",
	3: "AVRO",
	4: "RECORDIO",
	5: "COLUMNIO",
	6: "CAPACITOR",
	7: "PARQUET",
	8: "ORC",
}
View Source
var ImportedDataInfo_Format_value = map[string]int32{
	"FORMAT_UNSPECIFIED": 0,
	"CSV":                1,
	"JSON":               2,
	"AVRO":               3,
	"RECORDIO":           4,
	"COLUMNIO":           5,
	"CAPACITOR":          6,
	"PARQUET":            7,
	"ORC":                8,
}
View Source
var WriteDisposition_name = map[int32]string{
	0: "WRITE_DISPOSITION_UNSPECIFIED",
	1: "WRITE_TRUNCATE",
	2: "WRITE_APPEND",
}
View Source
var WriteDisposition_value = map[string]int32{
	"WRITE_DISPOSITION_UNSPECIFIED": 0,
	"WRITE_TRUNCATE":                1,
	"WRITE_APPEND":                  2,
}

Functions

func RegisterDataSourceServiceServer

func RegisterDataSourceServiceServer(s *grpc.Server, srv DataSourceServiceServer)

func RegisterDataTransferServiceServer

func RegisterDataTransferServiceServer(s *grpc.Server, srv DataTransferServiceServer)

Types

type CheckValidCredsRequest

type CheckValidCredsRequest struct {

	// Required. The data source in the form:
	// `projects/{project_id}/dataSources/{data_source_id}` or
	// `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to determine whether the user has valid credentials. This method is used to limit the number of OAuth popups in the user interface. The user id is inferred from the API call context. If the data source has the Google+ authorization type, this method returns false, as it cannot be determined whether the credentials are already valid merely based on the user id.

func (*CheckValidCredsRequest) Descriptor

func (*CheckValidCredsRequest) Descriptor() ([]byte, []int)

Deprecated: Use CheckValidCredsRequest.ProtoReflect.Descriptor instead.

func (*CheckValidCredsRequest) GetName

func (x *CheckValidCredsRequest) GetName() string

func (*CheckValidCredsRequest) ProtoMessage

func (*CheckValidCredsRequest) ProtoMessage()

func (*CheckValidCredsRequest) ProtoReflect

func (x *CheckValidCredsRequest) ProtoReflect() protoreflect.Message

func (*CheckValidCredsRequest) Reset

func (x *CheckValidCredsRequest) Reset()

func (*CheckValidCredsRequest) String

func (x *CheckValidCredsRequest) String() string

type CheckValidCredsResponse

type CheckValidCredsResponse struct {

	// If set to `true`, the credentials exist and are valid.
	HasValidCreds bool `protobuf:"varint,1,opt,name=has_valid_creds,json=hasValidCreds,proto3" json:"has_valid_creds,omitempty"`
	// contains filtered or unexported fields
}

A response indicating whether the credentials exist and are valid.

func (*CheckValidCredsResponse) Descriptor

func (*CheckValidCredsResponse) Descriptor() ([]byte, []int)

Deprecated: Use CheckValidCredsResponse.ProtoReflect.Descriptor instead.

func (*CheckValidCredsResponse) GetHasValidCreds

func (x *CheckValidCredsResponse) GetHasValidCreds() bool

func (*CheckValidCredsResponse) ProtoMessage

func (*CheckValidCredsResponse) ProtoMessage()

func (*CheckValidCredsResponse) ProtoReflect

func (x *CheckValidCredsResponse) ProtoReflect() protoreflect.Message

func (*CheckValidCredsResponse) Reset

func (x *CheckValidCredsResponse) Reset()

func (*CheckValidCredsResponse) String

func (x *CheckValidCredsResponse) String() string

type CreateDataSourceDefinitionRequest

type CreateDataSourceDefinitionRequest struct {
	// The BigQuery project id with which the data source definition is
	// associated. Must be in the form: `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Data source definition.
	DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,2,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

Represents the request of the CreateDataSourceDefinition method.

func (*CreateDataSourceDefinitionRequest) Descriptor

func (*CreateDataSourceDefinitionRequest) Descriptor() ([]byte, []int)

func (*CreateDataSourceDefinitionRequest) GetDataSourceDefinition

func (m *CreateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition

func (*CreateDataSourceDefinitionRequest) GetParent

func (*CreateDataSourceDefinitionRequest) ProtoMessage

func (*CreateDataSourceDefinitionRequest) ProtoMessage()

func (*CreateDataSourceDefinitionRequest) Reset

func (*CreateDataSourceDefinitionRequest) String

func (*CreateDataSourceDefinitionRequest) XXX_DiscardUnknown

func (m *CreateDataSourceDefinitionRequest) XXX_DiscardUnknown()

func (*CreateDataSourceDefinitionRequest) XXX_Marshal

func (m *CreateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CreateDataSourceDefinitionRequest) XXX_Merge

func (*CreateDataSourceDefinitionRequest) XXX_Size

func (m *CreateDataSourceDefinitionRequest) XXX_Size() int

func (*CreateDataSourceDefinitionRequest) XXX_Unmarshal

func (m *CreateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error

type CreateTransferConfigRequest

type CreateTransferConfigRequest struct {

	// Required. The BigQuery project id where the transfer configuration should be created.
	// Must be in the format projects/{project_id}/locations/{location_id} or
	// projects/{project_id}. If specified location and location of the
	// destination bigquery dataset do not match - the request will fail.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Required. Data transfer configuration to create.
	TransferConfig *TransferConfig `protobuf:"bytes,2,opt,name=transfer_config,json=transferConfig,proto3" json:"transfer_config,omitempty"`
	// Optional OAuth2 authorization code to use with this transfer configuration.
	// This is required if new credentials are needed, as indicated by
	// `CheckValidCreds`.
	// In order to obtain authorization_code, please make a
	// request to
	// https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
	//
	// * client_id should be OAuth client_id of BigQuery DTS API for the given
	//   data source returned by ListDataSources method.
	// * data_source_scopes are the scopes returned by ListDataSources method.
	// * redirect_uri is an optional parameter. If not specified, then
	//   authorization code is posted to the opener of authorization flow window.
	//   Otherwise it will be sent to the redirect uri. A special value of
	//   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
	//   returned in the title bar of the browser, with the page text prompting
	//   the user to copy the code and paste it in the application.
	AuthorizationCode string `protobuf:"bytes,3,opt,name=authorization_code,json=authorizationCode,proto3" json:"authorization_code,omitempty"`
	// Optional version info. If users want to find a very recent access token,
	// that is, immediately after approving access, users have to set the
	// version_info claim in the token request. To obtain the version_info, users
	// must use the "none+gsession" response type, which returns a
	// version_info in the authorization response. This version_info must then
	// be put in a JWT claim in the token request.
	VersionInfo string `protobuf:"bytes,5,opt,name=version_info,json=versionInfo,proto3" json:"version_info,omitempty"`
	// Optional service account name. If this field is set, transfer config will
	// be created with this service account credentials. It requires that
	// requesting user calling this API has permissions to act as this service
	// account.
	ServiceAccountName string `protobuf:"bytes,6,opt,name=service_account_name,json=serviceAccountName,proto3" json:"service_account_name,omitempty"`
	// contains filtered or unexported fields
}

A request to create a data transfer configuration. If new credentials are needed for this transfer configuration, an authorization code must be provided. If an authorization code is provided, the transfer configuration will be associated with the user id corresponding to the authorization code. Otherwise, the transfer configuration will be associated with the calling user.

func (*CreateTransferConfigRequest) Descriptor

func (*CreateTransferConfigRequest) Descriptor() ([]byte, []int)

Deprecated: Use CreateTransferConfigRequest.ProtoReflect.Descriptor instead.

func (*CreateTransferConfigRequest) GetAuthorizationCode

func (x *CreateTransferConfigRequest) GetAuthorizationCode() string

func (*CreateTransferConfigRequest) GetParent

func (x *CreateTransferConfigRequest) GetParent() string

func (*CreateTransferConfigRequest) GetServiceAccountName

func (x *CreateTransferConfigRequest) GetServiceAccountName() string

func (*CreateTransferConfigRequest) GetTransferConfig

func (x *CreateTransferConfigRequest) GetTransferConfig() *TransferConfig

func (*CreateTransferConfigRequest) GetVersionInfo

func (x *CreateTransferConfigRequest) GetVersionInfo() string

func (*CreateTransferConfigRequest) ProtoMessage

func (*CreateTransferConfigRequest) ProtoMessage()

func (*CreateTransferConfigRequest) ProtoReflect

func (*CreateTransferConfigRequest) Reset

func (x *CreateTransferConfigRequest) Reset()

func (*CreateTransferConfigRequest) String

func (x *CreateTransferConfigRequest) String() string

type DataSource

type DataSource struct {

	// Output only. Data source resource name.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Data source id.
	DataSourceId string `protobuf:"bytes,2,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"`
	// User friendly data source name.
	DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"`
	// User friendly data source description string.
	Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
	// Data source client id which should be used to receive refresh token.
	ClientId string `protobuf:"bytes,5,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"`
	// Api auth scopes for which refresh token needs to be obtained. These are
	// scopes needed by a data source to prepare data and ingest them into
	// BigQuery, e.g., https://www.googleapis.com/auth/bigquery
	Scopes []string `protobuf:"bytes,6,rep,name=scopes,proto3" json:"scopes,omitempty"`
	// Deprecated. This field has no effect.
	//
	// Deprecated: Do not use.
	TransferType TransferType `` /* 154-byte string literal not displayed */
	// Deprecated. This field has no effect.
	//
	// Deprecated: Do not use.
	SupportsMultipleTransfers bool `` /* 139-byte string literal not displayed */
	// The number of seconds to wait for an update from the data source
	// before the Data Transfer Service marks the transfer as FAILED.
	UpdateDeadlineSeconds int32 `` /* 127-byte string literal not displayed */
	// Default data transfer schedule.
	// Examples of valid schedules include:
	// `1st,3rd monday of month 15:30`,
	// `every wed,fri of jan,jun 13:15`, and
	// `first sunday of quarter 00:00`.
	DefaultSchedule string `protobuf:"bytes,10,opt,name=default_schedule,json=defaultSchedule,proto3" json:"default_schedule,omitempty"`
	// Specifies whether the data source supports a user defined schedule, or
	// operates on the default schedule.
	// When set to `true`, user can override default schedule.
	SupportsCustomSchedule bool `` /* 131-byte string literal not displayed */
	// Data source parameters.
	Parameters []*DataSourceParameter `protobuf:"bytes,12,rep,name=parameters,proto3" json:"parameters,omitempty"`
	// Url for the help document for this data source.
	HelpUrl string `protobuf:"bytes,13,opt,name=help_url,json=helpUrl,proto3" json:"help_url,omitempty"`
	// Indicates the type of authorization.
	AuthorizationType DataSource_AuthorizationType `` /* 186-byte string literal not displayed */
	// Specifies whether the data source supports automatic data refresh for the
	// past few days, and how it's supported.
	// For some data sources, data might not be complete until a few days later,
	// so it's useful to refresh data automatically.
	DataRefreshType DataSource_DataRefreshType `` /* 180-byte string literal not displayed */
	// Default data refresh window in days.
	// Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
	DefaultDataRefreshWindowDays int32 `` /* 153-byte string literal not displayed */
	// Disables backfilling and manual run scheduling
	// for the data source.
	ManualRunsDisabled bool `protobuf:"varint,17,opt,name=manual_runs_disabled,json=manualRunsDisabled,proto3" json:"manual_runs_disabled,omitempty"`
	// The minimum interval for scheduler to schedule runs.
	MinimumScheduleInterval *durationpb.Duration `` /* 133-byte string literal not displayed */
	// contains filtered or unexported fields
}

Represents data source metadata. Metadata is sufficient to render UI and request proper OAuth tokens.

func (*DataSource) Descriptor

func (*DataSource) Descriptor() ([]byte, []int)

Deprecated: Use DataSource.ProtoReflect.Descriptor instead.

func (*DataSource) GetAuthorizationType

func (x *DataSource) GetAuthorizationType() DataSource_AuthorizationType

func (*DataSource) GetClientId

func (x *DataSource) GetClientId() string

func (*DataSource) GetDataRefreshType

func (x *DataSource) GetDataRefreshType() DataSource_DataRefreshType

func (*DataSource) GetDataSourceId

func (x *DataSource) GetDataSourceId() string

func (*DataSource) GetDefaultDataRefreshWindowDays

func (x *DataSource) GetDefaultDataRefreshWindowDays() int32

func (*DataSource) GetDefaultSchedule

func (x *DataSource) GetDefaultSchedule() string

func (*DataSource) GetDescription

func (x *DataSource) GetDescription() string

func (*DataSource) GetDisplayName

func (x *DataSource) GetDisplayName() string

func (*DataSource) GetHelpUrl

func (x *DataSource) GetHelpUrl() string

func (*DataSource) GetManualRunsDisabled

func (x *DataSource) GetManualRunsDisabled() bool

func (*DataSource) GetMinimumScheduleInterval

func (x *DataSource) GetMinimumScheduleInterval() *durationpb.Duration

func (*DataSource) GetName

func (x *DataSource) GetName() string

func (*DataSource) GetParameters

func (x *DataSource) GetParameters() []*DataSourceParameter

func (*DataSource) GetScopes

func (x *DataSource) GetScopes() []string

func (*DataSource) GetSupportsCustomSchedule

func (x *DataSource) GetSupportsCustomSchedule() bool

func (*DataSource) GetSupportsMultipleTransfers

func (x *DataSource) GetSupportsMultipleTransfers() bool

Deprecated: Do not use.

func (*DataSource) GetTransferType

func (x *DataSource) GetTransferType() TransferType

Deprecated: Do not use.

func (*DataSource) GetUpdateDeadlineSeconds

func (x *DataSource) GetUpdateDeadlineSeconds() int32

func (*DataSource) ProtoMessage

func (*DataSource) ProtoMessage()

func (*DataSource) ProtoReflect

func (x *DataSource) ProtoReflect() protoreflect.Message

func (*DataSource) Reset

func (x *DataSource) Reset()

func (*DataSource) String

func (x *DataSource) String() string

type DataSourceDefinition

type DataSourceDefinition struct {
	// The resource name of the data source definition.
	// Data source definition names have the form
	// `projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}`.
	Name string `protobuf:"bytes,21,opt,name=name,proto3" json:"name,omitempty"`
	// Data source metadata.
	DataSource *DataSource `protobuf:"bytes,1,opt,name=data_source,json=dataSource,proto3" json:"data_source,omitempty"`
	// The Pub/Sub topic to be used for broadcasting a message when a transfer run
	// is created. Both this topic and transfer_config_pubsub_topic can be
	// set to a custom topic. By default, both topics are auto-generated if none
	// of them is provided when creating the definition. However, if one topic is
	// manually set, the other topic has to be manually set as well. The only
	// difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub
	// topic, but transfer_config_pubsub_topic can be set to empty. The comments
	// about "{location}" for transfer_config_pubsub_topic apply here too.
	TransferRunPubsubTopic string `` /* 132-byte string literal not displayed */
	// Duration which should be added to schedule_time to calculate
	// run_time when job is scheduled. Only applicable for automatically
	// scheduled transfer runs. Used to start a run early on a data source that
	// supports continuous data refresh to compensate for unknown timezone
	// offsets. Use a negative number to start a run late for data sources not
	// supporting continuous data refresh.
	RunTimeOffset *duration.Duration `protobuf:"bytes,16,opt,name=run_time_offset,json=runTimeOffset,proto3" json:"run_time_offset,omitempty"`
	// Support e-mail address of the OAuth client's Brand, which contains the
	// consent screen data.
	SupportEmail string `protobuf:"bytes,22,opt,name=support_email,json=supportEmail,proto3" json:"support_email,omitempty"`
	// When service account is specified, BigQuery will share created dataset
	// with the given service account. Also, this service account will be
	// eligible to perform status updates and message logging for data transfer
	// runs for the corresponding data_source_id.
	ServiceAccount string `protobuf:"bytes,2,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"`
	// Is data source disabled? If true, data_source is not visible.
	// API will also stop returning any data transfer configs and/or runs
	// associated with the data source. This setting has higher priority
	// than whitelisted_project_ids.
	Disabled bool `protobuf:"varint,5,opt,name=disabled,proto3" json:"disabled,omitempty"`
	// The Pub/Sub topic to use for broadcasting a message for transfer config. If
	// empty, a message will not be broadcasted. Both this topic and
	// transfer_run_pubsub_topic are auto-generated if none of them is provided
	// when creating the definition. It is recommended to provide
	// transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is
	// provided. Otherwise, it will be set to empty. If "{location}" is found in
	// the value, then that means, data source wants to handle message separately
	// for datasets in different regions. We will replace {location} with the
	// actual dataset location, as the actual topic name. For example,
	// projects/connector/topics/scheduler-{location} could become
	// projects/connector/topics/scheduler-us. If "{location}" is not found, then
	// we will use the input value as topic name.
	TransferConfigPubsubTopic string `` /* 141-byte string literal not displayed */
	// Supported location_ids used for deciding in which locations Pub/Sub topics
	// need to be created. If custom Pub/Sub topics are used and they contain
	// '{location}', the location_ids will be used for validating the topics by
	// replacing the '{location}' with the individual location in the list. The
	// valid values are the "location_id" field of the response of `GET
	// https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations`
	// In addition, if the data source needs to support all available regions,
	// supported_location_ids can be set to "global" (a single string element).
	// When "global" is specified:
	// 1) the data source implementation is supposed to stage the data in proper
	// region of the destination dataset;
	// 2) Data source developer should be aware of the implications (e.g., network
	// traffic latency, potential charge associated with cross-region traffic,
	// etc.) of supporting the "global" region;
	SupportedLocationIds []string `protobuf:"bytes,23,rep,name=supported_location_ids,json=supportedLocationIds,proto3" json:"supported_location_ids,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Represents the data source definition.

func (*DataSourceDefinition) Descriptor

func (*DataSourceDefinition) Descriptor() ([]byte, []int)

func (*DataSourceDefinition) GetDataSource

func (m *DataSourceDefinition) GetDataSource() *DataSource

func (*DataSourceDefinition) GetDisabled

func (m *DataSourceDefinition) GetDisabled() bool

func (*DataSourceDefinition) GetName

func (m *DataSourceDefinition) GetName() string

func (*DataSourceDefinition) GetRunTimeOffset

func (m *DataSourceDefinition) GetRunTimeOffset() *duration.Duration

func (*DataSourceDefinition) GetServiceAccount

func (m *DataSourceDefinition) GetServiceAccount() string

func (*DataSourceDefinition) GetSupportEmail

func (m *DataSourceDefinition) GetSupportEmail() string

func (*DataSourceDefinition) GetSupportedLocationIds

func (m *DataSourceDefinition) GetSupportedLocationIds() []string

func (*DataSourceDefinition) GetTransferConfigPubsubTopic

func (m *DataSourceDefinition) GetTransferConfigPubsubTopic() string

func (*DataSourceDefinition) GetTransferRunPubsubTopic

func (m *DataSourceDefinition) GetTransferRunPubsubTopic() string

func (*DataSourceDefinition) ProtoMessage

func (*DataSourceDefinition) ProtoMessage()

func (*DataSourceDefinition) Reset

func (m *DataSourceDefinition) Reset()

func (*DataSourceDefinition) String

func (m *DataSourceDefinition) String() string

func (*DataSourceDefinition) XXX_DiscardUnknown

func (m *DataSourceDefinition) XXX_DiscardUnknown()

func (*DataSourceDefinition) XXX_Marshal

func (m *DataSourceDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceDefinition) XXX_Merge

func (m *DataSourceDefinition) XXX_Merge(src proto.Message)

func (*DataSourceDefinition) XXX_Size

func (m *DataSourceDefinition) XXX_Size() int

func (*DataSourceDefinition) XXX_Unmarshal

func (m *DataSourceDefinition) XXX_Unmarshal(b []byte) error

type DataSourceParameter

type DataSourceParameter struct {

	// Parameter identifier.
	ParamId string `protobuf:"bytes,1,opt,name=param_id,json=paramId,proto3" json:"param_id,omitempty"`
	// Parameter display name in the user interface.
	DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"`
	// Parameter description.
	Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
	// Parameter type.
	Type DataSourceParameter_Type `` /* 130-byte string literal not displayed */
	// Is parameter required.
	Required bool `protobuf:"varint,5,opt,name=required,proto3" json:"required,omitempty"`
	// Deprecated. This field has no effect.
	Repeated bool `protobuf:"varint,6,opt,name=repeated,proto3" json:"repeated,omitempty"`
	// Regular expression which can be used for parameter validation.
	ValidationRegex string `protobuf:"bytes,7,opt,name=validation_regex,json=validationRegex,proto3" json:"validation_regex,omitempty"`
	// All possible values for the parameter.
	AllowedValues []string `protobuf:"bytes,8,rep,name=allowed_values,json=allowedValues,proto3" json:"allowed_values,omitempty"`
	// For integer and double values specifies minimum allowed value.
	MinValue *wrapperspb.DoubleValue `protobuf:"bytes,9,opt,name=min_value,json=minValue,proto3" json:"min_value,omitempty"`
	// For integer and double values specifies maximum allowed value.
	MaxValue *wrapperspb.DoubleValue `protobuf:"bytes,10,opt,name=max_value,json=maxValue,proto3" json:"max_value,omitempty"`
	// Deprecated. This field has no effect.
	Fields []*DataSourceParameter `protobuf:"bytes,11,rep,name=fields,proto3" json:"fields,omitempty"`
	// Description of the requirements for this field, in case the user input does
	// not fulfill the regex pattern or min/max values.
	ValidationDescription string `protobuf:"bytes,12,opt,name=validation_description,json=validationDescription,proto3" json:"validation_description,omitempty"`
	// URL to a help document to further explain the naming requirements.
	ValidationHelpUrl string `protobuf:"bytes,13,opt,name=validation_help_url,json=validationHelpUrl,proto3" json:"validation_help_url,omitempty"`
	// Cannot be changed after initial creation.
	Immutable bool `protobuf:"varint,14,opt,name=immutable,proto3" json:"immutable,omitempty"`
	// Deprecated. This field has no effect.
	Recurse bool `protobuf:"varint,15,opt,name=recurse,proto3" json:"recurse,omitempty"`
	// If true, it should not be used in new transfers, and it should not be
	// visible to users.
	Deprecated bool `protobuf:"varint,20,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
	// contains filtered or unexported fields
}

Represents a data source parameter with validation rules, so that parameters can be rendered in the UI. These parameters are given to us by supported data sources, and include all needed information for rendering and validation. Thus, whoever uses this api can decide to generate either generic ui, or custom data source specific forms.

func (*DataSourceParameter) Descriptor

func (*DataSourceParameter) Descriptor() ([]byte, []int)

Deprecated: Use DataSourceParameter.ProtoReflect.Descriptor instead.

func (*DataSourceParameter) GetAllowedValues

func (x *DataSourceParameter) GetAllowedValues() []string

func (*DataSourceParameter) GetDeprecated

func (x *DataSourceParameter) GetDeprecated() bool

func (*DataSourceParameter) GetDescription

func (x *DataSourceParameter) GetDescription() string

func (*DataSourceParameter) GetDisplayName

func (x *DataSourceParameter) GetDisplayName() string

func (*DataSourceParameter) GetFields

func (x *DataSourceParameter) GetFields() []*DataSourceParameter

func (*DataSourceParameter) GetImmutable

func (x *DataSourceParameter) GetImmutable() bool

func (*DataSourceParameter) GetMaxValue

func (x *DataSourceParameter) GetMaxValue() *wrapperspb.DoubleValue

func (*DataSourceParameter) GetMinValue

func (x *DataSourceParameter) GetMinValue() *wrapperspb.DoubleValue

func (*DataSourceParameter) GetParamId

func (x *DataSourceParameter) GetParamId() string

func (*DataSourceParameter) GetRecurse

func (x *DataSourceParameter) GetRecurse() bool

func (*DataSourceParameter) GetRepeated

func (x *DataSourceParameter) GetRepeated() bool

func (*DataSourceParameter) GetRequired

func (x *DataSourceParameter) GetRequired() bool

func (*DataSourceParameter) GetType

func (*DataSourceParameter) GetValidationDescription

func (x *DataSourceParameter) GetValidationDescription() string

func (*DataSourceParameter) GetValidationHelpUrl

func (x *DataSourceParameter) GetValidationHelpUrl() string

func (*DataSourceParameter) GetValidationRegex

func (x *DataSourceParameter) GetValidationRegex() string

func (*DataSourceParameter) ProtoMessage

func (*DataSourceParameter) ProtoMessage()

func (*DataSourceParameter) ProtoReflect

func (x *DataSourceParameter) ProtoReflect() protoreflect.Message

func (*DataSourceParameter) Reset

func (x *DataSourceParameter) Reset()

func (*DataSourceParameter) String

func (x *DataSourceParameter) String() string

type DataSourceParameter_Type

type DataSourceParameter_Type int32

Parameter type.

const (
	// Type unspecified.
	DataSourceParameter_TYPE_UNSPECIFIED DataSourceParameter_Type = 0
	// String parameter.
	DataSourceParameter_STRING DataSourceParameter_Type = 1
	// Integer parameter (64-bits).
	// Will be serialized to json as string.
	DataSourceParameter_INTEGER DataSourceParameter_Type = 2
	// Double precision floating point parameter.
	DataSourceParameter_DOUBLE DataSourceParameter_Type = 3
	// Boolean parameter.
	DataSourceParameter_BOOLEAN DataSourceParameter_Type = 4
	// Deprecated. This field has no effect.
	DataSourceParameter_RECORD DataSourceParameter_Type = 5
	// Page ID for a Google+ Page.
	DataSourceParameter_PLUS_PAGE DataSourceParameter_Type = 6
)

func (DataSourceParameter_Type) Descriptor

func (DataSourceParameter_Type) Enum

func (DataSourceParameter_Type) EnumDescriptor

func (DataSourceParameter_Type) EnumDescriptor() ([]byte, []int)

Deprecated: Use DataSourceParameter_Type.Descriptor instead.

func (DataSourceParameter_Type) Number

func (DataSourceParameter_Type) String

func (x DataSourceParameter_Type) String() string

func (DataSourceParameter_Type) Type

type DataSourceServiceClient

type DataSourceServiceClient interface {
	// Update a transfer run. If successful, resets
	// data_source.update_deadline_seconds timer.
	UpdateTransferRun(ctx context.Context, in *UpdateTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error)
	// Log messages for a transfer run. If successful (at least 1 message), resets
	// data_source.update_deadline_seconds timer.
	LogTransferRunMessages(ctx context.Context, in *LogTransferRunMessagesRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Notify the Data Transfer Service that data is ready for loading.
	// The Data Transfer Service will start and monitor multiple BigQuery Load
	// jobs for a transfer run. Monitored jobs will be automatically retried
	// and produce log messages when starting and finishing a job.
	// Can be called multiple times for the same transfer run.
	StartBigQueryJobs(ctx context.Context, in *StartBigQueryJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Notify the Data Transfer Service that the data source is done processing
	// the run. No more status updates or requests to start/monitor jobs will be
	// accepted. The run will be finalized by the Data Transfer Service when all
	// monitored jobs are completed.
	// Does not need to be called if the run is set to FAILED.
	FinishRun(ctx context.Context, in *FinishRunRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Creates a data source definition.  Calling this method will automatically
	// use your credentials to create the following Google Cloud resources in
	// YOUR Google Cloud project.
	// 1. OAuth client
	// 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g.,
	// projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
	// The field data_source.client_id should be left empty in the input request,
	// as the API will create a new OAuth client on behalf of the caller. On the
	// other hand data_source.scopes usually need to be set when there are OAuth
	// scopes that need to be granted by end users.
	// 3. We need a longer deadline due to the 60-second SLO from Pub/Sub admin
	// operations. This also applies to update and delete data source definition.
	CreateDataSourceDefinition(ctx context.Context, in *CreateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Updates an existing data source definition. If changing
	// supported_location_ids, triggers same effects as mentioned in "Create a
	// data source definition."
	UpdateDataSourceDefinition(ctx context.Context, in *UpdateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Deletes a data source definition, all of the transfer configs associated
	// with this data source definition (if any) must be deleted first by the user
	// in ALL regions, in order to delete the data source definition.
	// This method is primarily meant for deleting data sources created during
	// testing stage.
	// If the data source is referenced by transfer configs in the region
	// specified in the request URL, the method will fail immediately. If in the
	// current region (e.g., US) it's not used by any transfer configs, but in
	// another region (e.g., EU) it is, then although the method will succeed in
	// region US, it will fail when the deletion operation is replicated to
	// region EU. And eventually, the system will replicate the data source
	// definition back from EU to US, in order to bring all regions to
	// consistency. The final effect is that the data source appears to be
	// 'undeleted' in the US region.
	DeleteDataSourceDefinition(ctx context.Context, in *DeleteDataSourceDefinitionRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Retrieves an existing data source definition.
	GetDataSourceDefinition(ctx context.Context, in *GetDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Lists supported data source definitions.
	ListDataSourceDefinitions(ctx context.Context, in *ListDataSourceDefinitionsRequest, opts ...grpc.CallOption) (*ListDataSourceDefinitionsResponse, error)
}

DataSourceServiceClient is the client API for DataSourceService service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

type DataSourceServiceServer

type DataSourceServiceServer interface {
	// Update a transfer run. If successful, resets
	// data_source.update_deadline_seconds timer.
	UpdateTransferRun(context.Context, *UpdateTransferRunRequest) (*TransferRun, error)
	// Log messages for a transfer run. If successful (at least 1 message), resets
	// data_source.update_deadline_seconds timer.
	LogTransferRunMessages(context.Context, *LogTransferRunMessagesRequest) (*empty.Empty, error)
	// Notify the Data Transfer Service that data is ready for loading.
	// The Data Transfer Service will start and monitor multiple BigQuery Load
	// jobs for a transfer run. Monitored jobs will be automatically retried
	// and produce log messages when starting and finishing a job.
	// Can be called multiple times for the same transfer run.
	StartBigQueryJobs(context.Context, *StartBigQueryJobsRequest) (*empty.Empty, error)
	// Notify the Data Transfer Service that the data source is done processing
	// the run. No more status updates or requests to start/monitor jobs will be
	// accepted. The run will be finalized by the Data Transfer Service when all
	// monitored jobs are completed.
	// Does not need to be called if the run is set to FAILED.
	FinishRun(context.Context, *FinishRunRequest) (*empty.Empty, error)
	// Creates a data source definition.  Calling this method will automatically
	// use your credentials to create the following Google Cloud resources in
	// YOUR Google Cloud project.
	// 1. OAuth client
	// 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g.,
	// projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
	// The field data_source.client_id should be left empty in the input request,
	// as the API will create a new OAuth client on behalf of the caller. On the
	// other hand data_source.scopes usually need to be set when there are OAuth
	// scopes that need to be granted by end users.
	// 3. We need a longer deadline due to the 60-second SLO from Pub/Sub admin
	// operations. This also applies to update and delete data source definition.
	CreateDataSourceDefinition(context.Context, *CreateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Updates an existing data source definition. If changing
	// supported_location_ids, triggers same effects as mentioned in "Create a
	// data source definition."
	UpdateDataSourceDefinition(context.Context, *UpdateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Deletes a data source definition, all of the transfer configs associated
	// with this data source definition (if any) must be deleted first by the user
	// in ALL regions, in order to delete the data source definition.
	// This method is primarily meant for deleting data sources created during
	// testing stage.
	// If the data source is referenced by transfer configs in the region
	// specified in the request URL, the method will fail immediately. If in the
	// current region (e.g., US) it's not used by any transfer configs, but in
	// another region (e.g., EU) it is, then although the method will succeed in
	// region US, it will fail when the deletion operation is replicated to
	// region EU. And eventually, the system will replicate the data source
	// definition back from EU to US, in order to bring all regions to
	// consistency. The final effect is that the data source appears to be
	// 'undeleted' in the US region.
	DeleteDataSourceDefinition(context.Context, *DeleteDataSourceDefinitionRequest) (*empty.Empty, error)
	// Retrieves an existing data source definition.
	GetDataSourceDefinition(context.Context, *GetDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Lists supported data source definitions.
	ListDataSourceDefinitions(context.Context, *ListDataSourceDefinitionsRequest) (*ListDataSourceDefinitionsResponse, error)
}

DataSourceServiceServer is the server API for DataSourceService service.

type DataSource_AuthorizationType

type DataSource_AuthorizationType int32

The type of authorization needed for this data source.

const (
	// Type unspecified.
	DataSource_AUTHORIZATION_TYPE_UNSPECIFIED DataSource_AuthorizationType = 0
	// Use OAuth 2 authorization codes that can be exchanged
	// for a refresh token on the backend.
	DataSource_AUTHORIZATION_CODE DataSource_AuthorizationType = 1
	// Return an authorization code for a given Google+ page that can then be
	// exchanged for a refresh token on the backend.
	DataSource_GOOGLE_PLUS_AUTHORIZATION_CODE DataSource_AuthorizationType = 2
	// Use First Party Client OAuth. First Party Client OAuth doesn't require a
	// refresh token to get an offline access token. Instead, it uses a
	// client-signed JWT assertion to retrieve an access token.
	DataSource_FIRST_PARTY_OAUTH DataSource_AuthorizationType = 3
)

func (DataSource_AuthorizationType) Descriptor

func (DataSource_AuthorizationType) Enum

func (DataSource_AuthorizationType) EnumDescriptor

func (DataSource_AuthorizationType) EnumDescriptor() ([]byte, []int)

Deprecated: Use DataSource_AuthorizationType.Descriptor instead.

func (DataSource_AuthorizationType) Number

func (DataSource_AuthorizationType) String

func (DataSource_AuthorizationType) Type

type DataSource_DataRefreshType

type DataSource_DataRefreshType int32

Represents how the data source supports data auto refresh.

const (
	// The data source won't support data auto refresh, which is the default value.
	DataSource_DATA_REFRESH_TYPE_UNSPECIFIED DataSource_DataRefreshType = 0
	// The data source supports data auto refresh, and runs will be scheduled
	// for the past few days. Does not allow custom values to be set for each
	// transfer config.
	DataSource_SLIDING_WINDOW DataSource_DataRefreshType = 1
	// The data source supports data auto refresh, and runs will be scheduled
	// for the past few days. Allows custom values to be set for each transfer
	// config.
	DataSource_CUSTOM_SLIDING_WINDOW DataSource_DataRefreshType = 2
)

func (DataSource_DataRefreshType) Descriptor

func (DataSource_DataRefreshType) Enum

func (DataSource_DataRefreshType) EnumDescriptor

func (DataSource_DataRefreshType) EnumDescriptor() ([]byte, []int)

Deprecated: Use DataSource_DataRefreshType.Descriptor instead.

func (DataSource_DataRefreshType) Number

func (DataSource_DataRefreshType) String

func (DataSource_DataRefreshType) Type

type DataTransferServiceClient

type DataTransferServiceClient interface {
	// Retrieves a supported data source and returns its settings,
	// which can be used for UI rendering.
	GetDataSource(ctx context.Context, in *GetDataSourceRequest, opts ...grpc.CallOption) (*DataSource, error)
	// Lists supported data sources and returns their settings,
	// which can be used for UI rendering.
	ListDataSources(ctx context.Context, in *ListDataSourcesRequest, opts ...grpc.CallOption) (*ListDataSourcesResponse, error)
	// Creates a new data transfer configuration.
	CreateTransferConfig(ctx context.Context, in *CreateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error)
	// Updates a data transfer configuration.
	// All fields must be set, even if they are not updated.
	UpdateTransferConfig(ctx context.Context, in *UpdateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error)
	// Deletes a data transfer configuration,
	// including any associated transfer runs and logs.
	DeleteTransferConfig(ctx context.Context, in *DeleteTransferConfigRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Returns information about a data transfer config.
	GetTransferConfig(ctx context.Context, in *GetTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error)
	// Returns information about all data transfers in the project.
	ListTransferConfigs(ctx context.Context, in *ListTransferConfigsRequest, opts ...grpc.CallOption) (*ListTransferConfigsResponse, error)
	// Deprecated: Do not use.
	// Creates transfer runs for a time range [start_time, end_time].
	// For each date - or whatever granularity the data source supports - in the
	// range, one transfer run is created.
	// Note that runs are created per UTC time in the time range.
	// DEPRECATED: use StartManualTransferRuns instead.
	ScheduleTransferRuns(ctx context.Context, in *ScheduleTransferRunsRequest, opts ...grpc.CallOption) (*ScheduleTransferRunsResponse, error)
	// Start manual transfer runs to be executed now with schedule_time equal to
	// current time. The transfer runs can be created for a time range where the
	// run_time is between start_time (inclusive) and end_time (exclusive), or for
	// a specific run_time.
	StartManualTransferRuns(ctx context.Context, in *StartManualTransferRunsRequest, opts ...grpc.CallOption) (*StartManualTransferRunsResponse, error)
	// Returns information about the particular transfer run.
	GetTransferRun(ctx context.Context, in *GetTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error)
	// Deletes the specified transfer run.
	DeleteTransferRun(ctx context.Context, in *DeleteTransferRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Returns information about running and completed jobs.
	ListTransferRuns(ctx context.Context, in *ListTransferRunsRequest, opts ...grpc.CallOption) (*ListTransferRunsResponse, error)
	// Returns user facing log messages for the data transfer run.
	ListTransferLogs(ctx context.Context, in *ListTransferLogsRequest, opts ...grpc.CallOption) (*ListTransferLogsResponse, error)
	// Returns true if valid credentials exist for the given data source and
	// requesting user.
	// Some data sources don't support service accounts, so we need to talk to
	// them on behalf of the end user. This API just checks whether we have an
	// OAuth token for the particular user, which is a prerequisite before the
	// user can create a transfer config.
	CheckValidCreds(ctx context.Context, in *CheckValidCredsRequest, opts ...grpc.CallOption) (*CheckValidCredsResponse, error)
}

DataTransferServiceClient is the client API for DataTransferService service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

type DataTransferServiceServer

type DataTransferServiceServer interface {
	// Retrieves a supported data source and returns its settings,
	// which can be used for UI rendering.
	GetDataSource(context.Context, *GetDataSourceRequest) (*DataSource, error)
	// Lists supported data sources and returns their settings,
	// which can be used for UI rendering.
	ListDataSources(context.Context, *ListDataSourcesRequest) (*ListDataSourcesResponse, error)
	// Creates a new data transfer configuration.
	CreateTransferConfig(context.Context, *CreateTransferConfigRequest) (*TransferConfig, error)
	// Updates a data transfer configuration.
	// All fields must be set, even if they are not updated.
	UpdateTransferConfig(context.Context, *UpdateTransferConfigRequest) (*TransferConfig, error)
	// Deletes a data transfer configuration,
	// including any associated transfer runs and logs.
	DeleteTransferConfig(context.Context, *DeleteTransferConfigRequest) (*emptypb.Empty, error)
	// Returns information about a data transfer config.
	GetTransferConfig(context.Context, *GetTransferConfigRequest) (*TransferConfig, error)
	// Returns information about all data transfers in the project.
	ListTransferConfigs(context.Context, *ListTransferConfigsRequest) (*ListTransferConfigsResponse, error)
	// Deprecated: Do not use.
	// Creates transfer runs for a time range [start_time, end_time].
	// For each date - or whatever granularity the data source supports - in the
	// range, one transfer run is created.
	// Note that runs are created per UTC time in the time range.
	// DEPRECATED: use StartManualTransferRuns instead.
	ScheduleTransferRuns(context.Context, *ScheduleTransferRunsRequest) (*ScheduleTransferRunsResponse, error)
	// Start manual transfer runs to be executed now with schedule_time equal to
	// current time. The transfer runs can be created for a time range where the
	// run_time is between start_time (inclusive) and end_time (exclusive), or for
	// a specific run_time.
	StartManualTransferRuns(context.Context, *StartManualTransferRunsRequest) (*StartManualTransferRunsResponse, error)
	// Returns information about the particular transfer run.
	GetTransferRun(context.Context, *GetTransferRunRequest) (*TransferRun, error)
	// Deletes the specified transfer run.
	DeleteTransferRun(context.Context, *DeleteTransferRunRequest) (*emptypb.Empty, error)
	// Returns information about running and completed jobs.
	ListTransferRuns(context.Context, *ListTransferRunsRequest) (*ListTransferRunsResponse, error)
	// Returns user facing log messages for the data transfer run.
	ListTransferLogs(context.Context, *ListTransferLogsRequest) (*ListTransferLogsResponse, error)
	// Returns true if valid credentials exist for the given data source and
	// requesting user.
	// Some data sources don't support service accounts, so we need to talk to
	// them on behalf of the end user. This API just checks whether we have an
	// OAuth token for the particular user, which is a prerequisite before the
	// user can create a transfer config.
	CheckValidCreds(context.Context, *CheckValidCredsRequest) (*CheckValidCredsResponse, error)
}

DataTransferServiceServer is the server API for DataTransferService service.

type DeleteDataSourceDefinitionRequest

type DeleteDataSourceDefinitionRequest struct {
	// The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/locations/{location_id}/dataSourceDefinitions/{data_source_id}`
	Name                 string   `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Represents the request of the DeleteDataSourceDefinition method. All transfer configs associated with the data source must be deleted first, before the data source can be deleted.

func (*DeleteDataSourceDefinitionRequest) Descriptor

func (*DeleteDataSourceDefinitionRequest) Descriptor() ([]byte, []int)

func (*DeleteDataSourceDefinitionRequest) GetName

func (*DeleteDataSourceDefinitionRequest) ProtoMessage

func (*DeleteDataSourceDefinitionRequest) ProtoMessage()

func (*DeleteDataSourceDefinitionRequest) Reset

func (*DeleteDataSourceDefinitionRequest) String

func (*DeleteDataSourceDefinitionRequest) XXX_DiscardUnknown

func (m *DeleteDataSourceDefinitionRequest) XXX_DiscardUnknown()

func (*DeleteDataSourceDefinitionRequest) XXX_Marshal

func (m *DeleteDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DeleteDataSourceDefinitionRequest) XXX_Merge

func (*DeleteDataSourceDefinitionRequest) XXX_Size

func (m *DeleteDataSourceDefinitionRequest) XXX_Size() int

func (*DeleteDataSourceDefinitionRequest) XXX_Unmarshal

func (m *DeleteDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error

type DeleteTransferConfigRequest

type DeleteTransferConfigRequest struct {

	// Required. The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/transferConfigs/{config_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to delete data transfer information. All associated transfer runs and log messages will be deleted as well.

func (*DeleteTransferConfigRequest) Descriptor

func (*DeleteTransferConfigRequest) Descriptor() ([]byte, []int)

Deprecated: Use DeleteTransferConfigRequest.ProtoReflect.Descriptor instead.

func (*DeleteTransferConfigRequest) GetName

func (x *DeleteTransferConfigRequest) GetName() string

func (*DeleteTransferConfigRequest) ProtoMessage

func (*DeleteTransferConfigRequest) ProtoMessage()

func (*DeleteTransferConfigRequest) ProtoReflect

func (*DeleteTransferConfigRequest) Reset

func (x *DeleteTransferConfigRequest) Reset()

func (*DeleteTransferConfigRequest) String

func (x *DeleteTransferConfigRequest) String() string

type DeleteTransferRunRequest

type DeleteTransferRunRequest struct {

	// Required. The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to delete data transfer run information.

func (*DeleteTransferRunRequest) Descriptor

func (*DeleteTransferRunRequest) Descriptor() ([]byte, []int)

Deprecated: Use DeleteTransferRunRequest.ProtoReflect.Descriptor instead.

func (*DeleteTransferRunRequest) GetName

func (x *DeleteTransferRunRequest) GetName() string

func (*DeleteTransferRunRequest) ProtoMessage

func (*DeleteTransferRunRequest) ProtoMessage()

func (*DeleteTransferRunRequest) ProtoReflect

func (x *DeleteTransferRunRequest) ProtoReflect() protoreflect.Message

func (*DeleteTransferRunRequest) Reset

func (x *DeleteTransferRunRequest) Reset()

func (*DeleteTransferRunRequest) String

func (x *DeleteTransferRunRequest) String() string

type EmailPreferences

type EmailPreferences struct {

	// If true, email notifications will be sent on transfer run failures.
	EnableFailureEmail bool `protobuf:"varint,1,opt,name=enable_failure_email,json=enableFailureEmail,proto3" json:"enable_failure_email,omitempty"`
	// contains filtered or unexported fields
}

Represents preferences for sending email notifications for transfer run events.

func (*EmailPreferences) Descriptor

func (*EmailPreferences) Descriptor() ([]byte, []int)

Deprecated: Use EmailPreferences.ProtoReflect.Descriptor instead.

func (*EmailPreferences) GetEnableFailureEmail

func (x *EmailPreferences) GetEnableFailureEmail() bool

func (*EmailPreferences) ProtoMessage

func (*EmailPreferences) ProtoMessage()

func (*EmailPreferences) ProtoReflect

func (x *EmailPreferences) ProtoReflect() protoreflect.Message

func (*EmailPreferences) Reset

func (x *EmailPreferences) Reset()

func (*EmailPreferences) String

func (x *EmailPreferences) String() string

type FinishRunRequest

type FinishRunRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name                 string   `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

A request to finish a run.

func (*FinishRunRequest) Descriptor

func (*FinishRunRequest) Descriptor() ([]byte, []int)

func (*FinishRunRequest) GetName

func (m *FinishRunRequest) GetName() string

func (*FinishRunRequest) ProtoMessage

func (*FinishRunRequest) ProtoMessage()

func (*FinishRunRequest) Reset

func (m *FinishRunRequest) Reset()

func (*FinishRunRequest) String

func (m *FinishRunRequest) String() string

func (*FinishRunRequest) XXX_DiscardUnknown

func (m *FinishRunRequest) XXX_DiscardUnknown()

func (*FinishRunRequest) XXX_Marshal

func (m *FinishRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FinishRunRequest) XXX_Merge

func (m *FinishRunRequest) XXX_Merge(src proto.Message)

func (*FinishRunRequest) XXX_Size

func (m *FinishRunRequest) XXX_Size() int

func (*FinishRunRequest) XXX_Unmarshal

func (m *FinishRunRequest) XXX_Unmarshal(b []byte) error

type GetDataSourceDefinitionRequest

type GetDataSourceDefinitionRequest struct {
	// The field will contain the name of the resource requested.
	Name                 string   `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Represents the request of the GetDataSourceDefinition method.

func (*GetDataSourceDefinitionRequest) Descriptor

func (*GetDataSourceDefinitionRequest) Descriptor() ([]byte, []int)

func (*GetDataSourceDefinitionRequest) GetName

func (*GetDataSourceDefinitionRequest) ProtoMessage

func (*GetDataSourceDefinitionRequest) ProtoMessage()

func (*GetDataSourceDefinitionRequest) Reset

func (m *GetDataSourceDefinitionRequest) Reset()

func (*GetDataSourceDefinitionRequest) String

func (*GetDataSourceDefinitionRequest) XXX_DiscardUnknown

func (m *GetDataSourceDefinitionRequest) XXX_DiscardUnknown()

func (*GetDataSourceDefinitionRequest) XXX_Marshal

func (m *GetDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GetDataSourceDefinitionRequest) XXX_Merge

func (m *GetDataSourceDefinitionRequest) XXX_Merge(src proto.Message)

func (*GetDataSourceDefinitionRequest) XXX_Size

func (m *GetDataSourceDefinitionRequest) XXX_Size() int

func (*GetDataSourceDefinitionRequest) XXX_Unmarshal

func (m *GetDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error

type GetDataSourceRequest

type GetDataSourceRequest struct {

	// Required. The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/dataSources/{data_source_id}` or
	// `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to get data source info.

func (*GetDataSourceRequest) Descriptor

func (*GetDataSourceRequest) Descriptor() ([]byte, []int)

Deprecated: Use GetDataSourceRequest.ProtoReflect.Descriptor instead.

func (*GetDataSourceRequest) GetName

func (x *GetDataSourceRequest) GetName() string

func (*GetDataSourceRequest) ProtoMessage

func (*GetDataSourceRequest) ProtoMessage()

func (*GetDataSourceRequest) ProtoReflect

func (x *GetDataSourceRequest) ProtoReflect() protoreflect.Message

func (*GetDataSourceRequest) Reset

func (x *GetDataSourceRequest) Reset()

func (*GetDataSourceRequest) String

func (x *GetDataSourceRequest) String() string

type GetTransferConfigRequest

type GetTransferConfigRequest struct {

	// Required. The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/transferConfigs/{config_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to get data transfer information.

func (*GetTransferConfigRequest) Descriptor

func (*GetTransferConfigRequest) Descriptor() ([]byte, []int)

Deprecated: Use GetTransferConfigRequest.ProtoReflect.Descriptor instead.

func (*GetTransferConfigRequest) GetName

func (x *GetTransferConfigRequest) GetName() string

func (*GetTransferConfigRequest) ProtoMessage

func (*GetTransferConfigRequest) ProtoMessage()

func (*GetTransferConfigRequest) ProtoReflect

func (x *GetTransferConfigRequest) ProtoReflect() protoreflect.Message

func (*GetTransferConfigRequest) Reset

func (x *GetTransferConfigRequest) Reset()

func (*GetTransferConfigRequest) String

func (x *GetTransferConfigRequest) String() string

type GetTransferRunRequest

type GetTransferRunRequest struct {

	// Required. The field will contain the name of the resource requested, for example:
	// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

A request to get data transfer run information.

func (*GetTransferRunRequest) Descriptor

func (*GetTransferRunRequest) Descriptor() ([]byte, []int)

Deprecated: Use GetTransferRunRequest.ProtoReflect.Descriptor instead.

func (*GetTransferRunRequest) GetName

func (x *GetTransferRunRequest) GetName() string

func (*GetTransferRunRequest) ProtoMessage

func (*GetTransferRunRequest) ProtoMessage()

func (*GetTransferRunRequest) ProtoReflect

func (x *GetTransferRunRequest) ProtoReflect() protoreflect.Message

func (*GetTransferRunRequest) Reset

func (x *GetTransferRunRequest) Reset()

func (*GetTransferRunRequest) String

func (x *GetTransferRunRequest) String() string

type ImportedDataInfo

type ImportedDataInfo struct {
	// SQL query to run. When empty, API checks that there is only one
	// table_def specified and loads this table. Only Standard SQL queries
	// are accepted. Legacy SQL is not allowed.
	Sql string `protobuf:"bytes,1,opt,name=sql,proto3" json:"sql,omitempty"`
	// Table where results should be written.
	DestinationTableId string `protobuf:"bytes,2,opt,name=destination_table_id,json=destinationTableId,proto3" json:"destination_table_id,omitempty"`
	// The description of a destination table. This can be several sentences
	// or paragraphs describing the table contents in detail.
	DestinationTableDescription string `` /* 145-byte string literal not displayed */
	// When used WITHOUT the "sql" parameter, describes the schema of the
	// destination table.
	// When used WITH the "sql" parameter, describes tables with data stored
	// outside of BigQuery.
	TableDefs []*ImportedDataInfo_TableDefinition `protobuf:"bytes,3,rep,name=table_defs,json=tableDefs,proto3" json:"table_defs,omitempty"`
	// Inline code for User-defined function resources.
	// Ignored when "sql" parameter is empty.
	UserDefinedFunctions []string `protobuf:"bytes,4,rep,name=user_defined_functions,json=userDefinedFunctions,proto3" json:"user_defined_functions,omitempty"`
	// Specifies the action if the destination table already exists.
	WriteDisposition     WriteDisposition `` /* 170-byte string literal not displayed */
	XXX_NoUnkeyedLiteral struct{}         `json:"-"`
	XXX_unrecognized     []byte           `json:"-"`
	XXX_sizecache        int32            `json:"-"`
}

Describes data which should be imported.

func (*ImportedDataInfo) Descriptor

func (*ImportedDataInfo) Descriptor() ([]byte, []int)

func (*ImportedDataInfo) GetDestinationTableDescription

func (m *ImportedDataInfo) GetDestinationTableDescription() string

func (*ImportedDataInfo) GetDestinationTableId

func (m *ImportedDataInfo) GetDestinationTableId() string

func (*ImportedDataInfo) GetSql

func (m *ImportedDataInfo) GetSql() string

func (*ImportedDataInfo) GetTableDefs

func (*ImportedDataInfo) GetUserDefinedFunctions

func (m *ImportedDataInfo) GetUserDefinedFunctions() []string

func (*ImportedDataInfo) GetWriteDisposition

func (m *ImportedDataInfo) GetWriteDisposition() WriteDisposition

func (*ImportedDataInfo) ProtoMessage

func (*ImportedDataInfo) ProtoMessage()

func (*ImportedDataInfo) Reset

func (m *ImportedDataInfo) Reset()

func (*ImportedDataInfo) String

func (m *ImportedDataInfo) String() string

func (*ImportedDataInfo) XXX_DiscardUnknown

func (m *ImportedDataInfo) XXX_DiscardUnknown()

func (*ImportedDataInfo) XXX_Marshal

func (m *ImportedDataInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImportedDataInfo) XXX_Merge

func (m *ImportedDataInfo) XXX_Merge(src proto.Message)

func (*ImportedDataInfo) XXX_Size

func (m *ImportedDataInfo) XXX_Size() int

func (*ImportedDataInfo) XXX_Unmarshal

func (m *ImportedDataInfo) XXX_Unmarshal(b []byte) error

type ImportedDataInfo_Encoding

type ImportedDataInfo_Encoding int32

Encoding of input data in CSV/JSON format.

const (
	// Default encoding (UTF8).
	ImportedDataInfo_ENCODING_UNSPECIFIED ImportedDataInfo_Encoding = 0
	// ISO_8859_1 encoding.
	ImportedDataInfo_ISO_8859_1 ImportedDataInfo_Encoding = 1
	// UTF8 encoding.
	ImportedDataInfo_UTF8 ImportedDataInfo_Encoding = 2
)

func (ImportedDataInfo_Encoding) EnumDescriptor

func (ImportedDataInfo_Encoding) EnumDescriptor() ([]byte, []int)

func (ImportedDataInfo_Encoding) String

func (x ImportedDataInfo_Encoding) String() string

type ImportedDataInfo_FieldSchema

type ImportedDataInfo_FieldSchema struct {
	// Field name. Matches: [A-Za-z_][A-Za-z_0-9]{0,127}
	FieldName string `protobuf:"bytes,1,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"`
	// Field type
	Type ImportedDataInfo_FieldSchema_Type `` /* 139-byte string literal not displayed */
	// Is field repeated.
	IsRepeated bool `protobuf:"varint,3,opt,name=is_repeated,json=isRepeated,proto3" json:"is_repeated,omitempty"`
	// Description for this field.
	Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
	// Present iff type == RECORD.
	Schema               *ImportedDataInfo_RecordSchema `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                       `json:"-"`
	XXX_unrecognized     []byte                         `json:"-"`
	XXX_sizecache        int32                          `json:"-"`
}

Defines schema of a field in the imported data.

func (*ImportedDataInfo_FieldSchema) Descriptor

func (*ImportedDataInfo_FieldSchema) Descriptor() ([]byte, []int)

func (*ImportedDataInfo_FieldSchema) GetDescription

func (m *ImportedDataInfo_FieldSchema) GetDescription() string

func (*ImportedDataInfo_FieldSchema) GetFieldName

func (m *ImportedDataInfo_FieldSchema) GetFieldName() string

func (*ImportedDataInfo_FieldSchema) GetIsRepeated

func (m *ImportedDataInfo_FieldSchema) GetIsRepeated() bool

func (*ImportedDataInfo_FieldSchema) GetSchema

func (*ImportedDataInfo_FieldSchema) GetType

func (*ImportedDataInfo_FieldSchema) ProtoMessage

func (*ImportedDataInfo_FieldSchema) ProtoMessage()

func (*ImportedDataInfo_FieldSchema) Reset

func (m *ImportedDataInfo_FieldSchema) Reset()

func (*ImportedDataInfo_FieldSchema) String

func (*ImportedDataInfo_FieldSchema) XXX_DiscardUnknown

func (m *ImportedDataInfo_FieldSchema) XXX_DiscardUnknown()

func (*ImportedDataInfo_FieldSchema) XXX_Marshal

func (m *ImportedDataInfo_FieldSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImportedDataInfo_FieldSchema) XXX_Merge

func (m *ImportedDataInfo_FieldSchema) XXX_Merge(src proto.Message)

func (*ImportedDataInfo_FieldSchema) XXX_Size

func (m *ImportedDataInfo_FieldSchema) XXX_Size() int

func (*ImportedDataInfo_FieldSchema) XXX_Unmarshal

func (m *ImportedDataInfo_FieldSchema) XXX_Unmarshal(b []byte) error

type ImportedDataInfo_FieldSchema_Type

type ImportedDataInfo_FieldSchema_Type int32

Field type.

const (
	// Illegal value.
	ImportedDataInfo_FieldSchema_TYPE_UNSPECIFIED ImportedDataInfo_FieldSchema_Type = 0
	// 64K, UTF8.
	ImportedDataInfo_FieldSchema_STRING ImportedDataInfo_FieldSchema_Type = 1
	// 64-bit signed.
	ImportedDataInfo_FieldSchema_INTEGER ImportedDataInfo_FieldSchema_Type = 2
	// 64-bit IEEE floating point.
	ImportedDataInfo_FieldSchema_FLOAT ImportedDataInfo_FieldSchema_Type = 3
	// Aggregate type.
	ImportedDataInfo_FieldSchema_RECORD ImportedDataInfo_FieldSchema_Type = 4
	// 64K, Binary.
	ImportedDataInfo_FieldSchema_BYTES ImportedDataInfo_FieldSchema_Type = 5
	// 2-valued.
	ImportedDataInfo_FieldSchema_BOOLEAN ImportedDataInfo_FieldSchema_Type = 6
	// 64-bit signed usec since UTC epoch.
	ImportedDataInfo_FieldSchema_TIMESTAMP ImportedDataInfo_FieldSchema_Type = 7
	// Civil date - Year, Month, Day.
	ImportedDataInfo_FieldSchema_DATE ImportedDataInfo_FieldSchema_Type = 8
	// Civil time - Hour, Minute, Second, Microseconds.
	ImportedDataInfo_FieldSchema_TIME ImportedDataInfo_FieldSchema_Type = 9
	// Combination of civil date and civil time.
	ImportedDataInfo_FieldSchema_DATETIME ImportedDataInfo_FieldSchema_Type = 10
	// Numeric type with 38 decimal digits of precision and 9 decimal digits
	// of scale.
	ImportedDataInfo_FieldSchema_NUMERIC ImportedDataInfo_FieldSchema_Type = 11
	// Geography object (go/googlesql_geography).
	ImportedDataInfo_FieldSchema_GEOGRAPHY ImportedDataInfo_FieldSchema_Type = 12
)

func (ImportedDataInfo_FieldSchema_Type) EnumDescriptor

func (ImportedDataInfo_FieldSchema_Type) EnumDescriptor() ([]byte, []int)

func (ImportedDataInfo_FieldSchema_Type) String

type ImportedDataInfo_Format

type ImportedDataInfo_Format int32

Data format.

const (
	// Unspecified format. In this case, we have to infer the format from the
	// data source.
	ImportedDataInfo_FORMAT_UNSPECIFIED ImportedDataInfo_Format = 0
	// CSV format.
	ImportedDataInfo_CSV ImportedDataInfo_Format = 1
	// Newline-delimited JSON.
	ImportedDataInfo_JSON ImportedDataInfo_Format = 2
	// Avro format. See http://avro.apache.org .
	ImportedDataInfo_AVRO ImportedDataInfo_Format = 3
	// RecordIO.
	ImportedDataInfo_RECORDIO ImportedDataInfo_Format = 4
	// ColumnIO.
	ImportedDataInfo_COLUMNIO ImportedDataInfo_Format = 5
	// Capacitor.
	ImportedDataInfo_CAPACITOR ImportedDataInfo_Format = 6
	// Parquet format. See https://parquet.apache.org .
	ImportedDataInfo_PARQUET ImportedDataInfo_Format = 7
	// ORC format. See https://orc.apache.org .
	ImportedDataInfo_ORC ImportedDataInfo_Format = 8
)

func (ImportedDataInfo_Format) EnumDescriptor

func (ImportedDataInfo_Format) EnumDescriptor() ([]byte, []int)

func (ImportedDataInfo_Format) String

func (x ImportedDataInfo_Format) String() string

type ImportedDataInfo_RecordSchema

type ImportedDataInfo_RecordSchema struct {
	// One field per column in the record.
	Fields               []*ImportedDataInfo_FieldSchema `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                        `json:"-"`
	XXX_unrecognized     []byte                          `json:"-"`
	XXX_sizecache        int32                           `json:"-"`
}

Describes schema of the data to be ingested.

func (*ImportedDataInfo_RecordSchema) Descriptor

func (*ImportedDataInfo_RecordSchema) Descriptor() ([]byte, []int)

func (*ImportedDataInfo_RecordSchema) GetFields

func (*ImportedDataInfo_RecordSchema) ProtoMessage

func (*ImportedDataInfo_RecordSchema) ProtoMessage()

func (*ImportedDataInfo_RecordSchema) Reset

func (m *ImportedDataInfo_RecordSchema) Reset()

func (*ImportedDataInfo_RecordSchema) String

func (*ImportedDataInfo_RecordSchema) XXX_DiscardUnknown

func (m *ImportedDataInfo_RecordSchema) XXX_DiscardUnknown()

func (*ImportedDataInfo_RecordSchema) XXX_Marshal

func (m *ImportedDataInfo_RecordSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImportedDataInfo_RecordSchema) XXX_Merge

func (m *ImportedDataInfo_RecordSchema) XXX_Merge(src proto.Message)

func (*ImportedDataInfo_RecordSchema) XXX_Size

func (m *ImportedDataInfo_RecordSchema) XXX_Size() int

func (*ImportedDataInfo_RecordSchema) XXX_Unmarshal

func (m *ImportedDataInfo_RecordSchema) XXX_Unmarshal(b []byte) error

type ImportedDataInfo_TableDefinition

type ImportedDataInfo_TableDefinition struct {
	// BigQuery table_id (required). This will be used to reference this
	// table in the query.
	TableId string `protobuf:"bytes,1,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"`
	// URIs for the data to be imported. All URIs must be from the same storage
	// system.
	SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"`
	// Describes the format of the data in source_uri.
	Format ImportedDataInfo_Format `` /* 133-byte string literal not displayed */
	// Specify the maximum number of bad records that can be ignored.
	// If bad records exceed this threshold the query is aborted.
	MaxBadRecords int32 `protobuf:"varint,4,opt,name=max_bad_records,json=maxBadRecords,proto3" json:"max_bad_records,omitempty"`
	// Character encoding of the input when applicable (CSV, JSON).
	// Defaults to UTF8.
	Encoding ImportedDataInfo_Encoding `` /* 139-byte string literal not displayed */
	// CSV specific options.
	CsvOptions *ImportedDataInfo_TableDefinition_CsvOptions `protobuf:"bytes,6,opt,name=csv_options,json=csvOptions,proto3" json:"csv_options,omitempty"`
	// Optional schema for the data. When not specified for JSON and CSV formats
	// we will try to detect it automatically.
	Schema *ImportedDataInfo_RecordSchema `protobuf:"bytes,7,opt,name=schema,proto3" json:"schema,omitempty"`
	// Indicates whether extra values that are not represented in the table
	// schema are allowed.
	IgnoreUnknownValues  *wrappers.BoolValue `protobuf:"bytes,10,opt,name=ignore_unknown_values,json=ignoreUnknownValues,proto3" json:"ignore_unknown_values,omitempty"`
	XXX_NoUnkeyedLiteral struct{}            `json:"-"`
	XXX_unrecognized     []byte              `json:"-"`
	XXX_sizecache        int32               `json:"-"`
}

External table definition. These tables can be referenced with 'name' in the query and can be read just like any other table.

func (*ImportedDataInfo_TableDefinition) Descriptor

func (*ImportedDataInfo_TableDefinition) Descriptor() ([]byte, []int)

func (*ImportedDataInfo_TableDefinition) GetCsvOptions

func (*ImportedDataInfo_TableDefinition) GetEncoding

func (*ImportedDataInfo_TableDefinition) GetFormat

func (*ImportedDataInfo_TableDefinition) GetIgnoreUnknownValues

func (m *ImportedDataInfo_TableDefinition) GetIgnoreUnknownValues() *wrappers.BoolValue

func (*ImportedDataInfo_TableDefinition) GetMaxBadRecords

func (m *ImportedDataInfo_TableDefinition) GetMaxBadRecords() int32

func (*ImportedDataInfo_TableDefinition) GetSchema

func (*ImportedDataInfo_TableDefinition) GetSourceUris

func (m *ImportedDataInfo_TableDefinition) GetSourceUris() []string

func (*ImportedDataInfo_TableDefinition) GetTableId

func (m *ImportedDataInfo_TableDefinition) GetTableId() string

func (*ImportedDataInfo_TableDefinition) ProtoMessage

func (*ImportedDataInfo_TableDefinition) ProtoMessage()

func (*ImportedDataInfo_TableDefinition) Reset

func (*ImportedDataInfo_TableDefinition) String

func (*ImportedDataInfo_TableDefinition) XXX_DiscardUnknown

func (m *ImportedDataInfo_TableDefinition) XXX_DiscardUnknown()

func (*ImportedDataInfo_TableDefinition) XXX_Marshal

func (m *ImportedDataInfo_TableDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImportedDataInfo_TableDefinition) XXX_Merge

func (*ImportedDataInfo_TableDefinition) XXX_Size

func (m *ImportedDataInfo_TableDefinition) XXX_Size() int

func (*ImportedDataInfo_TableDefinition) XXX_Unmarshal

func (m *ImportedDataInfo_TableDefinition) XXX_Unmarshal(b []byte) error

type ImportedDataInfo_TableDefinition_CsvOptions

type ImportedDataInfo_TableDefinition_CsvOptions struct {
	// The delimiter.  We currently restrict this to U+0001 to U+00FF and
	// apply additional constraints during validation.
	FieldDelimiter *wrappers.StringValue `protobuf:"bytes,1,opt,name=field_delimiter,json=fieldDelimiter,proto3" json:"field_delimiter,omitempty"`
	// Whether CSV files are allowed to have quoted newlines. If quoted
	// newlines are allowed, we can't split CSV files.
	AllowQuotedNewlines *wrappers.BoolValue `protobuf:"bytes,2,opt,name=allow_quoted_newlines,json=allowQuotedNewlines,proto3" json:"allow_quoted_newlines,omitempty"`
	// The quote character.  We currently restrict this to U+0000 to U+00FF
	// and apply additional constraints during validation. Set to '\0' to
	// indicate no quote is used.
	QuoteChar *wrappers.StringValue `protobuf:"bytes,3,opt,name=quote_char,json=quoteChar,proto3" json:"quote_char,omitempty"`
	// Number of leading rows to skip.
	SkipLeadingRows *wrappers.Int64Value `protobuf:"bytes,4,opt,name=skip_leading_rows,json=skipLeadingRows,proto3" json:"skip_leading_rows,omitempty"`
	// Accept rows that are missing trailing optional columns.
	AllowJaggedRows      *wrappers.BoolValue `protobuf:"bytes,5,opt,name=allow_jagged_rows,json=allowJaggedRows,proto3" json:"allow_jagged_rows,omitempty"`
	XXX_NoUnkeyedLiteral struct{}            `json:"-"`
	XXX_unrecognized     []byte              `json:"-"`
	XXX_sizecache        int32               `json:"-"`
}

CSV specific options.

func (*ImportedDataInfo_TableDefinition_CsvOptions) Descriptor

func (*ImportedDataInfo_TableDefinition_CsvOptions) GetAllowJaggedRows

func (*ImportedDataInfo_TableDefinition_CsvOptions) GetAllowQuotedNewlines

func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetAllowQuotedNewlines() *wrappers.BoolValue

func (*ImportedDataInfo_TableDefinition_CsvOptions) GetFieldDelimiter

func (*ImportedDataInfo_TableDefinition_CsvOptions) GetQuoteChar

func (*ImportedDataInfo_TableDefinition_CsvOptions) GetSkipLeadingRows

func (*ImportedDataInfo_TableDefinition_CsvOptions) ProtoMessage

func (*ImportedDataInfo_TableDefinition_CsvOptions) Reset

func (*ImportedDataInfo_TableDefinition_CsvOptions) String

func (*ImportedDataInfo_TableDefinition_CsvOptions) XXX_DiscardUnknown

func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_DiscardUnknown()

func (*ImportedDataInfo_TableDefinition_CsvOptions) XXX_Marshal

func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImportedDataInfo_TableDefinition_CsvOptions) XXX_Merge

func (*ImportedDataInfo_TableDefinition_CsvOptions) XXX_Size

func (*ImportedDataInfo_TableDefinition_CsvOptions) XXX_Unmarshal

type ListDataSourceDefinitionsRequest

type ListDataSourceDefinitionsRequest struct {
	// The BigQuery project id for which data sources should be returned.
	// Must be in the form: `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Pagination token, which can be used to request a specific page
	// of `ListDataSourceDefinitionsRequest` list results. For multiple-page
	// results, `ListDataSourceDefinitionsResponse` outputs a `next_page` token,
	// which can be used as the `page_token` value to request the next page of
	// the list results.
	PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize             int32    `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Represents the request of the ListDataSourceDefinitions method.

func (*ListDataSourceDefinitionsRequest) Descriptor

func (*ListDataSourceDefinitionsRequest) Descriptor() ([]byte, []int)

func (*ListDataSourceDefinitionsRequest) GetPageSize

func (m *ListDataSourceDefinitionsRequest) GetPageSize() int32

func (*ListDataSourceDefinitionsRequest) GetPageToken

func (m *ListDataSourceDefinitionsRequest) GetPageToken() string

func (*ListDataSourceDefinitionsRequest) GetParent

func (*ListDataSourceDefinitionsRequest) ProtoMessage

func (*ListDataSourceDefinitionsRequest) ProtoMessage()

func (*ListDataSourceDefinitionsRequest) Reset

func (*ListDataSourceDefinitionsRequest) String

func (*ListDataSourceDefinitionsRequest) XXX_DiscardUnknown

func (m *ListDataSourceDefinitionsRequest) XXX_DiscardUnknown()

func (*ListDataSourceDefinitionsRequest) XXX_Marshal

func (m *ListDataSourceDefinitionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ListDataSourceDefinitionsRequest) XXX_Merge

func (*ListDataSourceDefinitionsRequest) XXX_Size

func (m *ListDataSourceDefinitionsRequest) XXX_Size() int

func (*ListDataSourceDefinitionsRequest) XXX_Unmarshal

func (m *ListDataSourceDefinitionsRequest) XXX_Unmarshal(b []byte) error

type ListDataSourceDefinitionsResponse

type ListDataSourceDefinitionsResponse struct {
	// List of supported data source definitions.
	DataSourceDefinitions []*DataSourceDefinition `` /* 126-byte string literal not displayed */
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `ListDataSourceDefinitionsRequest.page_token`
	// to request the next page of the list results.
	NextPageToken        string   `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

Returns a list of supported data source definitions.

func (*ListDataSourceDefinitionsResponse) Descriptor

func (*ListDataSourceDefinitionsResponse) Descriptor() ([]byte, []int)

func (*ListDataSourceDefinitionsResponse) GetDataSourceDefinitions

func (m *ListDataSourceDefinitionsResponse) GetDataSourceDefinitions() []*DataSourceDefinition

func (*ListDataSourceDefinitionsResponse) GetNextPageToken

func (m *ListDataSourceDefinitionsResponse) GetNextPageToken() string

func (*ListDataSourceDefinitionsResponse) ProtoMessage

func (*ListDataSourceDefinitionsResponse) ProtoMessage()

func (*ListDataSourceDefinitionsResponse) Reset

func (*ListDataSourceDefinitionsResponse) String

func (*ListDataSourceDefinitionsResponse) XXX_DiscardUnknown

func (m *ListDataSourceDefinitionsResponse) XXX_DiscardUnknown()

func (*ListDataSourceDefinitionsResponse) XXX_Marshal

func (m *ListDataSourceDefinitionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ListDataSourceDefinitionsResponse) XXX_Merge

func (*ListDataSourceDefinitionsResponse) XXX_Size

func (m *ListDataSourceDefinitionsResponse) XXX_Size() int

func (*ListDataSourceDefinitionsResponse) XXX_Unmarshal

func (m *ListDataSourceDefinitionsResponse) XXX_Unmarshal(b []byte) error

type ListDataSourcesRequest

type ListDataSourcesRequest struct {

	// Required. The BigQuery project id for which data sources should be returned.
	// Must be in the form: `projects/{project_id}` or
	// `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Pagination token, which can be used to request a specific page
	// of `ListDataSourcesRequest` list results. For multiple-page
	// results, `ListDataSourcesResponse` outputs
	// a `next_page` token, which can be used as the
	// `page_token` value to request the next page of list results.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// contains filtered or unexported fields
}

Request to list supported data sources and their data transfer settings.

func (*ListDataSourcesRequest) Descriptor

func (*ListDataSourcesRequest) Descriptor() ([]byte, []int)

Deprecated: Use ListDataSourcesRequest.ProtoReflect.Descriptor instead.

func (*ListDataSourcesRequest) GetPageSize

func (x *ListDataSourcesRequest) GetPageSize() int32

func (*ListDataSourcesRequest) GetPageToken

func (x *ListDataSourcesRequest) GetPageToken() string

func (*ListDataSourcesRequest) GetParent

func (x *ListDataSourcesRequest) GetParent() string

func (*ListDataSourcesRequest) ProtoMessage

func (*ListDataSourcesRequest) ProtoMessage()

func (*ListDataSourcesRequest) ProtoReflect

func (x *ListDataSourcesRequest) ProtoReflect() protoreflect.Message

func (*ListDataSourcesRequest) Reset

func (x *ListDataSourcesRequest) Reset()

func (*ListDataSourcesRequest) String

func (x *ListDataSourcesRequest) String() string

type ListDataSourcesResponse

type ListDataSourcesResponse struct {

	// List of supported data sources and their transfer settings.
	DataSources []*DataSource `protobuf:"bytes,1,rep,name=data_sources,json=dataSources,proto3" json:"data_sources,omitempty"`
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `ListDataSourcesRequest.page_token`
	// to request the next page of list results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// contains filtered or unexported fields
}

Returns list of supported data sources and their metadata.

func (*ListDataSourcesResponse) Descriptor

func (*ListDataSourcesResponse) Descriptor() ([]byte, []int)

Deprecated: Use ListDataSourcesResponse.ProtoReflect.Descriptor instead.

func (*ListDataSourcesResponse) GetDataSources

func (x *ListDataSourcesResponse) GetDataSources() []*DataSource

func (*ListDataSourcesResponse) GetNextPageToken

func (x *ListDataSourcesResponse) GetNextPageToken() string

func (*ListDataSourcesResponse) ProtoMessage

func (*ListDataSourcesResponse) ProtoMessage()

func (*ListDataSourcesResponse) ProtoReflect

func (x *ListDataSourcesResponse) ProtoReflect() protoreflect.Message

func (*ListDataSourcesResponse) Reset

func (x *ListDataSourcesResponse) Reset()

func (*ListDataSourcesResponse) String

func (x *ListDataSourcesResponse) String() string

type ListTransferConfigsRequest

type ListTransferConfigsRequest struct {

	// Required. The BigQuery project id for which data sources
	// should be returned: `projects/{project_id}` or
	// `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// When specified, only configurations of requested data sources are returned.
	DataSourceIds []string `protobuf:"bytes,2,rep,name=data_source_ids,json=dataSourceIds,proto3" json:"data_source_ids,omitempty"`
	// Pagination token, which can be used to request a specific page
	// of `ListTransfersRequest` list results. For multiple-page
	// results, `ListTransfersResponse` outputs
	// a `next_page` token, which can be used as the
	// `page_token` value to request the next page of list results.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// contains filtered or unexported fields
}

A request to list data transfers configured for a BigQuery project.

func (*ListTransferConfigsRequest) Descriptor

func (*ListTransferConfigsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferConfigsRequest.ProtoReflect.Descriptor instead.

func (*ListTransferConfigsRequest) GetDataSourceIds

func (x *ListTransferConfigsRequest) GetDataSourceIds() []string

func (*ListTransferConfigsRequest) GetPageSize

func (x *ListTransferConfigsRequest) GetPageSize() int32

func (*ListTransferConfigsRequest) GetPageToken

func (x *ListTransferConfigsRequest) GetPageToken() string

func (*ListTransferConfigsRequest) GetParent

func (x *ListTransferConfigsRequest) GetParent() string

func (*ListTransferConfigsRequest) ProtoMessage

func (*ListTransferConfigsRequest) ProtoMessage()

func (*ListTransferConfigsRequest) ProtoReflect

func (*ListTransferConfigsRequest) Reset

func (x *ListTransferConfigsRequest) Reset()

func (*ListTransferConfigsRequest) String

func (x *ListTransferConfigsRequest) String() string

type ListTransferConfigsResponse

type ListTransferConfigsResponse struct {

	// Output only. The stored pipeline transfer configurations.
	TransferConfigs []*TransferConfig `protobuf:"bytes,1,rep,name=transfer_configs,json=transferConfigs,proto3" json:"transfer_configs,omitempty"`
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `ListTransferConfigsRequest.page_token`
	// to request the next page of list results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// contains filtered or unexported fields
}

The returned list of pipelines in the project.

func (*ListTransferConfigsResponse) Descriptor

func (*ListTransferConfigsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferConfigsResponse.ProtoReflect.Descriptor instead.

func (*ListTransferConfigsResponse) GetNextPageToken

func (x *ListTransferConfigsResponse) GetNextPageToken() string

func (*ListTransferConfigsResponse) GetTransferConfigs

func (x *ListTransferConfigsResponse) GetTransferConfigs() []*TransferConfig

func (*ListTransferConfigsResponse) ProtoMessage

func (*ListTransferConfigsResponse) ProtoMessage()

func (*ListTransferConfigsResponse) ProtoReflect

func (*ListTransferConfigsResponse) Reset

func (x *ListTransferConfigsResponse) Reset()

func (*ListTransferConfigsResponse) String

func (x *ListTransferConfigsResponse) String() string

type ListTransferLogsRequest

type ListTransferLogsRequest struct {

	// Required. Transfer run name in the form:
	// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Pagination token, which can be used to request a specific page
	// of `ListTransferLogsRequest` list results. For multiple-page
	// results, `ListTransferLogsResponse` outputs
	// a `next_page` token, which can be used as the
	// `page_token` value to request the next page of list results.
	PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// Message types to return. If not populated - INFO, WARNING and ERROR
	// messages are returned.
	MessageTypes []TransferMessage_MessageSeverity `` /* 180-byte string literal not displayed */
	// contains filtered or unexported fields
}

A request to get user-facing log messages associated with a data transfer run.

func (*ListTransferLogsRequest) Descriptor

func (*ListTransferLogsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferLogsRequest.ProtoReflect.Descriptor instead.

func (*ListTransferLogsRequest) GetMessageTypes

func (*ListTransferLogsRequest) GetPageSize

func (x *ListTransferLogsRequest) GetPageSize() int32

func (*ListTransferLogsRequest) GetPageToken

func (x *ListTransferLogsRequest) GetPageToken() string

func (*ListTransferLogsRequest) GetParent

func (x *ListTransferLogsRequest) GetParent() string

func (*ListTransferLogsRequest) ProtoMessage

func (*ListTransferLogsRequest) ProtoMessage()

func (*ListTransferLogsRequest) ProtoReflect

func (x *ListTransferLogsRequest) ProtoReflect() protoreflect.Message

func (*ListTransferLogsRequest) Reset

func (x *ListTransferLogsRequest) Reset()

func (*ListTransferLogsRequest) String

func (x *ListTransferLogsRequest) String() string

type ListTransferLogsResponse

type ListTransferLogsResponse struct {

	// Output only. The stored pipeline transfer messages.
	TransferMessages []*TransferMessage `protobuf:"bytes,1,rep,name=transfer_messages,json=transferMessages,proto3" json:"transfer_messages,omitempty"`
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `GetTransferRunLogRequest.page_token`
	// to request the next page of list results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// contains filtered or unexported fields
}

The returned list transfer run messages.

func (*ListTransferLogsResponse) Descriptor

func (*ListTransferLogsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferLogsResponse.ProtoReflect.Descriptor instead.

func (*ListTransferLogsResponse) GetNextPageToken

func (x *ListTransferLogsResponse) GetNextPageToken() string

func (*ListTransferLogsResponse) GetTransferMessages

func (x *ListTransferLogsResponse) GetTransferMessages() []*TransferMessage

func (*ListTransferLogsResponse) ProtoMessage

func (*ListTransferLogsResponse) ProtoMessage()

func (*ListTransferLogsResponse) ProtoReflect

func (x *ListTransferLogsResponse) ProtoReflect() protoreflect.Message

func (*ListTransferLogsResponse) Reset

func (x *ListTransferLogsResponse) Reset()

func (*ListTransferLogsResponse) String

func (x *ListTransferLogsResponse) String() string

type ListTransferRunsRequest

type ListTransferRunsRequest struct {

	// Required. Name of transfer configuration for which transfer runs should be retrieved.
	// Format of transfer configuration resource name is:
	// `projects/{project_id}/transferConfigs/{config_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// When specified, only transfer runs with requested states are returned.
	States []TransferState `` /* 130-byte string literal not displayed */
	// Pagination token, which can be used to request a specific page
	// of `ListTransferRunsRequest` list results. For multiple-page
	// results, `ListTransferRunsResponse` outputs
	// a `next_page` token, which can be used as the
	// `page_token` value to request the next page of list results.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// Indicates how run attempts are to be pulled.
	RunAttempt ListTransferRunsRequest_RunAttempt `` /* 170-byte string literal not displayed */
	// contains filtered or unexported fields
}

A request to list data transfer runs. UI can use this method to show/filter specific data transfer runs. The data source can use this method to request all scheduled transfer runs.

func (*ListTransferRunsRequest) Descriptor

func (*ListTransferRunsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferRunsRequest.ProtoReflect.Descriptor instead.

func (*ListTransferRunsRequest) GetPageSize

func (x *ListTransferRunsRequest) GetPageSize() int32

func (*ListTransferRunsRequest) GetPageToken

func (x *ListTransferRunsRequest) GetPageToken() string

func (*ListTransferRunsRequest) GetParent

func (x *ListTransferRunsRequest) GetParent() string

func (*ListTransferRunsRequest) GetRunAttempt

func (*ListTransferRunsRequest) GetStates

func (x *ListTransferRunsRequest) GetStates() []TransferState

func (*ListTransferRunsRequest) ProtoMessage

func (*ListTransferRunsRequest) ProtoMessage()

func (*ListTransferRunsRequest) ProtoReflect

func (x *ListTransferRunsRequest) ProtoReflect() protoreflect.Message

func (*ListTransferRunsRequest) Reset

func (x *ListTransferRunsRequest) Reset()

func (*ListTransferRunsRequest) String

func (x *ListTransferRunsRequest) String() string

type ListTransferRunsRequest_RunAttempt

type ListTransferRunsRequest_RunAttempt int32

Represents which runs should be pulled.

const (
	// All runs should be returned.
	ListTransferRunsRequest_RUN_ATTEMPT_UNSPECIFIED ListTransferRunsRequest_RunAttempt = 0
	// Only latest run per day should be returned.
	ListTransferRunsRequest_LATEST ListTransferRunsRequest_RunAttempt = 1
)

func (ListTransferRunsRequest_RunAttempt) Descriptor

func (ListTransferRunsRequest_RunAttempt) Enum

func (ListTransferRunsRequest_RunAttempt) EnumDescriptor

func (ListTransferRunsRequest_RunAttempt) EnumDescriptor() ([]byte, []int)

Deprecated: Use ListTransferRunsRequest_RunAttempt.Descriptor instead.

func (ListTransferRunsRequest_RunAttempt) Number

func (ListTransferRunsRequest_RunAttempt) String

func (ListTransferRunsRequest_RunAttempt) Type

type ListTransferRunsResponse

type ListTransferRunsResponse struct {

	// Output only. The stored pipeline transfer runs.
	TransferRuns []*TransferRun `protobuf:"bytes,1,rep,name=transfer_runs,json=transferRuns,proto3" json:"transfer_runs,omitempty"`
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `ListTransferRunsRequest.page_token`
	// to request the next page of list results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// contains filtered or unexported fields
}

The returned list of pipelines in the project.

func (*ListTransferRunsResponse) Descriptor

func (*ListTransferRunsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ListTransferRunsResponse.ProtoReflect.Descriptor instead.

func (*ListTransferRunsResponse) GetNextPageToken

func (x *ListTransferRunsResponse) GetNextPageToken() string

func (*ListTransferRunsResponse) GetTransferRuns

func (x *ListTransferRunsResponse) GetTransferRuns() []*TransferRun

func (*ListTransferRunsResponse) ProtoMessage

func (*ListTransferRunsResponse) ProtoMessage()

func (*ListTransferRunsResponse) ProtoReflect

func (x *ListTransferRunsResponse) ProtoReflect() protoreflect.Message

func (*ListTransferRunsResponse) Reset

func (x *ListTransferRunsResponse) Reset()

func (*ListTransferRunsResponse) String

func (x *ListTransferRunsResponse) String() string

type LogTransferRunMessagesRequest

type LogTransferRunMessagesRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Messages to append.
	TransferMessages     []*TransferMessage `protobuf:"bytes,2,rep,name=transfer_messages,json=transferMessages,proto3" json:"transfer_messages,omitempty"`
	XXX_NoUnkeyedLiteral struct{}           `json:"-"`
	XXX_unrecognized     []byte             `json:"-"`
	XXX_sizecache        int32              `json:"-"`
}

A request to add transfer status messages to the run.

func (*LogTransferRunMessagesRequest) Descriptor

func (*LogTransferRunMessagesRequest) Descriptor() ([]byte, []int)

func (*LogTransferRunMessagesRequest) GetName

func (*LogTransferRunMessagesRequest) GetTransferMessages

func (m *LogTransferRunMessagesRequest) GetTransferMessages() []*TransferMessage

func (*LogTransferRunMessagesRequest) ProtoMessage

func (*LogTransferRunMessagesRequest) ProtoMessage()

func (*LogTransferRunMessagesRequest) Reset

func (m *LogTransferRunMessagesRequest) Reset()

func (*LogTransferRunMessagesRequest) String

func (*LogTransferRunMessagesRequest) XXX_DiscardUnknown

func (m *LogTransferRunMessagesRequest) XXX_DiscardUnknown()

func (*LogTransferRunMessagesRequest) XXX_Marshal

func (m *LogTransferRunMessagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LogTransferRunMessagesRequest) XXX_Merge

func (m *LogTransferRunMessagesRequest) XXX_Merge(src proto.Message)

func (*LogTransferRunMessagesRequest) XXX_Size

func (m *LogTransferRunMessagesRequest) XXX_Size() int

func (*LogTransferRunMessagesRequest) XXX_Unmarshal

func (m *LogTransferRunMessagesRequest) XXX_Unmarshal(b []byte) error

type ScheduleOptions

type ScheduleOptions struct {

	// If true, automatic scheduling of data transfer runs for this configuration
	// will be disabled. The runs can be started on ad-hoc basis using
	// StartManualTransferRuns API. When automatic scheduling is disabled, the
	// TransferConfig.schedule field will be ignored.
	DisableAutoScheduling bool `` /* 127-byte string literal not displayed */
	// Specifies time to start scheduling transfer runs. The first run will be
	// scheduled at or after the start time according to a recurrence pattern
	// defined in the schedule string. The start time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
	// Defines time to stop scheduling transfer runs. A transfer run cannot be
	// scheduled at or after the end time. The end time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
	// contains filtered or unexported fields
}

Options customizing the data transfer schedule.

func (*ScheduleOptions) Descriptor

func (*ScheduleOptions) Descriptor() ([]byte, []int)

Deprecated: Use ScheduleOptions.ProtoReflect.Descriptor instead.

func (*ScheduleOptions) GetDisableAutoScheduling

func (x *ScheduleOptions) GetDisableAutoScheduling() bool

func (*ScheduleOptions) GetEndTime

func (x *ScheduleOptions) GetEndTime() *timestamppb.Timestamp

func (*ScheduleOptions) GetStartTime

func (x *ScheduleOptions) GetStartTime() *timestamppb.Timestamp

func (*ScheduleOptions) ProtoMessage

func (*ScheduleOptions) ProtoMessage()

func (*ScheduleOptions) ProtoReflect

func (x *ScheduleOptions) ProtoReflect() protoreflect.Message

func (*ScheduleOptions) Reset

func (x *ScheduleOptions) Reset()

func (*ScheduleOptions) String

func (x *ScheduleOptions) String() string

type ScheduleTransferRunsRequest

type ScheduleTransferRunsRequest struct {

	// Required. Transfer configuration name in the form:
	// `projects/{project_id}/transferConfigs/{config_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Required. Start time of the range of transfer runs. For example,
	// `"2017-05-25T00:00:00+00:00"`.
	StartTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
	// Required. End time of the range of transfer runs. For example,
	// `"2017-05-30T00:00:00+00:00"`.
	EndTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
	// contains filtered or unexported fields
}

A request to schedule transfer runs for a time range.

func (*ScheduleTransferRunsRequest) Descriptor

func (*ScheduleTransferRunsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ScheduleTransferRunsRequest.ProtoReflect.Descriptor instead.

func (*ScheduleTransferRunsRequest) GetEndTime

func (*ScheduleTransferRunsRequest) GetParent

func (x *ScheduleTransferRunsRequest) GetParent() string

func (*ScheduleTransferRunsRequest) GetStartTime

func (*ScheduleTransferRunsRequest) ProtoMessage

func (*ScheduleTransferRunsRequest) ProtoMessage()

func (*ScheduleTransferRunsRequest) ProtoReflect

func (*ScheduleTransferRunsRequest) Reset

func (x *ScheduleTransferRunsRequest) Reset()

func (*ScheduleTransferRunsRequest) String

func (x *ScheduleTransferRunsRequest) String() string

type ScheduleTransferRunsResponse

type ScheduleTransferRunsResponse struct {

	// The transfer runs that were scheduled.
	Runs []*TransferRun `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"`
	// contains filtered or unexported fields
}

A response to schedule transfer runs for a time range.

func (*ScheduleTransferRunsResponse) Descriptor

func (*ScheduleTransferRunsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ScheduleTransferRunsResponse.ProtoReflect.Descriptor instead.

func (*ScheduleTransferRunsResponse) GetRuns

func (x *ScheduleTransferRunsResponse) GetRuns() []*TransferRun

func (*ScheduleTransferRunsResponse) ProtoMessage

func (*ScheduleTransferRunsResponse) ProtoMessage()

func (*ScheduleTransferRunsResponse) ProtoReflect

func (*ScheduleTransferRunsResponse) Reset

func (x *ScheduleTransferRunsResponse) Reset()

func (*ScheduleTransferRunsResponse) String

type StartBigQueryJobsRequest

type StartBigQueryJobsRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Import jobs which should be started and monitored.
	ImportedData []*ImportedDataInfo `protobuf:"bytes,2,rep,name=imported_data,json=importedData,proto3" json:"imported_data,omitempty"`
	// User credentials which should be used to start/monitor
	// BigQuery jobs. If not specified, then jobs
	// are started using data source service account credentials.
	// This may be OAuth token or JWT token.
	UserCredentials []byte `protobuf:"bytes,3,opt,name=user_credentials,json=userCredentials,proto3" json:"user_credentials,omitempty"`
	// The number of BQ Jobs that can run in parallel.
	MaxParallelism       int32    `protobuf:"varint,8,opt,name=max_parallelism,json=maxParallelism,proto3" json:"max_parallelism,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

A request to start and monitor a BigQuery load job.

func (*StartBigQueryJobsRequest) Descriptor

func (*StartBigQueryJobsRequest) Descriptor() ([]byte, []int)

func (*StartBigQueryJobsRequest) GetImportedData

func (m *StartBigQueryJobsRequest) GetImportedData() []*ImportedDataInfo

func (*StartBigQueryJobsRequest) GetMaxParallelism

func (m *StartBigQueryJobsRequest) GetMaxParallelism() int32

func (*StartBigQueryJobsRequest) GetName

func (m *StartBigQueryJobsRequest) GetName() string

func (*StartBigQueryJobsRequest) GetUserCredentials

func (m *StartBigQueryJobsRequest) GetUserCredentials() []byte

func (*StartBigQueryJobsRequest) ProtoMessage

func (*StartBigQueryJobsRequest) ProtoMessage()

func (*StartBigQueryJobsRequest) Reset

func (m *StartBigQueryJobsRequest) Reset()

func (*StartBigQueryJobsRequest) String

func (m *StartBigQueryJobsRequest) String() string

func (*StartBigQueryJobsRequest) XXX_DiscardUnknown

func (m *StartBigQueryJobsRequest) XXX_DiscardUnknown()

func (*StartBigQueryJobsRequest) XXX_Marshal

func (m *StartBigQueryJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*StartBigQueryJobsRequest) XXX_Merge

func (m *StartBigQueryJobsRequest) XXX_Merge(src proto.Message)

func (*StartBigQueryJobsRequest) XXX_Size

func (m *StartBigQueryJobsRequest) XXX_Size() int

func (*StartBigQueryJobsRequest) XXX_Unmarshal

func (m *StartBigQueryJobsRequest) XXX_Unmarshal(b []byte) error

type StartManualTransferRunsRequest

type StartManualTransferRunsRequest struct {

	// Transfer configuration name in the form:
	// `projects/{project_id}/transferConfigs/{config_id}` or
	// `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// The requested time specification - this can be a time range or a specific
	// run_time.
	//
	// Types that are assignable to Time:
	//	*StartManualTransferRunsRequest_RequestedTimeRange
	//	*StartManualTransferRunsRequest_RequestedRunTime
	Time isStartManualTransferRunsRequest_Time `protobuf_oneof:"time"`
	// contains filtered or unexported fields
}

A request to start manual transfer runs.

func (*StartManualTransferRunsRequest) Descriptor

func (*StartManualTransferRunsRequest) Descriptor() ([]byte, []int)

Deprecated: Use StartManualTransferRunsRequest.ProtoReflect.Descriptor instead.

func (*StartManualTransferRunsRequest) GetParent

func (x *StartManualTransferRunsRequest) GetParent() string

func (*StartManualTransferRunsRequest) GetRequestedRunTime

func (x *StartManualTransferRunsRequest) GetRequestedRunTime() *timestamppb.Timestamp

func (*StartManualTransferRunsRequest) GetRequestedTimeRange

func (*StartManualTransferRunsRequest) GetTime

func (m *StartManualTransferRunsRequest) GetTime() isStartManualTransferRunsRequest_Time

func (*StartManualTransferRunsRequest) ProtoMessage

func (*StartManualTransferRunsRequest) ProtoMessage()

func (*StartManualTransferRunsRequest) ProtoReflect

func (*StartManualTransferRunsRequest) Reset

func (x *StartManualTransferRunsRequest) Reset()

func (*StartManualTransferRunsRequest) String

type StartManualTransferRunsRequest_RequestedRunTime

type StartManualTransferRunsRequest_RequestedRunTime struct {
	// Specific run_time for a transfer run to be started. The
	// requested_run_time must not be in the future.
	RequestedRunTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=requested_run_time,json=requestedRunTime,proto3,oneof"`
}

type StartManualTransferRunsRequest_RequestedTimeRange

type StartManualTransferRunsRequest_RequestedTimeRange struct {
	// Time range for the transfer runs that should be started.
	RequestedTimeRange *StartManualTransferRunsRequest_TimeRange `protobuf:"bytes,3,opt,name=requested_time_range,json=requestedTimeRange,proto3,oneof"`
}

type StartManualTransferRunsRequest_TimeRange

type StartManualTransferRunsRequest_TimeRange struct {

	// Start time of the range of transfer runs. For example,
	// `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
	// the end_time. Creates transfer runs where run_time is in the range between
	// start_time (inclusive) and end_time (exclusive).
	StartTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
	// End time of the range of transfer runs. For example,
	// `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
	// Creates transfer runs where run_time is in the range between start_time
	// (inclusive) and end_time (exclusive).
	EndTime *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
	// contains filtered or unexported fields
}

A specification for a time range, this will request transfer runs with run_time between start_time (inclusive) and end_time (exclusive).

func (*StartManualTransferRunsRequest_TimeRange) Descriptor

func (*StartManualTransferRunsRequest_TimeRange) Descriptor() ([]byte, []int)

Deprecated: Use StartManualTransferRunsRequest_TimeRange.ProtoReflect.Descriptor instead.

func (*StartManualTransferRunsRequest_TimeRange) GetEndTime

func (*StartManualTransferRunsRequest_TimeRange) GetStartTime

func (*StartManualTransferRunsRequest_TimeRange) ProtoMessage

func (*StartManualTransferRunsRequest_TimeRange) ProtoReflect

func (*StartManualTransferRunsRequest_TimeRange) Reset

func (*StartManualTransferRunsRequest_TimeRange) String

type StartManualTransferRunsResponse

type StartManualTransferRunsResponse struct {

	// The transfer runs that were created.
	Runs []*TransferRun `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"`
	// contains filtered or unexported fields
}

A response to start manual transfer runs.

func (*StartManualTransferRunsResponse) Descriptor

func (*StartManualTransferRunsResponse) Descriptor() ([]byte, []int)

Deprecated: Use StartManualTransferRunsResponse.ProtoReflect.Descriptor instead.

func (*StartManualTransferRunsResponse) GetRuns

func (*StartManualTransferRunsResponse) ProtoMessage

func (*StartManualTransferRunsResponse) ProtoMessage()

func (*StartManualTransferRunsResponse) ProtoReflect

func (*StartManualTransferRunsResponse) Reset

func (*StartManualTransferRunsResponse) String

type TransferConfig

type TransferConfig struct {

	// The resource name of the transfer config.
	// Transfer config names have the form of
	// `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
	// The name is automatically generated based on the config_id specified in
	// CreateTransferConfigRequest along with project_id and region. If config_id
	// is not provided, usually a uuid, even though it is not guaranteed or
	// required, will be generated for config_id.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// The destination of the transfer config.
	//
	// Types that are assignable to Destination:
	//	*TransferConfig_DestinationDatasetId
	Destination isTransferConfig_Destination `protobuf_oneof:"destination"`
	// User specified display name for the data transfer.
	DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"`
	// Data source id. Cannot be changed once data transfer is created.
	DataSourceId string `protobuf:"bytes,5,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"`
	// Data transfer specific parameters.
	Params *structpb.Struct `protobuf:"bytes,9,opt,name=params,proto3" json:"params,omitempty"`
	// Data transfer schedule.
	// If the data source does not support a custom schedule, this should be
	// empty. If it is empty, the default value for the data source will be
	// used.
	// The specified times are in UTC.
	// Examples of valid format:
	// `1st,3rd monday of month 15:30`,
	// `every wed,fri of jan,jun 13:15`, and
	// `first sunday of quarter 00:00`.
	// See more explanation about the format here:
	// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
	// NOTE: the granularity should be at least 8 hours, or less frequent.
	Schedule string `protobuf:"bytes,7,opt,name=schedule,proto3" json:"schedule,omitempty"`
	// Options customizing the data transfer schedule.
	ScheduleOptions *ScheduleOptions `protobuf:"bytes,24,opt,name=schedule_options,json=scheduleOptions,proto3" json:"schedule_options,omitempty"`
	// The number of days to look back to automatically refresh the data.
	// For example, if `data_refresh_window_days = 10`, then every day
	// BigQuery reingests data for [today-10, today-1], rather than ingesting data
	// for just [today-1].
	// Only valid if the data source supports the feature. Set the value to  0
	// to use the default value.
	DataRefreshWindowDays int32 `` /* 130-byte string literal not displayed */
	// Is this config disabled. When set to true, no runs are scheduled
	// for a given transfer.
	Disabled bool `protobuf:"varint,13,opt,name=disabled,proto3" json:"disabled,omitempty"`
	// Output only. Data transfer modification time. Ignored by server on input.
	UpdateTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"`
	// Output only. Next time when data transfer will run.
	NextRunTime *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=next_run_time,json=nextRunTime,proto3" json:"next_run_time,omitempty"`
	// Output only. State of the most recently updated transfer run.
	State TransferState `protobuf:"varint,10,opt,name=state,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferState" json:"state,omitempty"`
	// Deprecated. Unique ID of the user on whose behalf transfer is done.
	UserId int64 `protobuf:"varint,11,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
	// Output only. Region in which BigQuery dataset is located.
	DatasetRegion string `protobuf:"bytes,14,opt,name=dataset_region,json=datasetRegion,proto3" json:"dataset_region,omitempty"`
	// Pub/Sub topic where notifications will be sent after transfer runs
	// associated with this transfer config finish.
	NotificationPubsubTopic string `` /* 133-byte string literal not displayed */
	// Email notifications will be sent according to these preferences
	// to the email address of the user who owns this transfer config.
	EmailPreferences *EmailPreferences `protobuf:"bytes,18,opt,name=email_preferences,json=emailPreferences,proto3" json:"email_preferences,omitempty"`
	// contains filtered or unexported fields
}

Represents a data transfer configuration. A transfer configuration contains all metadata needed to perform a data transfer. For example, `destination_dataset_id` specifies where data should be stored. When a new transfer configuration is created, the specified `destination_dataset_id` is created when needed and shared with the appropriate data source service account.

func (*TransferConfig) Descriptor

func (*TransferConfig) Descriptor() ([]byte, []int)

Deprecated: Use TransferConfig.ProtoReflect.Descriptor instead.

func (*TransferConfig) GetDataRefreshWindowDays

func (x *TransferConfig) GetDataRefreshWindowDays() int32

func (*TransferConfig) GetDataSourceId

func (x *TransferConfig) GetDataSourceId() string

func (*TransferConfig) GetDatasetRegion

func (x *TransferConfig) GetDatasetRegion() string

func (*TransferConfig) GetDestination

func (m *TransferConfig) GetDestination() isTransferConfig_Destination

func (*TransferConfig) GetDestinationDatasetId

func (x *TransferConfig) GetDestinationDatasetId() string

func (*TransferConfig) GetDisabled

func (x *TransferConfig) GetDisabled() bool

func (*TransferConfig) GetDisplayName

func (x *TransferConfig) GetDisplayName() string

func (*TransferConfig) GetEmailPreferences

func (x *TransferConfig) GetEmailPreferences() *EmailPreferences

func (*TransferConfig) GetName

func (x *TransferConfig) GetName() string

func (*TransferConfig) GetNextRunTime

func (x *TransferConfig) GetNextRunTime() *timestamppb.Timestamp

func (*TransferConfig) GetNotificationPubsubTopic

func (x *TransferConfig) GetNotificationPubsubTopic() string

func (*TransferConfig) GetParams

func (x *TransferConfig) GetParams() *structpb.Struct

func (*TransferConfig) GetSchedule

func (x *TransferConfig) GetSchedule() string

func (*TransferConfig) GetScheduleOptions

func (x *TransferConfig) GetScheduleOptions() *ScheduleOptions

func (*TransferConfig) GetState

func (x *TransferConfig) GetState() TransferState

func (*TransferConfig) GetUpdateTime

func (x *TransferConfig) GetUpdateTime() *timestamppb.Timestamp

func (*TransferConfig) GetUserId

func (x *TransferConfig) GetUserId() int64

func (*TransferConfig) ProtoMessage

func (*TransferConfig) ProtoMessage()

func (*TransferConfig) ProtoReflect

func (x *TransferConfig) ProtoReflect() protoreflect.Message

func (*TransferConfig) Reset

func (x *TransferConfig) Reset()

func (*TransferConfig) String

func (x *TransferConfig) String() string

type TransferConfig_DestinationDatasetId

type TransferConfig_DestinationDatasetId struct {
	// The BigQuery target dataset id.
	DestinationDatasetId string `protobuf:"bytes,2,opt,name=destination_dataset_id,json=destinationDatasetId,proto3,oneof"`
}

type TransferMessage

type TransferMessage struct {

	// Time when message was logged.
	MessageTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=message_time,json=messageTime,proto3" json:"message_time,omitempty"`
	// Message severity.
	Severity TransferMessage_MessageSeverity `` /* 145-byte string literal not displayed */
	// Message text.
	MessageText string `protobuf:"bytes,3,opt,name=message_text,json=messageText,proto3" json:"message_text,omitempty"`
	// contains filtered or unexported fields
}

Represents a user facing message for a particular data transfer run.

func (*TransferMessage) Descriptor

func (*TransferMessage) Descriptor() ([]byte, []int)

Deprecated: Use TransferMessage.ProtoReflect.Descriptor instead.

func (*TransferMessage) GetMessageText

func (x *TransferMessage) GetMessageText() string

func (*TransferMessage) GetMessageTime

func (x *TransferMessage) GetMessageTime() *timestamppb.Timestamp

func (*TransferMessage) GetSeverity

func (*TransferMessage) ProtoMessage

func (*TransferMessage) ProtoMessage()

func (*TransferMessage) ProtoReflect

func (x *TransferMessage) ProtoReflect() protoreflect.Message

func (*TransferMessage) Reset

func (x *TransferMessage) Reset()

func (*TransferMessage) String

func (x *TransferMessage) String() string

type TransferMessage_MessageSeverity

type TransferMessage_MessageSeverity int32

Represents data transfer user facing message severity.

const (
	// No severity specified.
	TransferMessage_MESSAGE_SEVERITY_UNSPECIFIED TransferMessage_MessageSeverity = 0
	// Informational message.
	TransferMessage_INFO TransferMessage_MessageSeverity = 1
	// Warning message.
	TransferMessage_WARNING TransferMessage_MessageSeverity = 2
	// Error message.
	TransferMessage_ERROR TransferMessage_MessageSeverity = 3
)

func (TransferMessage_MessageSeverity) Descriptor

func (TransferMessage_MessageSeverity) Enum

func (TransferMessage_MessageSeverity) EnumDescriptor

func (TransferMessage_MessageSeverity) EnumDescriptor() ([]byte, []int)

Deprecated: Use TransferMessage_MessageSeverity.Descriptor instead.

func (TransferMessage_MessageSeverity) Number

func (TransferMessage_MessageSeverity) String

func (TransferMessage_MessageSeverity) Type

type TransferRun

type TransferRun struct {

	// The resource name of the transfer run.
	// Transfer run names have the form
	// `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
	// The name is ignored when creating a transfer run.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Minimum time after which a transfer run can be started.
	ScheduleTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"`
	// For batch transfer runs, specifies the date and time that the data
	// should be ingested.
	RunTime *timestamppb.Timestamp `protobuf:"bytes,10,opt,name=run_time,json=runTime,proto3" json:"run_time,omitempty"`
	// Status of the transfer run.
	ErrorStatus *status.Status `protobuf:"bytes,21,opt,name=error_status,json=errorStatus,proto3" json:"error_status,omitempty"`
	// Output only. Time when transfer run was started.
	// Parameter ignored by server for input requests.
	StartTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
	// Output only. Time when transfer run ended.
	// Parameter ignored by server for input requests.
	EndTime *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
	// Output only. Last time the data transfer run state was updated.
	UpdateTime *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"`
	// Output only. Data transfer specific parameters.
	Params *structpb.Struct `protobuf:"bytes,9,opt,name=params,proto3" json:"params,omitempty"`
	// Data transfer destination.
	//
	// Types that are assignable to Destination:
	//	*TransferRun_DestinationDatasetId
	Destination isTransferRun_Destination `protobuf_oneof:"destination"`
	// Output only. Data source id.
	DataSourceId string `protobuf:"bytes,7,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"`
	// Data transfer run state. Ignored for input requests.
	State TransferState `protobuf:"varint,8,opt,name=state,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferState" json:"state,omitempty"`
	// Deprecated. Unique ID of the user on whose behalf transfer is done.
	UserId int64 `protobuf:"varint,11,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
	// Output only. Describes the schedule of this transfer run if it was
	// created as part of a regular schedule. For batch transfer runs that are
	// scheduled manually, this is empty.
	// NOTE: the system might choose to delay the schedule depending on the
	// current load, so `schedule_time` doesn't always match this.
	Schedule string `protobuf:"bytes,12,opt,name=schedule,proto3" json:"schedule,omitempty"`
	// Output only. Pub/Sub topic where a notification will be sent after this
	// transfer run finishes
	NotificationPubsubTopic string `` /* 133-byte string literal not displayed */
	// Output only. Email notifications will be sent according to these
	// preferences to the email address of the user who owns the transfer config
	// this run was derived from.
	EmailPreferences *EmailPreferences `protobuf:"bytes,25,opt,name=email_preferences,json=emailPreferences,proto3" json:"email_preferences,omitempty"`
	// contains filtered or unexported fields
}

Represents a data transfer run.

func (*TransferRun) Descriptor

func (*TransferRun) Descriptor() ([]byte, []int)

Deprecated: Use TransferRun.ProtoReflect.Descriptor instead.

func (*TransferRun) GetDataSourceId

func (x *TransferRun) GetDataSourceId() string

func (*TransferRun) GetDestination

func (m *TransferRun) GetDestination() isTransferRun_Destination

func (*TransferRun) GetDestinationDatasetId

func (x *TransferRun) GetDestinationDatasetId() string

func (*TransferRun) GetEmailPreferences

func (x *TransferRun) GetEmailPreferences() *EmailPreferences

func (*TransferRun) GetEndTime

func (x *TransferRun) GetEndTime() *timestamppb.Timestamp

func (*TransferRun) GetErrorStatus

func (x *TransferRun) GetErrorStatus() *status.Status

func (*TransferRun) GetName

func (x *TransferRun) GetName() string

func (*TransferRun) GetNotificationPubsubTopic

func (x *TransferRun) GetNotificationPubsubTopic() string

func (*TransferRun) GetParams

func (x *TransferRun) GetParams() *structpb.Struct

func (*TransferRun) GetRunTime

func (x *TransferRun) GetRunTime() *timestamppb.Timestamp

func (*TransferRun) GetSchedule

func (x *TransferRun) GetSchedule() string

func (*TransferRun) GetScheduleTime

func (x *TransferRun) GetScheduleTime() *timestamppb.Timestamp

func (*TransferRun) GetStartTime

func (x *TransferRun) GetStartTime() *timestamppb.Timestamp

func (*TransferRun) GetState

func (x *TransferRun) GetState() TransferState

func (*TransferRun) GetUpdateTime

func (x *TransferRun) GetUpdateTime() *timestamppb.Timestamp

func (*TransferRun) GetUserId

func (x *TransferRun) GetUserId() int64

func (*TransferRun) ProtoMessage

func (*TransferRun) ProtoMessage()

func (*TransferRun) ProtoReflect

func (x *TransferRun) ProtoReflect() protoreflect.Message

func (*TransferRun) Reset

func (x *TransferRun) Reset()

func (*TransferRun) String

func (x *TransferRun) String() string

type TransferRun_DestinationDatasetId

type TransferRun_DestinationDatasetId struct {
	// Output only. The BigQuery target dataset id.
	DestinationDatasetId string `protobuf:"bytes,2,opt,name=destination_dataset_id,json=destinationDatasetId,proto3,oneof"`
}

type TransferState

type TransferState int32

Represents data transfer run state.

const (
	// State placeholder.
	TransferState_TRANSFER_STATE_UNSPECIFIED TransferState = 0
	// Data transfer is scheduled and is waiting to be picked up by
	// data transfer backend.
	TransferState_PENDING TransferState = 2
	// Data transfer is in progress.
	TransferState_RUNNING TransferState = 3
	// Data transfer completed successfully.
	TransferState_SUCCEEDED TransferState = 4
	// Data transfer failed.
	TransferState_FAILED TransferState = 5
	// Data transfer is cancelled.
	TransferState_CANCELLED TransferState = 6
)

func (TransferState) Descriptor

func (TransferState) Enum

func (x TransferState) Enum() *TransferState

func (TransferState) EnumDescriptor

func (TransferState) EnumDescriptor() ([]byte, []int)

Deprecated: Use TransferState.Descriptor instead.

func (TransferState) Number

func (TransferState) String

func (x TransferState) String() string

func (TransferState) Type

type TransferType

type TransferType int32

DEPRECATED. Represents data transfer type.

Deprecated: Do not use.

const (
	// Invalid or Unknown transfer type placeholder.
	TransferType_TRANSFER_TYPE_UNSPECIFIED TransferType = 0
	// Batch data transfer.
	TransferType_BATCH TransferType = 1
	// Streaming data transfer. Streaming data source currently doesn't
	// support multiple transfer configs per project.
	TransferType_STREAMING TransferType = 2
)

func (TransferType) Descriptor

func (TransferType) Enum

func (x TransferType) Enum() *TransferType

func (TransferType) EnumDescriptor

func (TransferType) EnumDescriptor() ([]byte, []int)

Deprecated: Use TransferType.Descriptor instead.

func (TransferType) Number

func (TransferType) String

func (x TransferType) String() string

func (TransferType) Type

type UnimplementedDataSourceServiceServer

type UnimplementedDataSourceServiceServer struct {
}

UnimplementedDataSourceServiceServer can be embedded to have forward compatible implementations.

func (*UnimplementedDataSourceServiceServer) CreateDataSourceDefinition

func (*UnimplementedDataSourceServiceServer) DeleteDataSourceDefinition

func (*UnimplementedDataSourceServiceServer) FinishRun

func (*UnimplementedDataSourceServiceServer) GetDataSourceDefinition

func (*UnimplementedDataSourceServiceServer) ListDataSourceDefinitions

func (*UnimplementedDataSourceServiceServer) LogTransferRunMessages

func (*UnimplementedDataSourceServiceServer) StartBigQueryJobs

func (*UnimplementedDataSourceServiceServer) UpdateDataSourceDefinition

func (*UnimplementedDataSourceServiceServer) UpdateTransferRun

type UnimplementedDataTransferServiceServer

type UnimplementedDataTransferServiceServer struct {
}

UnimplementedDataTransferServiceServer can be embedded to have forward compatible implementations.

func (*UnimplementedDataTransferServiceServer) CheckValidCreds

func (*UnimplementedDataTransferServiceServer) CreateTransferConfig

func (*UnimplementedDataTransferServiceServer) DeleteTransferConfig

func (*UnimplementedDataTransferServiceServer) DeleteTransferRun

func (*UnimplementedDataTransferServiceServer) GetDataSource

func (*UnimplementedDataTransferServiceServer) GetTransferConfig

func (*UnimplementedDataTransferServiceServer) GetTransferRun

func (*UnimplementedDataTransferServiceServer) ListDataSources

func (*UnimplementedDataTransferServiceServer) ListTransferConfigs

func (*UnimplementedDataTransferServiceServer) ListTransferLogs

func (*UnimplementedDataTransferServiceServer) ListTransferRuns

func (*UnimplementedDataTransferServiceServer) ScheduleTransferRuns

func (*UnimplementedDataTransferServiceServer) UpdateTransferConfig

type UpdateDataSourceDefinitionRequest

type UpdateDataSourceDefinitionRequest struct {
	// Data source definition.
	DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,1,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
	// Update field mask.
	UpdateMask           *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

Represents the request of the UpdateDataSourceDefinition method.

func (*UpdateDataSourceDefinitionRequest) Descriptor

func (*UpdateDataSourceDefinitionRequest) Descriptor() ([]byte, []int)

func (*UpdateDataSourceDefinitionRequest) GetDataSourceDefinition

func (m *UpdateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition

func (*UpdateDataSourceDefinitionRequest) GetUpdateMask

func (*UpdateDataSourceDefinitionRequest) ProtoMessage

func (*UpdateDataSourceDefinitionRequest) ProtoMessage()

func (*UpdateDataSourceDefinitionRequest) Reset

func (*UpdateDataSourceDefinitionRequest) String

func (*UpdateDataSourceDefinitionRequest) XXX_DiscardUnknown

func (m *UpdateDataSourceDefinitionRequest) XXX_DiscardUnknown()

func (*UpdateDataSourceDefinitionRequest) XXX_Marshal

func (m *UpdateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*UpdateDataSourceDefinitionRequest) XXX_Merge

func (*UpdateDataSourceDefinitionRequest) XXX_Size

func (m *UpdateDataSourceDefinitionRequest) XXX_Size() int

func (*UpdateDataSourceDefinitionRequest) XXX_Unmarshal

func (m *UpdateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error

type UpdateTransferConfigRequest

type UpdateTransferConfigRequest struct {

	// Required. Data transfer configuration to update.
	TransferConfig *TransferConfig `protobuf:"bytes,1,opt,name=transfer_config,json=transferConfig,proto3" json:"transfer_config,omitempty"`
	// Optional OAuth2 authorization code to use with this transfer configuration.
	// If it is provided, the transfer configuration will be associated with the
	// authorizing user.
	// In order to obtain authorization_code, please make a
	// request to
	// https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
	//
	// * client_id should be OAuth client_id of BigQuery DTS API for the given
	//   data source returned by ListDataSources method.
	// * data_source_scopes are the scopes returned by ListDataSources method.
	// * redirect_uri is an optional parameter. If not specified, then
	//   authorization code is posted to the opener of authorization flow window.
	//   Otherwise it will be sent to the redirect uri. A special value of
	//   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
	//   returned in the title bar of the browser, with the page text prompting
	//   the user to copy the code and paste it in the application.
	AuthorizationCode string `protobuf:"bytes,3,opt,name=authorization_code,json=authorizationCode,proto3" json:"authorization_code,omitempty"`
	// Required. List of fields to be updated in this request.
	UpdateMask *fieldmaskpb.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	// Optional version info. If users want to find a very recent access token,
	// that is, immediately after approving access, users have to set the
	// version_info claim in the token request. To obtain the version_info, users
	// must use the "none+gsession" response type, which returns a
	// version_info in the authorization response. This version_info must then
	// be put in a JWT claim in the token request.
	VersionInfo string `protobuf:"bytes,5,opt,name=version_info,json=versionInfo,proto3" json:"version_info,omitempty"`
	// Optional service account name. If this field is set and
	// "service_account_name" is set in update_mask, transfer config will be
	// updated to use this service account credentials. It requires that
	// requesting user calling this API has permissions to act as this service
	// account.
	ServiceAccountName string `protobuf:"bytes,6,opt,name=service_account_name,json=serviceAccountName,proto3" json:"service_account_name,omitempty"`
	// contains filtered or unexported fields
}

A request to update a transfer configuration. To update the user id of the transfer configuration, an authorization code needs to be provided.

func (*UpdateTransferConfigRequest) Descriptor

func (*UpdateTransferConfigRequest) Descriptor() ([]byte, []int)

Deprecated: Use UpdateTransferConfigRequest.ProtoReflect.Descriptor instead.

func (*UpdateTransferConfigRequest) GetAuthorizationCode

func (x *UpdateTransferConfigRequest) GetAuthorizationCode() string

func (*UpdateTransferConfigRequest) GetServiceAccountName

func (x *UpdateTransferConfigRequest) GetServiceAccountName() string

func (*UpdateTransferConfigRequest) GetTransferConfig

func (x *UpdateTransferConfigRequest) GetTransferConfig() *TransferConfig

func (*UpdateTransferConfigRequest) GetUpdateMask

func (*UpdateTransferConfigRequest) GetVersionInfo

func (x *UpdateTransferConfigRequest) GetVersionInfo() string

func (*UpdateTransferConfigRequest) ProtoMessage

func (*UpdateTransferConfigRequest) ProtoMessage()

func (*UpdateTransferConfigRequest) ProtoReflect

func (*UpdateTransferConfigRequest) Reset

func (x *UpdateTransferConfigRequest) Reset()

func (*UpdateTransferConfigRequest) String

func (x *UpdateTransferConfigRequest) String() string

type UpdateTransferRunRequest

type UpdateTransferRunRequest struct {
	// Run name must be set and correspond to an already existing run. Only
	// state, error_status, and data_version fields will be updated. All other
	// fields will be ignored.
	TransferRun *TransferRun `protobuf:"bytes,1,opt,name=transfer_run,json=transferRun,proto3" json:"transfer_run,omitempty"`
	// Required list of fields to be updated in this request.
	UpdateMask           *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

A request to update a transfer run.

func (*UpdateTransferRunRequest) Descriptor

func (*UpdateTransferRunRequest) Descriptor() ([]byte, []int)

func (*UpdateTransferRunRequest) GetTransferRun

func (m *UpdateTransferRunRequest) GetTransferRun() *TransferRun

func (*UpdateTransferRunRequest) GetUpdateMask

func (m *UpdateTransferRunRequest) GetUpdateMask() *field_mask.FieldMask

func (*UpdateTransferRunRequest) ProtoMessage

func (*UpdateTransferRunRequest) ProtoMessage()

func (*UpdateTransferRunRequest) Reset

func (m *UpdateTransferRunRequest) Reset()

func (*UpdateTransferRunRequest) String

func (m *UpdateTransferRunRequest) String() string

func (*UpdateTransferRunRequest) XXX_DiscardUnknown

func (m *UpdateTransferRunRequest) XXX_DiscardUnknown()

func (*UpdateTransferRunRequest) XXX_Marshal

func (m *UpdateTransferRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*UpdateTransferRunRequest) XXX_Merge

func (m *UpdateTransferRunRequest) XXX_Merge(src proto.Message)

func (*UpdateTransferRunRequest) XXX_Size

func (m *UpdateTransferRunRequest) XXX_Size() int

func (*UpdateTransferRunRequest) XXX_Unmarshal

func (m *UpdateTransferRunRequest) XXX_Unmarshal(b []byte) error

type WriteDisposition

type WriteDisposition int32

Options for writing to the table. The WRITE_EMPTY option is intentionally excluded from the enum and is not supported by the data transfer service.

const (
	// The default writeDisposition.
	WriteDisposition_WRITE_DISPOSITION_UNSPECIFIED WriteDisposition = 0
	// Overwrites the table data.
	WriteDisposition_WRITE_TRUNCATE WriteDisposition = 1
	// The data is appended to the table.
	// Note duplication might happen if this mode is used.
	WriteDisposition_WRITE_APPEND WriteDisposition = 2
)

func (WriteDisposition) EnumDescriptor

func (WriteDisposition) EnumDescriptor() ([]byte, []int)

func (WriteDisposition) String

func (x WriteDisposition) String() string