Documentation ¶
Index ¶
- Constants
- func LoadJobRunData(ctx context.Context, e GCSEvent) error
- func LoadJobRunDataTest(ctx context.Context, e GCSEvent) error
- func UnMarshalJSON(jsonb []byte, result interface{}) error
- type BigQueryDataItem
- type BigQueryLoader
- func (b *BigQueryLoader) FindExistingData(ctx context.Context, partitionTime time.Time, ...) (bool, error)
- func (b *BigQueryLoader) GetMetaData(ctx context.Context, di DataInstance) (bigquery.TableMetadata, error)
- func (b *BigQueryLoader) Init(ctx context.Context) error
- func (b *BigQueryLoader) LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
- func (b *BigQueryLoader) ValidateTable(ctx context.Context, dataInstance DataInstance) error
- type BigQueryTableCache
- type ClientError
- type ClientsCache
- type DataFile
- type DataInstance
- type DataLoader
- type DataType
- type DataUploader
- type GCSEvent
- type JobRunDataEvent
- type OutputMetric
- type PrometheusData
- type PrometheusLabels
- type PrometheusMetric
- type PrometheusResult
- type PrometheusValue
- type SimpleUploader
Constants ¶
View Source
const ( AutoDataLoaderSuffix = "autodl.json" DataSetEnv = "DATASET_ID" ProjectIdEnv = "PROJECT_ID" PRJobsEnabledEnv = "PR_JOBS_ENABLED" // local testing only GCSCredentialsFileEnv = "GCS_CREDENTIALS_FILE" // local testing only )
View Source
const DataPartitioningField = "PartitionTime"
View Source
const JobRunNameField = "JobRunName"
View Source
const SourceNameField = "Source"
Variables ¶
This section is empty.
Functions ¶
func UnMarshalJSON ¶
Types ¶
type BigQueryDataItem ¶
type BigQueryDataItem struct { Instance *DataInstance Row map[string]string InsertID string }
type BigQueryLoader ¶
type BigQueryLoader struct { ProjectID string DataSetID string Client *bigquery.Client DryRun bool // contains filtered or unexported fields }
func (*BigQueryLoader) FindExistingData ¶
func (*BigQueryLoader) GetMetaData ¶
func (b *BigQueryLoader) GetMetaData(ctx context.Context, di DataInstance) (bigquery.TableMetadata, error)
func (*BigQueryLoader) LoadDataItems ¶
func (b *BigQueryLoader) LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
func (*BigQueryLoader) ValidateTable ¶
func (b *BigQueryLoader) ValidateTable(ctx context.Context, dataInstance DataInstance) error
type BigQueryTableCache ¶
type BigQueryTableCache struct {
// contains filtered or unexported fields
}
type ClientError ¶
func (*ClientError) Error ¶
func (c *ClientError) Error() string
type ClientsCache ¶
type ClientsCache struct {
// contains filtered or unexported fields
}
type DataFile ¶
type DataFile struct { TableName string `json:"table_name"` Schema map[string]DataType `json:"schema"` SchemaMapping map[string]string `json:"schema_mapping"` Rows []map[string]string `json:"rows"` ExpirationDays int `json:"expiration_days"` PartitionColumn string `json:"partition_column"` PartitionType string `json:"partition_type"` ChunkSize int `json:"chunk_size"` }
type DataInstance ¶
type DataLoader ¶
type DataLoader interface { ValidateTable(ctx context.Context, dataInstance DataInstance) error FindExistingData(ctx context.Context, partitionTime time.Time, partitionColumn, tableName, jobRunName, source string) (bool, error) LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error) }
type DataUploader ¶
type DataUploader struct {
// contains filtered or unexported fields
}
type GCSEvent ¶
type GCSEvent struct { Kind string `json:"kind"` ID string `json:"id"` // SelfLink string `json:"selfLink"` Name string `json:"name"` Bucket string `json:"bucket"` // Generation string `json:"generation"` // Metageneration string `json:"metageneration"` ContentType string `json:"contentType"` TimeCreated time.Time `json:"timeCreated"` Updated time.Time `json:"updated"` // TemporaryHold bool `json:"temporaryHold"` // EventBasedHold bool `json:"eventBasedHold"` // RetentionExpirationTime time.Time `json:"retentionExpirationTime"` // StorageClass string `json:"storageClass"` // TimeStorageClassUpdated time.Time `json:"timeStorageClassUpdated"` Size string `json:"size"` MD5Hash string `json:"md5Hash"` MediaLink string `json:"mediaLink"` // ContentEncoding string `json:"contentEncoding"` // ContentDisposition string `json:"contentDisposition"` // CacheControl string `json:"cacheControl"` Metadata map[string]interface{} `json:"metadata"` }
type JobRunDataEvent ¶
type OutputMetric ¶
type PrometheusData ¶
type PrometheusData struct { ResultType string `json:"resultType"` Result []PrometheusMetric `json:"result"` }
type PrometheusLabels ¶
PrometheusLabels avoids allocations during deserialization.
func (PrometheusLabels) MarshalJSON ¶
func (l PrometheusLabels) MarshalJSON() ([]byte, error)
func (*PrometheusLabels) UnmarshalJSON ¶
func (l *PrometheusLabels) UnmarshalJSON(data []byte) error
type PrometheusMetric ¶
type PrometheusMetric struct { Metric PrometheusLabels `json:"metric"` Value PrometheusValue `json:"value"` }
type PrometheusResult ¶
type PrometheusResult struct { Status string `json:"status"` Data PrometheusData `json:"data"` }
type PrometheusValue ¶
func (*PrometheusValue) UnmarshalJSON ¶
func (l *PrometheusValue) UnmarshalJSON(data []byte) error
type SimpleUploader ¶
type SimpleUploader interface {
// contains filtered or unexported methods
}
Click to show internal directories.
Click to hide internal directories.