datablobstorage

package
v0.1.4 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 10, 2024 License: Apache-2.0 Imports: 37 Imported by: 0

Documentation

Index

Constants

View Source
const AWSUploadFileMockErrMsg = "mocked error for uploading file for aws"
View Source
const DirectCopyWriterMockErrMsg = "forced error for direct copy"
View Source
const GCPWriterMockErrMsg = "forced error for gcp storage writer"
View Source
const LocalWriterMockErrMsg = "forced error for local path storage"

Variables

This section is empty.

Functions

func NewCopyCRDBDirect

func NewCopyCRDBDirect(logger zerolog.Logger, target *pgx.Conn) *copyCRDBDirect

func NewGCPStore

func NewGCPStore(
	logger zerolog.Logger,
	client *storage.Client,
	creds *google.Credentials,
	bucket string,
	bucketPath string,
	useLocalInfra bool,
) *gcpStore

func NewLocalStore

func NewLocalStore(
	logger zerolog.Logger, basePath string, listenAddr string, crdbAccessAddr string,
) (*localStore, error)

func NewS3Store

func NewS3Store(
	logger zerolog.Logger,
	session *session.Session,
	creds credentials.Value,
	bucket string,
	bucketPath string,
	useLocalInfra bool,
) *s3Store

func SetCopyEnvVars added in v0.1.3

func SetCopyEnvVars(ctx context.Context, conn *pgx.Conn) error

Types

type DatastoreCreationPayload added in v0.1.1

type DatastoreCreationPayload struct {
	DirectCopyPl *DirectCopyPayload
	GCPPl        *GCPPayload
	S3Pl         *S3Payload
	LocalPathPl  *LocalPathPayload

	TestFailedWriteToBucket bool
}

type DirectCopyPayload added in v0.1.1

type DirectCopyPayload struct {
	TargetConnForCopy *pgx.Conn
}

type GCPPayload added in v0.1.1

type GCPPayload struct {
	GCPBucket  string
	BucketPath string
}

type GCPStorageWriterMock added in v0.1.1

type GCPStorageWriterMock struct {
	*storage.Writer
}

GCPStorageWriterMock mocks a GCP storage writer that always fails to upload to the bucket. We use it to simulate a disastrous edge case and ensure that the error in this case is properly propagated.

func (*GCPStorageWriterMock) Write added in v0.1.1

func (w *GCPStorageWriterMock) Write(p []byte) (n int, err error)

type LocalPathPayload added in v0.1.1

type LocalPathPayload struct {
	LocalPath               string
	LocalPathListenAddr     string
	LocalPathCRDBAccessAddr string
}

type Resource

type Resource interface {
	Key() (string, error)
	Rows() int
	ImportURL() (string, error)
	MarkForCleanup(ctx context.Context) error
	Reader(ctx context.Context) (io.ReadCloser, error)
	IsLocal() bool
}

type S3Payload added in v0.1.1

type S3Payload struct {
	S3Bucket   string
	BucketPath string
	Region     string
}

type Store

type Store interface {
	// CreateFromReader is responsible for the creation of the individual
	// CSVs from the data export process. It will create the file and upload
	// it to the respective data store and return the resource object which
	// will be used in the data import phase.
	CreateFromReader(ctx context.Context, r io.Reader, table dbtable.VerifiedTable, iteration int, fileExt string, numRows chan int, testingKnobs testutils.FetchTestingKnobs, shardNum int) (Resource, error)
	// ListFromContinuationPoint is used when restarting Fetch from
	// a continuation point. It will query the respective data store
	// and create the slice of resources that will be used by the
	// import process. Note that NO files are created by this method.
	// It simply lists all files in the data store and filters and returns
	// the files that are needed.
	ListFromContinuationPoint(ctx context.Context, table dbtable.VerifiedTable, fileName string) ([]Resource, error)
	CanBeTarget() bool
	DefaultFlushBatchSize() int
	Cleanup(ctx context.Context) error
	TelemetryName() string
}

func GenerateDatastore added in v0.1.1

func GenerateDatastore(
	ctx context.Context, cfg any, logger zerolog.Logger, testFailedWriteToBucket, testOnly bool,
) (Store, error)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL