stores

package
v0.0.0-...-d2fa2ec Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 8, 2024 License: MIT Imports: 34 Imported by: 0

README

Stores

Store is the interface by which files are uploaded/retrieved from a storage backend.

How to implement support for a new artifact store

If you want to use Cavorite with an artifact store that isn't compatible with S3, GCS, or Azure, you can add support via a plugin.

Instead of forking Cavorite to add support for a new storage backend, the recommended strategy is to implement a plugin. See the localstore plugin as an example.

You must implement all the methods of Store.

What should a plugin's Upload/Retrieve functions do?

Upload

In your implementation of Upload(), the following tasks need to be handled:

  1. Parsing .cavorite/config which is straightforward:

    import (
        "github.com/discentem/cavorite/internal/config"
    )
    type SomeStore struct {
        logger hclog.Logger
        fsys   afero.Fs
    }
    // ...
    func (s *SomeStore) GetOptions() (stores.Options, error) {
        f := s.fsys
        if f == nil {
            f = afero.NewOsFs() // replace with afero.NewMemMapFs() in tests
        }
        return config.LoadOptions(f)
    }
    
  2. Uploading the file to the bucket.

    This implementation is more complicated than GetOptions() and depends entirely on what artifact store you are using. See internal/stores/s3 for a detailed example.

    func (s *SomeStore) Upload(ctx context.Context, objects ...string) error {
        opts, err := s.GetOptions()
        if err != nil {
            return err
        }
        backendAddress := opts.BackendAddress
        s.logger.Info(fmt.Sprintf("Uploading %v via magicstore plugin", objects))
        // call your artifact storage provider's API and pass objects
        return UploadToMagicalStore(backendAddress, objects...)
    }
    

Retrieve

When retrieving files, the following tasks need to be handled in the implementation:

  1. Read the metadata file
  2. Make sure the local path exists where the file will reside based on the metadata
  3. Download the file into the given location
  4. Confirm the hash of the local file matches the hash in the metadata; return an error if it does not
  5. Delete the file if the hashes do not match

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	PluginSet = plugin.PluginSet{
		"store": &storePlugin{},
	}

	HandshakeConfig = plugin.HandshakeConfig{
		ProtocolVersion:  1,
		MagicCookieKey:   "BASIC_PLUGIN",
		MagicCookieValue: "cavorite",
	}

	// FIXME: make configurable?
	HLog = hclog.New(&hclog.LoggerOptions{
		Name:   "plugin",
		Output: os.Stdout,
		Level:  hclog.Debug,
	})
)
View Source
var (
	ErrCfilesLengthZero = errors.New("at least one cfile must be specified")
)
View Source
var ErrMetadataFileExtensionEmpty = fmt.Errorf("options.MetadatafileExtension cannot be %q", "")

Functions

func ListenAndServePlugin

func ListenAndServePlugin(store Store, logger hclog.Logger)

ListenAndServePlugin is used by plugins to start listening to requests

func MetadataMapToPluginProtoMap

func MetadataMapToPluginProtoMap(mmap metadata.CfileMetadataMap) map[string]*pluginproto.ObjectMetadata

Types

type AzureBlobStore

type AzureBlobStore struct {
	Options Options
	// contains filtered or unexported fields
}

func NewAzureBlobStore

func NewAzureBlobStore(ctx context.Context, fsys afero.Fs, storeOpts Options, azureBlobOptions azblob.ClientOptions) (*AzureBlobStore, error)

func (*AzureBlobStore) Close

func (s *AzureBlobStore) Close() error

func (*AzureBlobStore) GetFsys

func (s *AzureBlobStore) GetFsys() (afero.Fs, error)

func (*AzureBlobStore) GetOptions

func (s *AzureBlobStore) GetOptions() (Options, error)

func (*AzureBlobStore) Retrieve

func (s *AzureBlobStore) Retrieve(ctx context.Context, objects ...string) error

func (*AzureBlobStore) Upload

func (s *AzureBlobStore) Upload(ctx context.Context, objects ...string) error

type GCSStore

type GCSStore struct {
	Options Options `mapstructure:"options"`
	// contains filtered or unexported fields
}

func NewGCSStoreClient

func NewGCSStoreClient(ctx context.Context, fsys afero.Fs, opts Options) (*GCSStore, error)

NewGCSStoreClient creates a GCS Storage Client utilizing either the default GOOGLE_APPLICATION_CREDENTIAL env var or a json string env var named CAVORITE_GCS_CREDENTIALS

func (*GCSStore) Close

func (s *GCSStore) Close() error

func (*GCSStore) GetFsys

func (s *GCSStore) GetFsys() (afero.Fs, error)

func (*GCSStore) GetOptions

func (s *GCSStore) GetOptions() (Options, error)

func (*GCSStore) Retrieve

func (s *GCSStore) Retrieve(ctx context.Context, metaObjects ...string) error

Retrieve gets the file from the GCS bucket, validates the hash is correct and writes it to s.fsys

func (*GCSStore) Upload

func (s *GCSStore) Upload(ctx context.Context, objects ...string) error

Upload generates the metadata, writes it s.fsys and uploads the file to the GCS bucket

type Options

type Options struct {
	BackendAddress        string `json:"backend_address" mapstructure:"backend_address"`
	PluginAddress         string `json:"plugin_address,omitempty" mapstructure:"plugin_address"`
	MetadataFileExtension string `json:"metadata_file_extension" mapstructure:"metadata_file_extension"`
	Region                string `json:"region" mapstructure:"region"`
	/*
		If ObjectKeyPrefix is set to "team-bucket", and the initialized backend supports it,
			- `cavorite upload whatever/thing` will be written to `team-bucket/whatever/thing`
			- `cavorite retrieve whatever/thing` will request `team-bucket/whatever/thing`
	*/
	ObjectKeyPrefix string `json:"object_key_prefix,omitempty" mapstructure:"object_key_prefix"`
}

type PluggableStore

type PluggableStore struct {
	Store
	// contains filtered or unexported fields
}

PluggableStore is the Store used by cavorite that wraps go-plugin

func NewPluggableStore

func NewPluggableStore(_ context.Context, opts Options) (*PluggableStore, error)

func (*PluggableStore) Close

func (p *PluggableStore) Close() error

type S3Downloader

type S3Downloader interface {
	Download(
		ctx context.Context,
		w io.WriterAt,
		input *s3.GetObjectInput,
		options ...func(*s3manager.Downloader)) (n int64, err error)
}

type S3Store

type S3Store struct {
	Options Options `json:"options" mapstructure:"options"`
	// contains filtered or unexported fields
}

func NewS3Store

func NewS3Store(ctx context.Context, fsys afero.Fs, opts Options) (*S3Store, error)

func (*S3Store) Close

func (s *S3Store) Close() error

func (*S3Store) GetFsys

func (s *S3Store) GetFsys() (afero.Fs, error)

func (*S3Store) GetOptions

func (s *S3Store) GetOptions() (Options, error)

func (*S3Store) Retrieve

func (s *S3Store) Retrieve(ctx context.Context, mmap metadata.CfileMetadataMap, cfiles ...string) error

Retrieve gets the file from the S3 bucket, validates the hash is correct and writes it to disk

func (*S3Store) Upload

func (s *S3Store) Upload(ctx context.Context, objects ...string) error

Upload generates the metadata, writes it to disk and uploads the file to the S3 bucket

type S3Uploader

type S3Uploader interface {
	Upload(ctx context.Context,
		input *s3.PutObjectInput,
		opts ...func(*s3manager.Uploader)) (
		*s3manager.UploadOutput, error,
	)
}

type Store

type Store interface {
	Upload(ctx context.Context, keys ...string) error
	Retrieve(ctx context.Context, mmap metadata.CfileMetadataMap, keys ...string) error
	GetOptions() (Options, error)
	Close() error
}

type StoreType

type StoreType string
const (
	StoreTypeUndefined StoreType = "undefined"
	StoreTypeS3        StoreType = "s3"
	StoreTypeGCS       StoreType = "gcs"
	StoreTypeAzureBlob StoreType = "azure"
	StoreTypeGoPlugin  StoreType = "plugin"
)

type StoreWithGetters

type StoreWithGetters interface {
	Store
	GetFsys() (afero.Fs, error)
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL