cfbackup

package module
v0.1.14 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 22, 2015 License: Apache-2.0 Imports: 21 Imported by: 0

README

cfbackup

Cloud Foundry Backup Utilities

wercker status

GoDoc

This repo is meant to be included in other projects. It provides method calls for backing up Ops Manager and Elastic Runtime.

Documentation

Index

Constants

View Source
// Logger names used to tag backup and restore log output.
const (
	BACKUP_LOGGER_NAME  = "Backup"
	RESTORE_LOGGER_NAME = "Restore"
)
View Source
// Elastic Runtime defaults: system user, BOSH director URL templates
// (fmt patterns taking the director host and deployment name), keys for
// the SystemsInfo map, backup file naming, and error message text.
const (
	ER_DEFAULT_SYSTEM_USER        = "vcap"
	ER_DIRECTOR_INFO_URL          = "https://%s:25555/info"           // %s: director host
	ER_BACKUP_DIR                 = "elasticruntime"                  // subdirectory under the backup target
	ER_VMS_URL                    = "https://%s:25555/deployments/%s/vms" // %s, %s: director host, deployment name
	ER_DIRECTOR                   = "DirectorInfo"
	ER_CONSOLE                    = "ConsoledbInfo"
	ER_UAA                        = "UaadbInfo"
	ER_CC                         = "CcdbInfo"
	ER_MYSQL                      = "MysqldbInfo"
	ER_NFS                        = "NfsInfo"
	ER_BACKUP_FILE_FORMAT         = "%s.backup" // %s: component name
	ER_INVALID_DIRECTOR_CREDS_MSG = "invalid director credentials"
	ER_NO_PERSISTENCE_ARCHIVES    = "there are no persistence stores in the list"
	ER_FILE_DOES_NOT_EXIST        = "file does not exist"
	ER_DB_BACKUP_FAILURE          = "failed to backup database"
)
View Source
// Archive transfer directions — presumably the action values accepted by
// ElasticRuntime.RunDbAction (TODO confirm against its implementation).
const (
	IMPORT_ARCHIVE = iota // restore data into the system
	EXPORT_ARCHIVE        // dump data out of the system
)
View Source
// NFS backup defaults: the remote store path, the archive subdirectory
// within it, and the SSH user used to reach the NFS server.
const (
	NFS_DIR_PATH         string = "/var/vcap/store"
	NFS_ARCHIVE_DIR      string = "shared"
	NFS_DEFAULT_SSH_USER string = "vcap"
)
View Source
// Ops Manager constants: artifact filenames and multipart-upload field
// names, backup directory layout, default SSH user/port, and API URL
// templates (fmt patterns taking the Ops Manager hostname).
const (
	OPSMGR_INSTALLATION_SETTINGS_FILENAME       string = "installation.json"
	OPSMGR_INSTALLATION_SETTINGS_POSTFIELD_NAME string = "installation[file]"
	OPSMGR_INSTALLATION_ASSETS_FILENAME         string = "installation.zip"
	OPSMGR_INSTALLATION_ASSETS_POSTFIELD_NAME   string = "installation[file]"
	OPSMGR_DEPLOYMENTS_FILENAME                 string = "deployments.tar.gz"
	OPSMGR_ENCRYPTIONKEY_FILENAME               string = "cc_db_encryption_key.txt"
	OPSMGR_BACKUP_DIR                           string = "opsmanager"
	OPSMGR_DEPLOYMENTS_DIR                      string = "deployments"
	OPSMGR_DEFAULT_USER                         string = "tempest"
	OPSMGR_DEFAULT_SSH_PORT                     int    = 22
	OPSMGR_INSTALLATION_SETTINGS_URL            string = "https://%s/api/installation_settings"       // %s: Ops Manager host
	OPSMGR_INSTALLATION_ASSETS_URL              string = "https://%s/api/installation_asset_collection" // %s: Ops Manager host
	OPSMGR_DEPLOYMENTS_FILE                     string = "/var/tempest/workspaces/default/deployments/bosh-deployments.yml"
)
View Source
// Field-name keys mirroring the SystemInfo struct fields, for use with
// its string-keyed Get/Set accessors.
const (
	SD_PRODUCT   string = "Product"
	SD_COMPONENT string = "Component"
	SD_IDENTITY  string = "Identity"
	SD_IP        string = "Ip"
	SD_USER      string = "User"
	SD_PASS      string = "Pass"
	SD_VCAPUSER  string = "VcapUser"
	SD_VCAPPASS  string = "VcapPass"
)

Variables

View Source
// Action builders for RunPipeline: each maps a Tile to the bound method
// (Restore or Backup) that the pipeline should invoke for it.
var (
	TILE_RESTORE_ACTION = func(t Tile) func() error {
		return t.Restore
	}
	TILE_BACKUP_ACTION = func(t Tile) func() error {
		return t.Backup
	}
)
View Source
// Sentinel errors wrapping the ER_* message constants above.
var (
	ER_ERROR_DIRECTOR_CREDS = errors.New(ER_INVALID_DIRECTOR_CREDS_MSG)
	ER_ERROR_EMPTY_DB_LIST  = errors.New(ER_NO_PERSISTENCE_ARCHIVES)
	ER_ERROR_INVALID_PATH   = &os.PathError{Err: errors.New(ER_FILE_DOES_NOT_EXIST)}
	ER_DB_BACKUP            = errors.New(ER_DB_BACKUP_FAILURE)
)
View Source
// NewDirector constructs a BOSH director client for the given endpoint
// and credentials, backed by a fresh HTTP gateway. Declared as a var so
// tests can substitute a fake.
var NewDirector = func(ip, username, password string, port int) bosh.Bosh {
	return bosh.NewBoshDirector(ip, username, password, port, NewHttpGateway())
}
View Source
// NewElasticRuntime initializes an ElasticRuntime instance wired with
// the default set of system descriptors: the microbosh director plus
// the cf databases (uaadb, consoledb, ccdb, mysql) and the NFS server.
// The persistent systems are the subset whose data is archived during
// backup/restore. Declared as a var so tests can substitute a fake.
var NewElasticRuntime = func(jsonFile string, target string, logger log.Logger) *ElasticRuntime {
	uaadb := &PgInfo{
		SystemInfo: SystemInfo{Product: "cf", Component: "uaadb", Identity: "root"},
		Database:   "uaa",
	}
	consoledb := &PgInfo{
		SystemInfo: SystemInfo{Product: "cf", Component: "consoledb", Identity: "root"},
		Database:   "console",
	}
	ccdb := &PgInfo{
		SystemInfo: SystemInfo{Product: "cf", Component: "ccdb", Identity: "admin"},
		Database:   "ccdb",
	}
	mysqldb := &MysqlInfo{
		SystemInfo: SystemInfo{Product: "cf", Component: "mysql", Identity: "root"},
		Database:   "mysql",
	}
	director := &SystemInfo{Product: "microbosh", Component: "director", Identity: "director"}
	nfs := &NfsInfo{
		SystemInfo: SystemInfo{Product: "cf", Component: "nfs_server", Identity: "vcap"},
	}

	return &ElasticRuntime{
		JsonFile: jsonFile,
		BackupContext: BackupContext{
			TargetDir: target,
		},
		SystemsInfo: map[string]SystemDump{
			ER_DIRECTOR: director,
			ER_CONSOLE:  consoledb,
			ER_UAA:      uaadb,
			ER_CC:       ccdb,
			ER_MYSQL:    mysqldb,
			ER_NFS:      nfs,
		},
		PersistentSystems: []SystemDump{
			consoledb,
			uaadb,
			ccdb,
			nfs,
			mysqldb,
		},
		Logger: logger,
	}
}

NewElasticRuntime initializes an ElasticRuntime instance

View Source
// NewOpsManager initializes an OpsManager instance. It first opens a
// remote SSH executer to the Ops Manager host (using the default SSH
// port) and, on success, wires up HTTP gateways and multipart uploaders
// for the installation-settings and installation-assets endpoints. On
// error, context is nil and the executer error is returned. Declared as
// a var so tests can substitute a fake.
var NewOpsManager = func(opsManagerHostname string, adminUsername string, adminPassword string, opsManagerUsername string, opsManagerPassword string, target string, logger log.Logger) (context *OpsManager, err error) {
	remoteExecuter, err := createExecuter(opsManagerHostname, opsManagerUsername, opsManagerPassword, OPSMGR_DEFAULT_SSH_PORT)
	if err != nil {
		return
	}

	context = &OpsManager{
		SettingsUploader:  ghttp.MultiPartUpload,
		AssetsUploader:    ghttp.MultiPartUpload,
		SettingsRequestor: ghttp.NewHttpGateway(),
		AssetsRequestor:   ghttp.NewHttpGateway(),
		DeploymentDir:     path.Join(target, OPSMGR_BACKUP_DIR, OPSMGR_DEPLOYMENTS_DIR),
		Hostname:          opsManagerHostname,
		Username:          adminUsername,
		Password:          adminPassword,
		BackupContext: BackupContext{
			TargetDir: target,
		},
		Executer:            remoteExecuter,
		LocalExecuter:       command.NewLocalExecuter(),
		OpsmanagerBackupDir: OPSMGR_BACKUP_DIR,
		Logger:              logger,
	}
	return
}

NewOpsManager initializes an OpsManager instance

View Source
// RunPipeline runs a pipeline action (restore/backup) on a list of
// tiles: it builds one action per tile via actionBuilder and hands the
// resulting sequence to runActions. Declared as a var so tests can
// substitute a fake.
var RunPipeline = func(actionBuilder func(Tile) func() error, tiles []Tile) (err error) {
	var pipeline []action
	for i := range tiles {
		pipeline = append(pipeline, actionBuilder(tiles[i]))
	}
	return runActions(pipeline)
}

RunPipeline runs a pipeline action (restore/backup) on a list of tiles

View Source
var TaskPingFreq time.Duration = 1000 * time.Millisecond

TaskPingFreq throttles task-status pings so the server is not polled so frequently that resources are exhausted

Functions

func BackupNfs

func BackupNfs(password, ip string, dest io.Writer) (err error)

func ExtractEncryptionKey

func ExtractEncryptionKey(dest io.Writer, deploymentDir string) (err error)

func GetDeploymentName

func GetDeploymentName(jsonObj InstallationCompareObject) (deploymentName string, err error)

func GetPasswordAndIP

func GetPasswordAndIP(jsonObj InstallationCompareObject, product, component, username string) (ip, password string, err error)

func RunBackupPipeline

func RunBackupPipeline(hostname, adminUsername, adminPassword, opsManagerUsername, opsManagerPassword, destination string) (err error)

Backup the list of all default tiles

func RunRestorePipeline

func RunRestorePipeline(hostname, adminUsername, adminPassword, opsManagerUser, opsManagerPassword, destination string) (err error)

Restore the list of all default tiles

func SetLogger added in v0.1.3

func SetLogger(logger log.Logger)

SetLogger - lets us set the logger object

Types

type BackupContext

// BackupContext holds the target directory that backup artifacts are
// written to (and read from on restore).
type BackupContext struct {
	TargetDir string
}

type CCJob added in v0.1.2

// CCJob identifies a single cloud controller job instance by job name
// and instance index.
type CCJob struct {
	Job   string
	Index int
}

func GetCCVMs

func GetCCVMs(jsonObj []VMObject) ([]CCJob, error)

type CloudController

type CloudController struct {
	// contains filtered or unexported fields
}

func NewCloudController

func NewCloudController(ip, username, password, deploymentName, manifest string, cloudControllers CloudControllerJobs) *CloudController

func (*CloudController) Start

func (c *CloudController) Start() error

func (*CloudController) Stop

func (c *CloudController) Stop() error

type CloudControllerDeploymentParser

type CloudControllerDeploymentParser struct {
	// contains filtered or unexported fields
}

func (*CloudControllerDeploymentParser) Parse

func (s *CloudControllerDeploymentParser) Parse(jsonObj []VMObject) ([]CCJob, error)

type CloudControllerJobs

type CloudControllerJobs []CCJob

type ElasticRuntime

// ElasticRuntime contains information about a Pivotal Elastic Runtime deployment.
type ElasticRuntime struct {
	JsonFile          string                // path to the installation settings JSON
	SystemsInfo       map[string]SystemDump // ER_* key -> system descriptor
	PersistentSystems []SystemDump          // subset of SystemsInfo whose data is archived
	HttpGateway       HttpGateway
	InstallationName  string
	BackupContext     // embeds TargetDir for backup artifacts
	Logger log.Logger
}

ElasticRuntime contains information about a Pivotal Elastic Runtime deployment

func (*ElasticRuntime) Backup

func (context *ElasticRuntime) Backup() (err error)

Backup performs a backup of a Pivotal Elastic Runtime deployment

func (*ElasticRuntime) ReadAllUserCredentials

func (context *ElasticRuntime) ReadAllUserCredentials() (err error)

func (*ElasticRuntime) Restore

func (context *ElasticRuntime) Restore() (err error)

Restore performs a restore of a Pivotal Elastic Runtime deployment

func (*ElasticRuntime) RunDbAction

func (context *ElasticRuntime) RunDbAction(dbInfoList []SystemDump, action int) (err error)

type InstallationCompareObject

// InstallationCompareObject mirrors the shape of an Ops Manager
// installation-settings document for unmarshalling and lookup.
type InstallationCompareObject struct {
	Guid                 string
	Installation_Version string
	Products             []productCompareObject
	Infrastructure       infrastructure
}

func ReadAndUnmarshal

func ReadAndUnmarshal(src io.Reader) (jsonObj InstallationCompareObject, err error)

type IpPasswordParser

type IpPasswordParser struct {
	Product   string
	Component string
	Username  string
	// contains filtered or unexported fields
}

func (*IpPasswordParser) Parse

func (s *IpPasswordParser) Parse(jsonObj InstallationCompareObject) (ip, password string, err error)

type MysqlInfo

// MysqlInfo describes the mysql system and the database to archive.
type MysqlInfo struct {
	SystemInfo
	Database string
}

func (*MysqlInfo) GetPersistanceBackup

func (s *MysqlInfo) GetPersistanceBackup() (dumper PersistanceBackup, err error)

type NFSBackup

// NFSBackup dumps and imports the NFS server's shared store over remote
// operations, executing commands through Caller.
type NFSBackup struct {
	Caller    command.Executer
	RemoteOps remoteOpsInterface
}

func NewNFSBackup

func NewNFSBackup(password, ip string) (nfs *NFSBackup, err error)

func (*NFSBackup) Dump

func (s *NFSBackup) Dump(dest io.Writer) (err error)

func (*NFSBackup) Import

func (s *NFSBackup) Import(lfile io.Reader) (err error)

type NfsInfo

// NfsInfo describes the NFS server system; unlike the database infos it
// carries no Database field, as the NFS store is archived as files.
type NfsInfo struct {
	SystemInfo
}

func (*NfsInfo) GetPersistanceBackup

func (s *NfsInfo) GetPersistanceBackup() (dumper PersistanceBackup, err error)

type OpsManager

// OpsManager contains the location and credentials of a Pivotal Ops
// Manager instance, plus the executers, uploaders, and requestors used
// to back it up and restore it.
type OpsManager struct {
	BackupContext       // embeds TargetDir for backup artifacts
	Hostname            string
	Username            string
	Password            string
	TempestPassword     string
	DbEncryptionKey     string
	Executer            command.Executer // remote (SSH) command execution
	LocalExecuter       command.Executer // local command execution
	SettingsUploader    httpUploader
	AssetsUploader      httpUploader
	SettingsRequestor   httpRequestor
	AssetsRequestor     httpRequestor
	DeploymentDir       string
	OpsmanagerBackupDir string
	Logger              log.Logger
}

OpsManager contains the location and credentials of a Pivotal Ops Manager instance

func (*OpsManager) Backup

func (context *OpsManager) Backup() (err error)

Backup performs a backup of a Pivotal Ops Manager instance

func (*OpsManager) Restore

func (context *OpsManager) Restore() (err error)

Restore performs a restore of a Pivotal Ops Manager instance

type PersistanceBackup

// PersistanceBackup is implemented by anything that can dump its state
// to a writer and re-import it from a reader. (Name spelling is part of
// the published API and cannot be corrected without breaking callers.)
type PersistanceBackup interface {
	Dump(io.Writer) error
	Import(io.Reader) error
}

type PgInfo

// PgInfo describes a PostgreSQL-backed system and the database to archive.
type PgInfo struct {
	SystemInfo
	Database string
}

func (*PgInfo) GetPersistanceBackup

func (s *PgInfo) GetPersistanceBackup() (dumper PersistanceBackup, err error)

type SystemDump

type SystemDump interface {
	Error() error
	GetPersistanceBackup() (dumper PersistanceBackup, err error)
	// contains filtered or unexported methods
}

type SystemInfo

// SystemInfo holds the identity, address, and credentials of a deployed
// system component; the embedded GetSet enables string-keyed access to
// the fields via the SD_* constants.
type SystemInfo struct {
	goutil.GetSet
	Product   string
	Component string
	Identity  string
	Ip        string
	User      string
	Pass      string
	VcapUser  string
	VcapPass  string
}

func (*SystemInfo) Error

func (s *SystemInfo) Error() (err error)

func (*SystemInfo) Get

func (s *SystemInfo) Get(name string) string

func (*SystemInfo) GetPersistanceBackup

func (s *SystemInfo) GetPersistanceBackup() (dumper PersistanceBackup, err error)

func (*SystemInfo) Set

func (s *SystemInfo) Set(name string, val string)

type Tile

// Tile is a deployable component that can be backed up and restored.
type Tile interface {
	Backup() error
	Restore() error
}

Tile is a deployable component that can be backed up

type VMObject

// VMObject identifies a deployed VM by job name and instance index, as
// reported by the director's VMs endpoint.
type VMObject struct {
	Job   string
	Index int
}

func ReadAndUnmarshalVMObjects

func ReadAndUnmarshalVMObjects(src io.Reader) (jsonObj []VMObject, err error)

Directories

Path Synopsis
Godeps
_workspace/src/github.com/kr/fs
Package fs provides filesystem-related functions.
Package fs provides filesystem-related functions.
_workspace/src/github.com/onsi/ginkgo
Ginkgo is a BDD-style testing framework for Golang The godoc documentation describes Ginkgo's API.
Ginkgo is a BDD-style testing framework for Golang The godoc documentation describes Ginkgo's API.
_workspace/src/github.com/onsi/ginkgo/config
Ginkgo accepts a number of configuration options.
Ginkgo accepts a number of configuration options.
_workspace/src/github.com/onsi/ginkgo/ginkgo
The Ginkgo CLI The Ginkgo CLI is fully documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli) You can also learn more by running: ginkgo help Here are some of the more commonly used commands: To install: go install github.com/onsi/ginkgo/ginkgo To run tests: ginkgo To run tests in all subdirectories: ginkgo -r To run tests in particular packages: ginkgo <flags> /path/to/package /path/to/another/package To pass arguments/flags to your tests: ginkgo <flags> <packages> -- <pass-throughs> To run tests in parallel ginkgo -p this will automatically detect the optimal number of nodes to use.
The Ginkgo CLI The Ginkgo CLI is fully documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli) You can also learn more by running: ginkgo help Here are some of the more commonly used commands: To install: go install github.com/onsi/ginkgo/ginkgo To run tests: ginkgo To run tests in all subdirectories: ginkgo -r To run tests in particular packages: ginkgo <flags> /path/to/package /path/to/another/package To pass arguments/flags to your tests: ginkgo <flags> <packages> -- <pass-throughs> To run tests in parallel ginkgo -p this will automatically detect the optimal number of nodes to use.
_workspace/src/github.com/onsi/ginkgo/internal/remote
Aggregator is a reporter used by the Ginkgo CLI to aggregate and present parallel test output coherently as tests complete.
Aggregator is a reporter used by the Ginkgo CLI to aggregate and present parallel test output coherently as tests complete.
_workspace/src/github.com/onsi/ginkgo/reporters
Ginkgo's Default Reporter A number of command line flags are available to tweak Ginkgo's default output.
Ginkgo's Default Reporter A number of command line flags are available to tweak Ginkgo's default output.
_workspace/src/github.com/onsi/gomega
Gomega is the Ginkgo BDD-style testing framework's preferred matcher library.
Gomega is the Ginkgo BDD-style testing framework's preferred matcher library.
_workspace/src/github.com/onsi/gomega/format
Gomega's format package pretty-prints objects.
Gomega's format package pretty-prints objects.
_workspace/src/github.com/onsi/gomega/gbytes
Package gbytes provides a buffer that supports incrementally detecting input.
Package gbytes provides a buffer that supports incrementally detecting input.
_workspace/src/github.com/onsi/gomega/gexec
Package gexec provides support for testing external processes.
Package gexec provides support for testing external processes.
_workspace/src/github.com/onsi/gomega/ghttp
Package ghttp supports testing HTTP clients by providing a test server (simply a thin wrapper around httptest's server) that supports registering multiple handlers.
Package ghttp supports testing HTTP clients by providing a test server (simply a thin wrapper around httptest's server) that supports registering multiple handlers.
_workspace/src/github.com/onsi/gomega/matchers
Gomega matchers This package implements the Gomega matchers and does not typically need to be imported.
Gomega matchers This package implements the Gomega matchers and does not typically need to be imported.
_workspace/src/github.com/pivotalservices/sftp
Package sftp implements the SSH File Transfer Protocol as described in https://filezilla-project.org/specs/draft-ietf-secsh-filexfer-02.txt
Package sftp implements the SSH File Transfer Protocol as described in https://filezilla-project.org/specs/draft-ietf-secsh-filexfer-02.txt
_workspace/src/github.com/pivotalservices/sftp/examples/buffered-read-benchmark
buffered-read-benchmark benchmarks the performance of reading from /dev/zero on the server to a []byte on the client via io.Copy.
buffered-read-benchmark benchmarks the performance of reading from /dev/zero on the server to a []byte on the client via io.Copy.
_workspace/src/github.com/pivotalservices/sftp/examples/buffered-write-benchmark
buffered-write-benchmark benchmarks the performance of writing a single large []byte on the client to /dev/null on the server via io.Copy.
buffered-write-benchmark benchmarks the performance of writing a single large []byte on the client to /dev/null on the server via io.Copy.
_workspace/src/github.com/pivotalservices/sftp/examples/gsftp
gsftp implements a simple sftp client.
gsftp implements a simple sftp client.
_workspace/src/github.com/pivotalservices/sftp/examples/streaming-read-benchmark
streaming-read-benchmark benchmarks the performance of reading from /dev/zero on the server to /dev/null on the client via io.Copy.
streaming-read-benchmark benchmarks the performance of reading from /dev/zero on the server to /dev/null on the client via io.Copy.
_workspace/src/github.com/pivotalservices/sftp/examples/streaming-write-benchmark
streaming-write-benchmark benchmarks the performance of writing from /dev/zero on the client to /dev/null on the server via io.Copy.
streaming-write-benchmark benchmarks the performance of writing from /dev/zero on the client to /dev/null on the server via io.Copy.
_workspace/src/gopkg.in/datianshi/crypto.v1/ssh
Package ssh implements an SSH client and server.
Package ssh implements an SSH client and server.
_workspace/src/gopkg.in/datianshi/crypto.v1/ssh/agent
Package agent implements a client to an ssh-agent daemon.
Package agent implements a client to an ssh-agent daemon.
_workspace/src/gopkg.in/datianshi/crypto.v1/ssh/terminal
Package terminal provides support functions for dealing with terminals, as commonly found on UNIX systems.
Package terminal provides support functions for dealing with terminals, as commonly found on UNIX systems.
_workspace/src/gopkg.in/datianshi/crypto.v1/ssh/test
This package contains integration tests for the code.google.com/p/go.crypto/ssh package.
This package contains integration tests for the code.google.com/p/go.crypto/ssh package.
_workspace/src/gopkg.in/yaml.v1
Package yaml implements YAML support for the Go language.
Package yaml implements YAML support for the Go language.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL