package jobs

Version: v1.0.0
Published: Jun 22, 2022 License: Apache-2.0 Imports: 14 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func DataSourceJobs added in v0.5.1

func DataSourceJobs() *schema.Resource

func ResourceJob

func ResourceJob() *schema.Resource

Types

type CronSchedule

type CronSchedule struct {
	QuartzCronExpression string `json:"quartz_cron_expression"`
	TimezoneID           string `json:"timezone_id"`
	PauseStatus          string `json:"pause_status,omitempty" tf:"computed"`
}

CronSchedule contains the Quartz cron expression, timezone ID, and pause status for a job schedule
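
For illustration, a sketch of a schedule that runs every day at 07:00 UTC, assuming this package is imported as jobs; the expression, timezone, and pause status are example values, not defaults of this package:

var schedule = jobs.CronSchedule{
	QuartzCronExpression: "0 0 7 * * ?", // Quartz syntax: sec min hour day-of-month month day-of-week
	TimezoneID:           "UTC",
	PauseStatus:          "PAUSED", // omit to leave the schedule active
}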

type EmailNotifications

type EmailNotifications struct {
	OnStart               []string `json:"on_start,omitempty"`
	OnSuccess             []string `json:"on_success,omitempty"`
	OnFailure             []string `json:"on_failure,omitempty"`
	NoAlertForSkippedRuns bool     `json:"no_alert_for_skipped_runs,omitempty"`
	AlertOnLastAttempt    bool     `json:"alert_on_last_attempt,omitempty"`
}

EmailNotifications contains the email addresses to notify on job start, success, and failure
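
A minimal sketch, again assuming this package is imported as jobs; the recipient address is hypothetical:

var notifications = jobs.EmailNotifications{
	OnFailure:             []string{"oncall@example.com"}, // hypothetical recipient
	NoAlertForSkippedRuns: true,
}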

type GitSource added in v0.5.0

type GitSource struct {
	Url      string `json:"git_url" tf:"alias:url"`
	Provider string `json:"git_provider,omitempty" tf:"alias:provider"`
	Branch   string `json:"git_branch,omitempty" tf:"alias:branch"`
	Tag      string `json:"git_tag,omitempty" tf:"alias:tag"`
	Commit   string `json:"git_commit,omitempty" tf:"alias:commit"`
}

GitSource specifies the remote Git repository used as the source for a job (Jobs + Repo integration preview)
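
A sketch of a GitSource pointing a job at a branch of a remote repository; the URL is hypothetical and the provider string is an example of the identifiers accepted by the Jobs API. Assign the value to JobSettings.GitSource to use it:

var gitSource = &jobs.GitSource{
	Url:      "https://github.com/example/jobs-notebooks", // hypothetical repository
	Provider: "gitHub",                                    // provider identifier as expected by the Jobs API
	Branch:   "main",
}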

type Job

type Job struct {
	JobID           int64        `json:"job_id,omitempty"`
	CreatorUserName string       `json:"creator_user_name,omitempty"`
	Settings        *JobSettings `json:"settings,omitempty"`
	CreatedTime     int64        `json:"created_time,omitempty"`
}

Job contains the information returned by a GET request to the Databricks Jobs API

func (Job) ID

func (j Job) ID() string

ID returns the job ID as a string

type JobCluster added in v0.4.9

type JobCluster struct {
	JobClusterKey string            `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
	NewCluster    *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
}

type JobList

type JobList struct {
	Jobs []Job `json:"jobs"`
}

JobList holds the list of all jobs returned by the Jobs API

type JobRun

type JobRun struct {
	JobID       int64    `json:"job_id"`
	RunID       int64    `json:"run_id"`
	NumberInJob int64    `json:"number_in_job"`
	StartTime   int64    `json:"start_time,omitempty"`
	State       RunState `json:"state"`
	Trigger     string   `json:"trigger,omitempty"`
	RuntType    string   `json:"run_type,omitempty"`

	OverridingParameters RunParameters `json:"overriding_parameters,omitempty"`
}

JobRun is a simplified representation of the corresponding Jobs API entity

type JobRunsList

type JobRunsList struct {
	Runs    []JobRun `json:"runs"`
	HasMore bool     `json:"has_more"`
}

JobRunsList contains a single page of job runs

type JobRunsListRequest

type JobRunsListRequest struct {
	JobID         int64 `url:"job_id,omitempty"`
	ActiveOnly    bool  `url:"active_only,omitempty"`
	CompletedOnly bool  `url:"completed_only,omitempty"`
	Offset        int32 `url:"offset,omitempty"`
	Limit         int32 `url:"limit,omitempty"`
}

JobRunsListRequest describes the filter and pagination parameters for listing job runs

type JobSettings

type JobSettings struct {
	Name string `json:"name,omitempty" tf:"default:Untitled"`

	// BEGIN Jobs API 2.0
	ExistingClusterID      string              `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster             *clusters.Cluster   `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	NotebookTask           *NotebookTask       `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask           *SparkJarTask       `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask        *SparkPythonTask    `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask        *SparkSubmitTask    `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask           *PipelineTask       `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask        *PythonWheelTask    `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	Libraries              []libraries.Library `json:"libraries,omitempty" tf:"slice_set,alias:library"`
	TimeoutSeconds         int32               `json:"timeout_seconds,omitempty"`
	MaxRetries             int32               `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32               `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                `json:"retry_on_timeout,omitempty"`

	// BEGIN Jobs API 2.1
	Tasks       []JobTaskSettings `json:"tasks,omitempty" tf:"alias:task"`
	Format      string            `json:"format,omitempty" tf:"computed"`
	JobClusters []JobCluster      `json:"job_clusters,omitempty" tf:"alias:job_cluster"`

	// BEGIN Jobs + Repo integration preview
	GitSource *GitSource `json:"git_source,omitempty"`

	Schedule           *CronSchedule       `json:"schedule,omitempty"`
	MaxConcurrentRuns  int32               `json:"max_concurrent_runs,omitempty"`
	EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
	Tags               map[string]string   `json:"tags,omitempty"`
}

JobSettings contains the information for configuring a job on Databricks
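
A minimal sketch of a multi-task (Jobs API 2.1) job definition; every literal value below is illustrative, and the cluster ID and notebook path are hypothetical:

var settings = jobs.JobSettings{
	Name:              "nightly-etl",
	MaxConcurrentRuns: 1,
	Schedule: &jobs.CronSchedule{
		QuartzCronExpression: "0 0 2 * * ?",
		TimezoneID:           "UTC",
	},
	Tasks: []jobs.JobTaskSettings{
		{
			TaskKey:           "ingest",
			ExistingClusterID: "1234-567890-abcde123", // hypothetical cluster ID
			NotebookTask: &jobs.NotebookTask{
				NotebookPath:   "/Repos/etl/ingest", // hypothetical notebook path
				BaseParameters: map[string]string{"env": "prod"},
			},
		},
	},
}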

type JobTaskSettings

type JobTaskSettings struct {
	TaskKey     string           `json:"task_key,omitempty"`
	Description string           `json:"description,omitempty"`
	DependsOn   []TaskDependency `json:"depends_on,omitempty"`

	ExistingClusterID      string              `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster             *clusters.Cluster   `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	JobClusterKey          string              `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
	Libraries              []libraries.Library `json:"libraries,omitempty" tf:"slice_set,alias:library"`
	NotebookTask           *NotebookTask       `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask           *SparkJarTask       `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask        *SparkPythonTask    `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask        *SparkSubmitTask    `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask           *PipelineTask       `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask        *PythonWheelTask    `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	EmailNotifications     *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
	TimeoutSeconds         int32               `json:"timeout_seconds,omitempty"`
	MaxRetries             int32               `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32               `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                `json:"retry_on_timeout,omitempty" tf:"computed"`
}

type JobsAPI

type JobsAPI struct {
	// contains filtered or unexported fields
}

JobsAPI exposes the Jobs API

func NewJobsAPI

func NewJobsAPI(ctx context.Context, m interface{}) JobsAPI

NewJobsAPI creates a JobsAPI instance from the provider meta

func (JobsAPI) Create

func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)

Create creates a job in the workspace from the given job settings
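
A hedged sketch of creating a job, assuming "context" and "log" are imported along with this package, settings is a JobSettings value like the one sketched above, and meta stands for the provider meta value that NewJobsAPI expects as its second argument:

api := jobs.NewJobsAPI(context.Background(), meta)
job, err := api.Create(settings)
if err != nil {
	log.Fatalf("cannot create job: %v", err)
}
log.Printf("created job %s", job.ID()) // Job.ID renders JobID as a string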

func (JobsAPI) Delete

func (a JobsAPI) Delete(id string) error

Delete deletes the job with the given job ID

func (JobsAPI) List

func (a JobsAPI) List() (l JobList, err error)

List lists all jobs in the workspace

func (JobsAPI) Read

func (a JobsAPI) Read(id string) (job Job, err error)

Read returns the job object with all of its attributes

func (JobsAPI) Restart

func (a JobsAPI) Restart(id string, timeout time.Duration) error

func (JobsAPI) RunNow

func (a JobsAPI) RunNow(jobID int64) (int64, error)

RunNow triggers the job and returns a run ID
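
Continuing the sketch above (api is a JobsAPI and job is the created Job), triggering a run and then reading its state could look like:

runID, err := api.RunNow(job.JobID)
if err != nil {
	log.Fatalf("cannot trigger run: %v", err)
}
run, err := api.RunsGet(runID)
if err != nil {
	log.Fatalf("cannot read run: %v", err)
}
log.Printf("run %d is %s", run.RunID, run.State.LifeCycleState)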

func (JobsAPI) RunsCancel

func (a JobsAPI) RunsCancel(runID int64, timeout time.Duration) error

RunsCancel cancels a job run and waits until it finishes

func (JobsAPI) RunsGet

func (a JobsAPI) RunsGet(runID int64) (JobRun, error)

RunsGet retrieves information about a single run

func (JobsAPI) RunsList

func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)

RunsList returns a page of job runs matching the request
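
A sketch of paging through the completed runs of one job with JobRunsListRequest; the page size is an arbitrary example:

req := jobs.JobRunsListRequest{
	JobID:         job.JobID,
	CompletedOnly: true,
	Limit:         25,
}
for {
	page, err := api.RunsList(req)
	if err != nil {
		log.Fatalf("cannot list runs: %v", err)
	}
	for _, r := range page.Runs {
		log.Printf("run %d finished with %s", r.RunID, r.State.ResultState)
	}
	if !page.HasMore || len(page.Runs) == 0 {
		break
	}
	req.Offset += int32(len(page.Runs))
}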

func (JobsAPI) Start

func (a JobsAPI) Start(jobID int64, timeout time.Duration) error

func (JobsAPI) Update

func (a JobsAPI) Update(id string, jobSettings JobSettings) error

Update updates a job given its ID and a new set of job settings
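
For example, renaming the job created in the earlier sketch:

settings.Name = "nightly-etl-v2" // illustrative new name
if err := api.Update(job.ID(), settings); err != nil {
	log.Fatalf("cannot update job: %v", err)
}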

type NotebookTask

type NotebookTask struct {
	NotebookPath   string            `json:"notebook_path"`
	BaseParameters map[string]string `json:"base_parameters,omitempty"`
}

NotebookTask contains the information for notebook jobs

type PipelineTask

type PipelineTask struct {
	PipelineID string `json:"pipeline_id"`
}

PipelineTask contains the information for pipeline jobs

type PythonWheelTask

type PythonWheelTask struct {
	EntryPoint      string            `json:"entry_point,omitempty"`
	PackageName     string            `json:"package_name,omitempty"`
	Parameters      []string          `json:"parameters,omitempty"`
	NamedParameters map[string]string `json:"named_parameters,omitempty"`
}

PythonWheelTask contains the information for Python wheel jobs

type RunParameters

type RunParameters struct {
	// a shortcut field to reuse this type for RunNow
	JobID int64 `json:"job_id,omitempty"`

	NotebookParams    map[string]string `json:"notebook_params,omitempty"`
	JarParams         []string          `json:"jar_params,omitempty"`
	PythonParams      []string          `json:"python_params,omitempty"`
	SparkSubmitParams []string          `json:"spark_submit_params,omitempty"`
}

RunParameters is used to pass parameters to job tasks

type RunState

type RunState struct {
	ResultState    string `json:"result_state,omitempty"`
	LifeCycleState string `json:"life_cycle_state,omitempty"`
	StateMessage   string `json:"state_message,omitempty"`
}

RunState describes the current state of a job run

type SparkJarTask

type SparkJarTask struct {
	JarURI        string   `json:"jar_uri,omitempty"`
	MainClassName string   `json:"main_class_name,omitempty"`
	Parameters    []string `json:"parameters,omitempty"`
}

SparkJarTask contains the information for JAR jobs

type SparkPythonTask

type SparkPythonTask struct {
	PythonFile string   `json:"python_file"`
	Parameters []string `json:"parameters,omitempty"`
}

SparkPythonTask contains the information for Python jobs

type SparkSubmitTask

type SparkSubmitTask struct {
	Parameters []string `json:"parameters,omitempty"`
}

SparkSubmitTask contains the information for spark-submit jobs

type TaskDependency

type TaskDependency struct {
	TaskKey string `json:"task_key,omitempty"`
}

type UpdateJobRequest

type UpdateJobRequest struct {
	JobID       int64        `json:"job_id,omitempty" url:"job_id,omitempty"`
	NewSettings *JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
}

UpdateJobRequest is the request body for updating an existing job's settings
