pipelines

package
v2.139.1 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 30, 2024 License: Apache-2.0 Imports: 18 Imported by: 1

README

CDK Pipelines

A construct library for painless Continuous Delivery of CDK applications.

CDK Pipelines is an opinionated construct library. It is purpose-built to deploy one or more copies of your CDK applications using CloudFormation with a minimal amount of effort on your part. It is not intended to support arbitrary deployment pipelines, and very specifically it is not built to use CodeDeploy to deploy applications to instances, or deploy your custom-built ECR images to an ECS cluster directly: use CDK file assets with CloudFormation Init for instances, or CDK container assets for ECS clusters instead.

Give the CDK Pipelines way of doing things a shot first: you might find it does everything you need. If you need more control, or if you need v2 support from aws-codepipeline, we recommend you drop down to using the aws-codepipeline construct library directly.

This module contains two sets of APIs: an original and a modern version of CDK Pipelines. The modern API has been updated to be easier to work with and customize, and will be the preferred API going forward. The original version of the API is still available for backwards compatibility, but we recommend migrating to the new version if possible.

Compared to the original API, the modern API: has more sensible defaults; is more flexible; supports parallel deployments; supports multiple synth inputs; allows more control of CodeBuild project generation; supports deployment engines other than CodePipeline.

The README for the original API, as well as a migration guide, can be found in our GitHub repository.

At a glance

Deploying your application continuously starts by defining a MyApplicationStage, a subclass of Stage that contains the stacks that make up a single copy of your application.

You then define a Pipeline, instantiate as many instances of MyApplicationStage as you want for your test and production environments, with different parameters for each, and calling pipeline.addStage() for each of them. You can deploy to the same account and Region, or to a different one, with the same amount of code. The CDK Pipelines library takes care of the details.

CDK Pipelines supports multiple deployment engines (see Using a different deployment engine), and comes with a deployment engine that deploys CDK apps using AWS CodePipeline. To use the CodePipeline engine, define a CodePipeline construct. The following example creates a CodePipeline that deploys an application from GitHub:

/** The stacks for our app are minimally defined here.  The internals of these
  * stacks aren't important, except that DatabaseStack exposes an attribute
  * "table" for a database table it defines, and ComputeStack accepts a reference
  * to this table in its properties.
  */
// databaseStack is a Stack that owns the application's database table.
// The table attribute is exposed so other stacks (here, computeStack via
// computeProps) can reference it.
type databaseStack struct {
	stack
	table tableV2 // created in newDatabaseStack; passed to computeProps in newMyApplication
}

// newDatabaseStack creates the database stack and provisions a DynamoDB
// TableV2 with a string partition key named "id". The table is stored on
// the struct so callers can hand it to other stacks.
func newDatabaseStack(scope construct, id *string) *databaseStack {
	this := &databaseStack{}
	newStack_Override(this, scope, id)
	this.table = dynamodb.NewTableV2(this, jsii.String("Table"), &TablePropsV2{
		PartitionKey: &Attribute{
			Name: jsii.String("id"),
			Type: dynamodb.AttributeType_STRING,
		},
	})
	return this
}

// computeProps carries the cross-stack references computeStack needs.
type computeProps struct {
	// table is the DynamoDB table exposed by databaseStack. Declared as
	// tableV2 (the original had *tableV2) to match databaseStack.table,
	// whose value is assigned to this field directly in newMyApplication.
	table tableV2
}

// computeStack is a Stack that consumes the database table via
// computeProps. Its internals are intentionally minimal in this example.
type computeStack struct {
	stack
}

// newComputeStack creates the compute stack.
// NOTE(review): props (including props.table) is accepted but unused in
// this minimal example body; a real stack would consume the table here.
func newComputeStack(scope construct, id *string, props computeProps) *computeStack {
	this := &computeStack{}
	newStack_Override(this, scope, id)
	return this
}

/**
 * Stack to hold the pipeline
 */
type myPipelineStack struct {
	stack // embeds Stack; the pipeline itself is created in newMyPipelineStack
}

// newMyPipelineStack defines the stack that holds the CodePipeline. The
// pipeline synthesizes the app from a GitHub source (authenticated via a
// CodeStar connection) and deploys the MyApplication stage to the given
// account and region.
func newMyPipelineStack(scope construct, id *string, props stackProps) *myPipelineStack {
	this := &myPipelineStack{}
	newStack_Override(this, scope, id, props)

	pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
		// Synth step: install dependencies, build, and run `cdk synth`
		// to produce the cloud assembly the pipeline deploys from.
		Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
			// Use a connection created using the AWS console to authenticate to GitHub
			// Other sources are available.
			Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
				ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
			}),
			Commands: []*string{
				jsii.String("npm ci"),
				jsii.String("npm run build"),
				jsii.String("npx cdk synth"),
			},
		}),
	})

	// 'MyApplication' is defined below. Call `addStage` as many times as
	// necessary with any account and region (may be different from the
	// pipeline's).
	pipeline.AddStage(NewMyApplication(this, jsii.String("Prod"), &stageProps{
		Env: &Environment{
			Account: jsii.String("123456789012"),
			Region: jsii.String("eu-west-1"),
		},
	}))
	return this
}

/**
 * Your application
 *
 * May consist of one or more Stacks (here, two)
 *
 * By declaring our DatabaseStack and our ComputeStack inside a Stage,
 * we make sure they are deployed together, or not at all.
 */
type myApplication struct {
	stage // embeds Stage; member stacks are added in newMyApplication
}

// newMyApplication wires the application's two stacks together inside one
// Stage: the database stack is created first and its table is handed to
// the compute stack via computeProps, so both deploy together or not at all.
func newMyApplication(scope construct, id *string, props stageProps) *myApplication {
	this := &myApplication{}
	newStage_Override(this, scope, id, props)

	dbStack := NewDatabaseStack(this, jsii.String("Database"))
	NewComputeStack(this, jsii.String("Compute"), &computeProps{
		table: dbStack.table,
	})
	return this
}

// In your main file
NewMyPipelineStack(this, jsii.String("PipelineStack"), &stackProps{
	Env: &Environment{
		Account: jsii.String("123456789012"),
		Region: jsii.String("eu-west-1"),
	},
})

The pipeline is self-mutating, which means that if you add new application stages in the source code, or new stacks to MyApplication, the pipeline will automatically reconfigure itself to deploy those new stages and stacks.

(Note that you have to bootstrap all environments before the above code will work, and switch on "Modern synthesis" if you are using CDKv1. See the section CDK Environment Bootstrapping below for more information).

Provisioning the pipeline

To provision the pipeline you have defined, make sure the target environment has been bootstrapped (see below), and then deploy the PipelineStack once. Afterwards, the pipeline will keep itself up-to-date.

Important: be sure to git commit and git push before deploying the Pipeline stack using cdk deploy!

The reason is that the pipeline will start deploying and self-mutating right away based on the sources in the repository, so the sources it finds in there should be the ones you want it to find.

Run the following commands to get the pipeline going:

$ git commit -a
$ git push
$ cdk deploy PipelineStack

Administrative permissions to the account are only necessary up until this point. We recommend you remove access to these credentials after doing this.

Working on the pipeline

The self-mutation feature of the Pipeline might at times get in the way of the pipeline development workflow. Each change to the pipeline must be pushed to git, otherwise, after the pipeline was updated using cdk deploy, it will automatically revert to the state found in git.

To make the development more convenient, the self-mutation feature can be turned off temporarily, by passing selfMutation: false property, example:

// Modern API
modernPipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	SelfMutation: jsii.Boolean(false),
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

// Original API
cloudAssemblyArtifact := codepipeline.NewArtifact()
originalPipeline := pipelines.NewCdkPipeline(this, jsii.String("Pipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(false),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})

Defining the pipeline

This section of the documentation describes the AWS CodePipeline engine, which comes with this library. If you want to use a different deployment engine, read the section Using a different deployment engine below.

Synth and sources

To define a pipeline, instantiate a CodePipeline construct from the aws-cdk-lib/pipelines module. It takes one argument, a synth step, which is expected to produce the CDK Cloud Assembly as its single output (the contents of the cdk.out directory after running cdk synth). "Steps" are arbitrary actions in the pipeline, typically used to run scripts or commands.

For the synth, use a ShellStep and specify the commands necessary to install dependencies, the CDK CLI, build your project and run cdk synth; the specific commands required will depend on the programming language you are using. For a typical NPM-based project, the synth will look like this:

var source iFileSetProducer
// the repository source

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

The pipeline assumes that your ShellStep will produce a cdk.out directory in the root, containing the CDK cloud assembly. If your CDK project lives in a subdirectory, be sure to adjust the primaryOutputDirectory to match:

var source iFileSetProducer
// the repository source

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("cd mysubdir"),
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
		PrimaryOutputDirectory: jsii.String("mysubdir/cdk.out"),
	}),
})

The underlying aws-cdk-lib/aws-codepipeline.Pipeline construct will be produced when app.synth() is called. You can also force it to be produced earlier by calling pipeline.buildPipeline(). After you've called that method, you can inspect the constructs that were produced by accessing the properties of the pipeline object.

Commands for other languages and package managers

The commands you pass to new ShellStep will be very similar to the commands you run on your own workstation to install dependencies and synth your CDK project. Here are some (non-exhaustive) examples for what those commands might look like in a number of different situations.

For Yarn, the install commands are different:

var source iFileSetProducer
// the repository source

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("yarn install --frozen-lockfile"),
			jsii.String("yarn build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

For Python projects, remember to install the CDK CLI globally (as there is no package.json to automatically install it for you):

var source iFileSetProducer
// the repository source

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("pip install -r requirements.txt"),
			jsii.String("npm install -g aws-cdk"),
			jsii.String("cdk synth"),
		},
	}),
})

For Java projects, remember to install the CDK CLI globally (as there is no package.json to automatically install it for you), and the Maven compilation step is automatically executed for you as you run cdk synth:

var source iFileSetProducer
// the repository source

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("npm install -g aws-cdk"),
			jsii.String("cdk synth"),
		},
	}),
})

You can adapt these examples to your own situation.

Migrating from buildspec.yml files

You may currently have the build instructions for your CodeBuild Projects in a buildspec.yml file in your source repository. In addition to your build commands, the CodeBuild Project's buildspec also controls some information that CDK Pipelines manages for you, like artifact identifiers, input artifact locations, Docker authorization, and exported variables.

Since there is no way in general for CDK Pipelines to modify the file in your source repository, CDK Pipelines configures the BuildSpec directly on the CodeBuild Project, instead of loading it from the buildspec.yml file. This requires a pipeline self-mutation to update.

To avoid this, put your build instructions in a separate script, for example build.sh, and call that script from the build commands array:

var source iFileSetProducer


pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
			jsii.String("./build.sh"),
		},
	}),
})

Doing so keeps your exact build instructions in sync with your source code in the source repository where it belongs, and provides a convenient build script for developers at the same time.

CodePipeline Sources

In CodePipeline, Sources define where the source of your application lives. When a change to the source is detected, the pipeline will start executing. Source objects can be created by factory methods on the CodePipelineSource class:

GitHub, GitHub Enterprise, BitBucket using a connection

The recommended way of connecting to GitHub or BitBucket is by using a connection. You will first use the AWS Console to authenticate to the source control provider, and then use the connection ARN in your pipeline definition:

pipelines.CodePipelineSource_Connection(jsii.String("org/repo"), jsii.String("branch"), &ConnectionSourceOptions{
	ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
})
GitHub using OAuth

You can also authenticate to GitHub using a personal access token. This expects that you've created a personal access token and stored it in Secrets Manager. By default, the source object will look for a secret named github-token, but you can change the name. The token should have the repo and admin:repo_hook scopes.

pipelines.CodePipelineSource_GitHub(jsii.String("org/repo"), jsii.String("branch"), &GitHubSourceOptions{
	// This is optional
	Authentication: cdk.SecretValue_SecretsManager(jsii.String("my-token")),
})
CodeCommit

You can use a CodeCommit repository as the source. Either create or import the CodeCommit repository and then use CodePipelineSource.codeCommit to reference it:

repository := codecommit.Repository_FromRepositoryName(this, jsii.String("Repository"), jsii.String("my-repository"))
pipelines.CodePipelineSource_CodeCommit(repository, jsii.String("main"))
S3

You can use a zip file in S3 as the source of the pipeline. The pipeline will be triggered every time the file in S3 is changed:

bucket := s3.Bucket_FromBucketName(this, jsii.String("Bucket"), jsii.String("my-bucket"))
pipelines.CodePipelineSource_S3(bucket, jsii.String("my/source.zip"))
ECR

You can use a Docker image in ECR as the source of the pipeline. The pipeline will be triggered every time an image is pushed to ECR:

repository := ecr.NewRepository(this, jsii.String("Repository"))
pipelines.CodePipelineSource_Ecr(repository)
Additional inputs

ShellStep allows passing in more than one input: additional inputs will be placed in the directories you specify. Any step that produces an output file set can be used as an input, such as a CodePipelineSource, but also other ShellStep:

// A prebuild step whose './build' output directory becomes an input of the
// synth step below.
prebuild := pipelines.NewShellStep(jsii.String("Prebuild"), &ShellStepProps{
	Input: pipelines.CodePipelineSource_GitHub(jsii.String("myorg/repo1"), jsii.String("main")),
	PrimaryOutputDirectory: jsii.String("./build"),
	Commands: []*string{
		jsii.String("./build.sh"),
	},
})

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		// CodePipelineSource_GitHub is the factory name (the original read
		// CodePipelineSource_*GitHub, a doc-generation artifact; compare the
		// prebuild step above).
		Input: pipelines.CodePipelineSource_GitHub(jsii.String("myorg/repo2"), jsii.String("main")),
		AdditionalInputs: map[string]iFileSetProducer{
			"subdir": pipelines.CodePipelineSource_GitHub(jsii.String("myorg/repo3"), jsii.String("main")),
			"../siblingdir": prebuild,
		},

		Commands: []*string{
			jsii.String("./build.sh"),
		},
	}),
})
CDK application deployments

After you have defined the pipeline and the synth step, you can add one or more CDK Stages which will be deployed to their target environments. To do so, call pipeline.addStage() on the Stage object:

var pipeline codePipeline

// Do this as many times as necessary with any account and region
// Account and region may differ from the pipeline's.
pipeline.AddStage(NewMyApplicationStage(this, jsii.String("Prod"), &stageProps{
	Env: &Environment{
		Account: jsii.String("123456789012"),
		Region: jsii.String("eu-west-1"),
	},
}))

CDK Pipelines will automatically discover all Stacks in the given Stage object, determine their dependency order, and add appropriate actions to the pipeline to publish the assets referenced in those stacks and deploy the stacks in the right order.

If the Stacks are targeted at an environment in a different AWS account or Region and that environment has been bootstrapped, CDK Pipelines will transparently make sure the IAM roles are set up correctly and any requisite replication Buckets are created.

Deploying in parallel

By default, all applications added to CDK Pipelines by calling addStage() will be deployed in sequence, one after the other. If you have a lot of stages, you can speed up the pipeline by choosing to deploy some stages in parallel. You do this by calling addWave() instead of addStage(): a wave is a set of stages that are all deployed in parallel instead of sequentially. Waves themselves are still deployed in sequence. For example, the following will deploy two copies of your application to eu-west-1 and eu-central-1 in parallel:

var pipeline codePipeline

europeWave := pipeline.AddWave(jsii.String("Europe"))
europeWave.AddStage(NewMyApplicationStage(this, jsii.String("Ireland"), &stageProps{
	Env: &Environment{
		Region: jsii.String("eu-west-1"),
	},
}))
europeWave.AddStage(NewMyApplicationStage(this, jsii.String("Germany"), &stageProps{
	Env: &Environment{
		Region: jsii.String("eu-central-1"),
	},
}))
Deploying to other accounts / encrypting the Artifact Bucket

CDK Pipelines can transparently deploy to other Regions and other accounts (provided those target environments have been bootstrapped). However, deploying to another account requires one additional piece of configuration: you need to enable crossAccountKeys: true when creating the pipeline.

This will encrypt the artifact bucket(s), but incurs a cost for maintaining the KMS key.

You may also wish to enable automatic key rotation for the created KMS key.

Example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Encrypt artifacts, required for cross-account deployments
	CrossAccountKeys: jsii.Boolean(true),
	EnableKeyRotation: jsii.Boolean(true),
	 // optional
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})
Deploying without change sets

By default, the CodePipeline engine deploys using change sets: it first creates a change set and then executes it. This allows you to inject steps that inspect the change set and approve or reject it, but failed deployments are not retryable and creating the change set costs time.

The creation of change sets can be switched off by setting useChangeSets: false:

var synth shellStep


// pipelineStack holds the pipeline defined in newPipelineStack.
type pipelineStack struct {
	stack
}

// newPipelineStack builds a pipeline that deploys without CloudFormation
// change sets: UseChangeSets:false makes each stack deployment a single
// pipeline action instead of a create-change-set + execute-change-set pair.
func newPipelineStack(scope construct, id *string, props stackProps) *pipelineStack {
	this := &pipelineStack{}
	newStack_Override(this, scope, id, props)

	pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
		// Use the synth step declared above (the original read
		// "Synth: Synth", an undefined identifier).
		Synth: synth,

		// Disable change set creation and make deployments in pipeline as single step
		UseChangeSets: jsii.Boolean(false),
	})
	return this
}
Validation

Every addStage() and addWave() command takes additional options. As part of these options, you can specify pre and post steps, which are arbitrary steps that run before or after the contents of the stage or wave, respectively. You can use these to add validations like manual or automated gates to your pipeline. We recommend putting manual approval gates in the set of pre steps, and automated approval gates in the set of post steps.

The following example shows both an automated approval in the form of a ShellStep, and a manual approval in the form of a ManualApprovalStep added to the pipeline. Both must pass in order to promote from the PreProd to the Prod environment:

var pipeline codePipeline

preprod := NewMyApplicationStage(this, jsii.String("PreProd"))
prod := NewMyApplicationStage(this, jsii.String("Prod"))

// Automated gate: the post step must succeed before promotion continues.
pipeline.AddStage(preprod, &AddStageOpts{
	Post: []step{
		pipelines.NewShellStep(jsii.String("Validate Endpoint"), &ShellStepProps{
			Commands: []*string{
				jsii.String("curl -Ssf https://my.webservice.com/"),
			},
		}),
	},
})
// Manual gate: a human approval is required before Prod deploys.
pipeline.AddStage(prod, &AddStageOpts{
	// []step (not []*step as in the original) for consistency with the
	// Post list above: steps are held as interface values.
	Pre: []step{
		pipelines.NewManualApprovalStep(jsii.String("PromoteToProd")),
	},
})

You can also specify steps to be executed at the stack level. To achieve this, you can specify the stack and step via the stackSteps property:

var pipeline codePipeline
// myStacksStage is a Stage that exposes its two stacks so per-stack steps
// (via the stackSteps property) can target them individually.
type myStacksStage struct {
	stage
	stack1 *stack
	stack2 *stack
}

// newMyStacksStage creates a Stage containing two empty stacks whose
// deployments can be individually gated via stackSteps.
func newMyStacksStage(scope construct, id *string, props stageProps) *myStacksStage {
	this := &myStacksStage{}
	newStage_Override(this, scope, id, props)
	// awscdk.NewStack is the exported constructor (the original read
	// awscdk.Newstack, which does not exist).
	this.stack1 = awscdk.NewStack(this, jsii.String("stack1"))
	this.stack2 = awscdk.NewStack(this, jsii.String("stack2"))
	return this
}
prod := NewMyStacksStage(this, jsii.String("Prod"))

pipeline.AddStage(prod, &AddStageOpts{
	// []*stackSteps to match the &stackSteps{} element pointers (the
	// original mixed a value-typed slice with pointer elements).
	StackSteps: []*stackSteps{
		&stackSteps{
			Stack: prod.stack1,
			// Executed before the stack is prepared
			Pre: []step{
				pipelines.NewManualApprovalStep(jsii.String("Pre-Stack Check")),
			},
			// Executed after the stack is prepared but before it is deployed
			ChangeSet: []step{
				pipelines.NewManualApprovalStep(jsii.String("ChangeSet Approval")),
			},
			// Executed after the stack is deployed
			Post: []step{
				pipelines.NewManualApprovalStep(jsii.String("Post-Deploy Check")),
			},
		},
		&stackSteps{
			Stack: prod.stack2,
			Post: []step{
				pipelines.NewManualApprovalStep(jsii.String("Post-Deploy Check")),
			},
		},
	},
})

If you specify multiple steps, they will execute in parallel by default. You can add dependencies between them if you wish to specify an order. To add a dependency, call step.addStepDependency():

firstStep := pipelines.NewManualApprovalStep(jsii.String("A"))
secondStep := pipelines.NewManualApprovalStep(jsii.String("B"))
secondStep.AddStepDependency(firstStep)

For convenience, Step.sequence() will take an array of steps and dependencies between adjacent steps, so that the whole list executes in order:

// Step A will depend on step B and step B will depend on step C
orderedSteps := pipelines.Step_Sequence([]step{
	pipelines.NewManualApprovalStep(jsii.String("A")),
	pipelines.NewManualApprovalStep(jsii.String("B")),
	pipelines.NewManualApprovalStep(jsii.String("C")),
})
Using CloudFormation Stack Outputs in approvals

Because many CloudFormation deployments result in the generation of resources with unpredictable names, validations have support for reading back CloudFormation Outputs after a deployment. This makes it possible to pass (for example) the generated URL of a load balancer to the test set.

To use Stack Outputs, expose the CfnOutput object you're interested in, and pass it to envFromCfnOutputs of the ShellStep:

var pipeline codePipeline
// myOutputStage exposes a CfnOutput (here, a load balancer address) so a
// post-deployment step can read its deployed value via envFromCfnOutputs.
type myOutputStage struct {
	stage
	loadBalancerAddress cfnOutput
}

// newMyOutputStage creates the stage and records the CfnOutput object whose
// deployed value the pipeline will read back after deployment.
func newMyOutputStage(scope construct, id *string, props stageProps) *myOutputStage {
	this := &myOutputStage{}
	newStage_Override(this, scope, id, props)
	this.loadBalancerAddress = awscdk.NewCfnOutput(this, jsii.String("Output"), &CfnOutputProps{
		Value: jsii.String("value"),
	})
	return this
}

lbApp := NewMyOutputStage(this, jsii.String("MyApp"))
pipeline.AddStage(lbApp, &AddStageOpts{
	Post: []step{
		pipelines.NewShellStep(jsii.String("HitEndpoint"), &ShellStepProps{
			EnvFromCfnOutputs: map[string]*cfnOutput{
				// Make the load balancer address available as $URL inside the commands
				"URL": lbApp.loadBalancerAddress,
			},
			Commands: []*string{
				jsii.String("curl -Ssf $URL"),
			},
		}),
	},
})
Running scripts compiled during the synth step

As part of a validation, you probably want to run a test suite that's more elaborate than what can be expressed in a couple of lines of shell script. You can bring additional files into the shell script validation by supplying the input or additionalInputs property of ShellStep. The input can be produced by the Synth step, or come from a source or any other build step.

Here's an example that captures an additional output directory in the synth step and runs tests from there:

var synth shellStep

stage := NewMyApplicationStage(this, jsii.String("MyApplication"))
pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Use the synth step declared above (the original read
	// "Synth: Synth", an undefined identifier).
	Synth: synth,
})

pipeline.AddStage(stage, &AddStageOpts{
	Post: []step{
		pipelines.NewShellStep(jsii.String("Approve"), &ShellStepProps{
			// Use the contents of the 'integ' directory from the synth step as the input
			Input: synth.AddOutputDirectory(jsii.String("integ")),
			Commands: []*string{
				jsii.String("cd integ && ./run.sh"),
			},
		}),
	},
})
Customizing CodeBuild Projects

CDK pipelines will generate CodeBuild projects for each ShellStep you use, and it will also generate CodeBuild projects to publish assets and perform the self-mutation of the pipeline. To control the various aspects of the CodeBuild projects that get generated, use a CodeBuildStep instead of a ShellStep. This class has a number of properties that allow you to customize various aspects of the projects:

var vpc vpc
var mySecurityGroup securityGroup

pipelines.NewCodeBuildStep(jsii.String("Synth"), &CodeBuildStepProps{
	// ...standard ShellStep props...
	Commands: []*string{
	},
	Env: map[string]interface{}{
	},

	// If you are using a CodeBuildStep explicitly, set the 'cdk.out' directory
	// to be the synth step's output.
	PrimaryOutputDirectory: jsii.String("cdk.out"),

	// Control the name of the project
	ProjectName: jsii.String("MyProject"),

	// Control parts of the BuildSpec other than the regular 'build' and 'install' commands
	PartialBuildSpec: codebuild.BuildSpec_FromObject(map[string]interface{}{
		"version": jsii.String("0.2"),
	}),

	// Control the build environment
	BuildEnvironment: &BuildEnvironment{
		ComputeType: codebuild.ComputeType_LARGE,
		Privileged: jsii.Boolean(true),
	},
	Timeout: awscdk.Duration_Minutes(jsii.Number(90)),
	FileSystemLocations: []iFileSystemLocation{
		codebuild.FileSystemLocation_Efs(&EfsFileSystemLocationProps{
			Identifier: jsii.String("myidentifier2"),
			Location: jsii.String("myclodation.mydnsroot.com:/loc"),
			MountPoint: jsii.String("/media"),
			MountOptions: jsii.String("opts"),
		}),
	},

	// Control Elastic Network Interface creation
	Vpc: vpc,
	SubnetSelection: &SubnetSelection{
		SubnetType: ec2.SubnetType_PRIVATE_WITH_EGRESS,
	},
	SecurityGroups: []iSecurityGroup{
		mySecurityGroup,
	},

	// Control caching
	Cache: codebuild.Cache_Bucket(s3.NewBucket(this, jsii.String("Cache"))),

	// Additional policy statements for the execution role
	RolePolicyStatements: []policyStatement{
		iam.NewPolicyStatement(&PolicyStatementProps{
		}),
	},
})

You can also configure defaults for all CodeBuild projects by passing codeBuildDefaults, or just for the synth, asset publishing, and self-mutation projects by passing synthCodeBuildDefaults, assetPublishingCodeBuildDefaults, or selfMutationCodeBuildDefaults:

import "github.com/aws/aws-cdk-go/awscdk"

var vpc vpc
var mySecurityGroup securityGroup


pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Standard CodePipeline properties
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Defaults for all CodeBuild projects
	CodeBuildDefaults: &CodeBuildOptions{
		// Prepend commands and configuration to all projects
		PartialBuildSpec: codebuild.BuildSpec_FromObject(map[string]interface{}{
			"version": jsii.String("0.2"),
		}),

		// Control the build environment
		BuildEnvironment: &BuildEnvironment{
			ComputeType: codebuild.ComputeType_LARGE,
		},

		// Control Elastic Network Interface creation
		Vpc: vpc,
		SubnetSelection: &SubnetSelection{
			SubnetType: ec2.SubnetType_PRIVATE_WITH_EGRESS,
		},
		SecurityGroups: []iSecurityGroup{
			mySecurityGroup,
		},

		// Additional policy statements for the execution role
		RolePolicy: []policyStatement{
			iam.NewPolicyStatement(&PolicyStatementProps{
			}),
		},

		// Information about logs
		Logging: &LoggingOptions{
			CloudWatch: &CloudWatchLoggingOptions{
				LogGroup: awscdk.Aws_logs.NewLogGroup(this, jsii.String("MyLogGroup")),
			},
			S3: &S3LoggingOptions{
				Bucket: s3.NewBucket(this, jsii.String("LogBucket")),
			},
		},
	},

	SynthCodeBuildDefaults: &CodeBuildOptions{
	},
	AssetPublishingCodeBuildDefaults: &CodeBuildOptions{
	},
	SelfMutationCodeBuildDefaults: &CodeBuildOptions{
	},
})
Arbitrary CodePipeline actions

If you want to add a type of CodePipeline action to the CDK Pipeline that doesn't have a matching class yet, you can define your own step class that extends Step and implements ICodePipelineActionFactory.

Here's an example that adds a Jenkins step:

// myJenkinsStep adapts a Jenkins CodePipeline action to the CDK Pipelines
// Step API (it would also implement ICodePipelineActionFactory).
type myJenkinsStep struct {
	step
	// provider and input are stored at construction time because
	// produceAction reads them as this.provider / this.input; the original
	// example never declared or assigned these fields.
	provider jenkinsProvider
	input    fileSet
}

// newMyJenkinsStep records the Jenkins provider and the input file set for
// use when the action is produced.
func newMyJenkinsStep(provider jenkinsProvider, input fileSet) *myJenkinsStep {
	this := &myJenkinsStep{}
	pipelines.NewStep_Override(this, jsii.String("MyJenkinsStep"))
	this.provider = provider
	this.input = input

	// This is necessary if your step accepts parameters, like environment variables,
	// that may contain outputs from other steps. It doesn't matter what the
	// structure is, as long as it contains the values that may contain outputs.
	this.DiscoverReferencedOutputs(map[string]map[string]interface{}{
		"env": map[string]interface{}{
		},
	})
	return this
}

func (this *myJenkinsStep) produceAction(stage iStage, options produceActionOptions) codePipelineActionFactoryResult {

	// This is where you control what type of Action gets added to the
	// CodePipeline
	*stage.AddAction(cpactions.NewJenkinsAction(&JenkinsActionProps{
		// Copy 'actionName' and 'runOrder' from the options
		ActionName: options.ActionName,
		RunOrder: options.RunOrder,

		// Jenkins-specific configuration
		Type: cpactions.JenkinsActionType_TEST,
		JenkinsProvider: this.provider,
		ProjectName: jsii.String("MyJenkinsProject"),

		// Translate the FileSet into a codepipeline.Artifact
		Inputs: []artifact{
			options.Artifacts.ToCodePipeline(this.input),
		},
	}))

	return &codePipelineActionFactoryResult{
		RunOrdersConsumed: jsii.Number(1),
	}
}

Another example, adding a lambda step referencing outputs from a stack:

type myLambdaStep struct {
	step
	stackOutputReference stackOutputReference
}

func newMyLambdaStep(fn function, stackOutput cfnOutput) *myLambdaStep {
	this := &myLambdaStep{}
	pipelines.NewStep_Override(this, jsii.String("MyLambdaStep"))
	this.stackOutputReference = pipelines.stackOutputReference_FromCfnOutput(stackOutput)
	return this
}

func (this *myLambdaStep) produceAction(stage iStage, options produceActionOptions) codePipelineActionFactoryResult {

	*stage.AddAction(cpactions.NewLambdaInvokeAction(&LambdaInvokeActionProps{
		ActionName: options.ActionName,
		RunOrder: options.RunOrder,
		// Map the reference to the variable name the CDK has generated for you.
		UserParameters: map[string]interface{}{
			"stackOutput": options.stackOutputsMap.toCodePipeline(this.stackOutputReference),
		},
		Lambda: this.fn,
	}))

	return &codePipelineActionFactoryResult{
		RunOrdersConsumed: jsii.Number(1),
	}
}

public get consumedStackOutputs(): pipelines.StackOutputReference[] {
    return [this.stackOutputReference];
  }
Using an existing AWS Codepipeline

If you wish to use an existing CodePipeline.Pipeline while using the modern API's methods and classes, you can pass in the existing CodePipeline.Pipeline to be built upon instead of having the pipelines.CodePipeline construct create a new CodePipeline.Pipeline. This also gives you more direct control over the underlying CodePipeline.Pipeline construct if the way the modern API creates it doesn't allow for desired configurations. Use CodePipelineFileSet to convert CodePipeline artifacts into CDK Pipelines file sets, that can be used everywhere a file set or file set producer is expected.

Here's an example of passing in an existing pipeline and using a source that's already in the pipeline:

var codePipeline pipeline


sourceArtifact := codepipeline.NewArtifact(jsii.String("MySourceArtifact"))

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	CodePipeline: codePipeline,
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineFileSet_FromArtifact(sourceArtifact),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

If your existing pipeline already provides a synth step, pass the existing artifact in place of the synth step:

var codePipeline pipeline


buildArtifact := codepipeline.NewArtifact(jsii.String("MyBuildArtifact"))

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	CodePipeline: codePipeline,
	Synth: pipelines.CodePipelineFileSet_FromArtifact(buildArtifact),
})

Note that if you provide an existing pipeline, you cannot provide values for pipelineName, crossAccountKeys, reuseCrossRegionSupportStacks, or role because those values are passed in directly to the underlying codepipeline.Pipeline.

Using Docker in the pipeline

Docker can be used in 3 different places in the pipeline:

  • If you are using Docker image assets in your application stages: Docker will run in the asset publishing projects.
  • If you are using Docker image assets in your stack (for example as images for your CodeBuild projects): Docker will run in the self-mutate project.
  • If you are using Docker to bundle file assets anywhere in your project (for example, if you are using such construct libraries as aws-cdk-lib/aws-lambda-nodejs): Docker will run in the synth project.

For the first case, you don't need to do anything special. For the other two cases, you need to make sure that privileged mode is enabled on the correct CodeBuild projects, so that Docker can run correctly. The following sections describe how to do that.

You may also need to authenticate to Docker registries to avoid being throttled. See the section Authenticating to Docker registries below for information on how to do that.

Using Docker image assets in the pipeline

If your PipelineStack is using Docker image assets (as opposed to the application stacks the pipeline is deploying), for example by the use of LinuxBuildImage.fromAsset(), you need to pass dockerEnabledForSelfMutation: true to the pipeline. For example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the pipeline uses Docker image assets
	DockerEnabledForSelfMutation: jsii.Boolean(true),
})

pipeline.AddWave(jsii.String("MyWave"), &WaveOptions{
	Post: []step{
		pipelines.NewCodeBuildStep(jsii.String("RunApproval"), &CodeBuildStepProps{
			Commands: []*string{
				jsii.String("command-from-image"),
			},
			BuildEnvironment: &BuildEnvironment{
				// The user of a Docker image asset in the pipeline requires turning on
				// 'dockerEnabledForSelfMutation'.
				BuildImage: codebuild.LinuxBuildImage_FromAsset(this, jsii.String("Image"), &DockerImageAssetProps{
					Directory: jsii.String("./docker-image"),
				}),
			},
		}),
	},
})

Important: You must turn on the dockerEnabledForSelfMutation flag, commit and allow the pipeline to self-update before adding the actual Docker asset.

Using bundled file assets

If you are using asset bundling anywhere (such as automatically done for you if you add a construct like aws-cdk-lib/aws-lambda-nodejs), you need to pass dockerEnabledForSynth: true to the pipeline. For example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the application uses bundled file assets
	DockerEnabledForSynth: jsii.Boolean(true),
})

Important: You must turn on the dockerEnabledForSynth flag, commit and allow the pipeline to self-update before adding the actual Docker asset.

Authenticating to Docker registries

You can specify credentials to use for authenticating to Docker registries as part of the pipeline definition. This can be useful if any Docker image assets — in the pipeline or any of the application stages — require authentication, either due to being in a different environment (e.g., ECR repo) or to avoid throttling (e.g., DockerHub).

dockerHubSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("DHSecret"), jsii.String("arn:aws:..."))
customRegSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("CRSecret"), jsii.String("arn:aws:..."))
repo1 := ecr.Repository_FromRepositoryArn(this, jsii.String("Repo"), jsii.String("arn:aws:ecr:eu-west-1:0123456789012:repository/Repo1"))
repo2 := ecr.Repository_FromRepositoryArn(this, jsii.String("Repo"), jsii.String("arn:aws:ecr:eu-west-1:0123456789012:repository/Repo2"))

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	DockerCredentials: []dockerCredential{
		pipelines.*dockerCredential_DockerHub(dockerHubSecret),
		pipelines.*dockerCredential_CustomRegistry(jsii.String("dockerregistry.example.com"), customRegSecret),
		pipelines.*dockerCredential_Ecr([]iRepository{
			repo1,
			repo2,
		}),
	},
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

For authenticating to Docker registries that require a username and password combination (like DockerHub), create a Secrets Manager Secret with fields named username and secret, and import it (the field names can be customized).

Authentication to ECR repositories is done using the execution role of the relevant CodeBuild job. Both types of credentials can be provided with an optional role to assume before requesting the credentials.

By default, the Docker credentials provided to the pipeline will be available to the Synth, Self-Update, and Asset Publishing actions within the pipeline. The scope of the credentials can be limited via the DockerCredentialUsage option.

dockerHubSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("DHSecret"), jsii.String("arn:aws:..."))
// Only the image asset publishing actions will be granted read access to the secret.
creds := pipelines.DockerCredential_DockerHub(dockerHubSecret, &ExternalDockerCredentialOptions{
	Usages: []dockerCredentialUsage{
		pipelines.*dockerCredentialUsage_ASSET_PUBLISHING,
	},
})

CDK Environment Bootstrapping

An environment is an (account, region) pair where you want to deploy a CDK stack (see Environments in the CDK Developer Guide). In a Continuous Deployment pipeline, there are at least two environments involved: the environment where the pipeline is provisioned, and the environment where you want to deploy the application (or different stages of the application). These can be the same, though best practices recommend you isolate your different application stages from each other in different AWS accounts or regions.

Before you can provision the pipeline, you have to bootstrap the environment you want to create it in. If you are deploying your application to different environments, you also have to bootstrap those and be sure to add a trust relationship.

After you have bootstrapped an environment and created a pipeline that deploys to it, it's important that you don't delete the stack or change its Qualifier, or future deployments to this environment will fail. If you want to upgrade the bootstrap stack to a newer version, do that by updating it in-place.

This library requires the modern bootstrapping stack which has been updated specifically to support cross-account continuous delivery.

If you are using CDKv2, you do not need to do anything else. Modern bootstrapping and modern stack synthesis (also known as "default stack synthesis") is the default.

If you are using CDKv1, you need to opt in to modern bootstrapping and modern stack synthesis using a feature flag. Make sure cdk.json includes:

{
  "context": {
    "@aws-cdk/core:newStyleStackSynthesis": true
  }
}

And be sure to run cdk bootstrap in the same directory as the cdk.json file.

To bootstrap an environment for provisioning the pipeline:

$ npx cdk bootstrap \
    [--profile admin-profile-1] \
    --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
    aws://111111111111/us-east-1

To bootstrap a different environment for deploying CDK applications into using a pipeline in account 111111111111:

$ npx cdk bootstrap \
    [--profile admin-profile-2] \
    --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
    --trust 111111111111 \
    aws://222222222222/us-east-2

If you only want to trust an account to do lookups (e.g., when your CDK application has a Vpc.fromLookup() call), use the option --trust-for-lookup:

$ npx cdk bootstrap \
    [--profile admin-profile-2] \
    --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \
    --trust-for-lookup 111111111111 \
    aws://222222222222/us-east-2

These command lines explained:

  • npx: means to use the CDK CLI from the current NPM install. If you are using a global install of the CDK CLI, leave this out.
  • --profile: should indicate a profile with administrator privileges that has permissions to provision a pipeline in the indicated account. You can leave this flag out if either the AWS default credentials or the AWS_* environment variables confer these permissions.
  • --cloudformation-execution-policies: ARN of the managed policy that future CDK deployments should execute with. By default this is AdministratorAccess, but if you also specify the --trust flag to give another Account permissions to deploy into the current account, you must specify a value here.
  • --trust: indicates which other account(s) should have permissions to deploy CDK applications into this account. In this case we indicate the Pipeline's account, but you could also use this for developer accounts (don't do that for production application accounts though!).
  • --trust-for-lookup: gives a more limited set of permissions to the trusted account, only allowing it to look up values such as availability zones, EC2 images and VPCs. --trust-for-lookup does not give permissions to modify anything in the account. Note that --trust implies --trust-for-lookup, so you don't need to specify the same account twice.
  • aws://222222222222/us-east-2: the account and region we're bootstrapping.

Be aware that anyone who has access to the trusted Accounts effectively has all permissions conferred by the configured CloudFormation execution policies, allowing them to do things like read arbitrary S3 buckets and create arbitrary infrastructure in the bootstrapped account. Restrict the list of accounts trusted via --trust, or restrict the policies configured by --cloudformation-execution-policies.


Security tip: we recommend that you use administrative credentials to an account only to bootstrap it and provision the initial pipeline. Otherwise, access to administrative credentials should be dropped as soon as possible.


On the use of AdministratorAccess: The use of the AdministratorAccess policy ensures that your pipeline can deploy every type of AWS resource to your account. Make sure you trust all the code and dependencies that make up your CDK app. Check with the appropriate department within your organization to decide on the proper policy to use.

If your policy includes permissions to create or attach permissions to a role, developers can escalate their privileges with more permissive permissions. Thus, we recommend implementing a permissions boundary in the CDK Execution role. To do this, you can bootstrap with the --template option with a customized template that contains a permission boundary.

Migrating from old bootstrap stack

The bootstrap stack is a CloudFormation stack in your account named CDKToolkit that provisions a set of resources required for the CDK to deploy into that environment.

The "new" bootstrap stack (obtained by running cdk bootstrap with CDK_NEW_BOOTSTRAP=1) is slightly more elaborate than the "old" stack. It contains:

  • An S3 bucket and ECR repository with predictable names, so that we can reference assets in these storage locations without the use of CloudFormation template parameters.
  • A set of roles with permissions to access these asset locations and to execute CloudFormation, assumable from whatever accounts you specify under --trust.

It is possible and safe to migrate from the old bootstrap stack to the new bootstrap stack. This will create a new S3 file asset bucket in your account and orphan the old bucket. You should manually delete the orphaned bucket after you are sure you have redeployed all CDK applications and there are no more references to the old asset bucket.

Considerations around Running at Scale

If you are planning to run pipelines for more than a hundred repos deploying across multiple regions, then you will want to consider reusing both artifacts buckets and cross-region replication buckets.

In a situation like this, you will want to have a separate CDK app / dedicated repo which creates and manages the buckets which will be shared by the pipelines of all your other apps. Note that this app must NOT be using the shared buckets because of chicken & egg issues.

The following code assumes you have created and are managing your buckets in the aforementioned separate cdk repo and are just importing them for use in one of your (many) pipelines.

var sharedXRegionUsWest1BucketArn string
var sharedXRegionUsWest1KeyArn string

var sharedXRegionUsWest2BucketArn string
var sharedXRegionUsWest2KeyArn string


usWest1Bucket := s3.Bucket_FromBucketAttributes(*scope, jsii.String("UsEast1Bucket"), &BucketAttributes{
	BucketArn: sharedXRegionUsWest1BucketArn,
	EncryptionKey: kms.Key_FromKeyArn(scope, jsii.String("UsEast1BucketKeyArn"), sharedXRegionUsWest1BucketArn),
})

usWest2Bucket := s3.Bucket_FromBucketAttributes(*scope, jsii.String("UsWest2Bucket"), &BucketAttributes{
	BucketArn: sharedXRegionUsWest2BucketArn,
	EncryptionKey: kms.Key_*FromKeyArn(scope, jsii.String("UsWest2BucketKeyArn"), sharedXRegionUsWest2KeyArn),
})

crossRegionReplicationBuckets := map[string]iBucket{
	"us-west-1": usWest1Bucket,
	"us-west-2": usWest2Bucket,
}

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
	 // Use shared buckets.
	CrossRegionReplicationBuckets: CrossRegionReplicationBuckets,
})

Context Lookups

You might be using CDK constructs that need to look up runtime context, which is information from the target AWS Account and Region the CDK needs to synthesize CloudFormation templates appropriate for that environment. Examples of this kind of context lookups are the number of Availability Zones available to you, a Route53 Hosted Zone ID, or the ID of an AMI in a given region. This information is automatically looked up when you run cdk synth.

By default, a cdk synth performed in a pipeline will not have permissions to perform these lookups, and the lookups will fail. This is by design.

Our recommended way of using lookups is by running cdk synth on the developer workstation and checking in the cdk.context.json file, which contains the results of the context lookups. This will make sure your synthesized infrastructure is consistent and repeatable. If you do not commit cdk.context.json, the results of the lookups may suddenly be different in unexpected ways, and even produce results that cannot be deployed or will cause data loss. To give an account permissions to perform lookups against an environment, without being able to deploy to it and make changes, run cdk bootstrap --trust-for-lookup=<account>.

If you want to use lookups directly from the pipeline, you either need to accept the risk of nondeterminism, or make sure you save and load the cdk.context.json file somewhere between synth runs. Finally, you should give the synth CodeBuild execution role permissions to assume the bootstrapped lookup roles. As an example, doing so would look like this:

pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewCodeBuildStep(jsii.String("Synth"), &CodeBuildStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("..."),
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
			jsii.String("..."),
		},
		RolePolicyStatements: []policyStatement{
			iam.NewPolicyStatement(&PolicyStatementProps{
				Actions: []*string{
					jsii.String("sts:AssumeRole"),
				},
				Resources: []*string{
					jsii.String("*"),
				},
				Conditions: map[string]interface{}{
					"StringEquals": map[string]*string{
						"iam:ResourceTag/aws-cdk:bootstrap-role": jsii.String("lookup"),
					},
				},
			}),
		},
	}),
})

The above example requires that the target environments have all been bootstrapped with bootstrap stack version 8, released with CDK CLI 1.114.0.

Security Considerations

It's important to stay safe while employing Continuous Delivery. The CDK Pipelines library comes with secure defaults to the best of our ability, but by its very nature the library cannot take care of everything.

We therefore expect you to mind the following:

  • Maintain dependency hygiene and vet 3rd-party software you use. Any software you run on your build machine has the ability to change the infrastructure that gets deployed. Be careful with the software you depend on.
  • Use dependency locking to prevent accidental upgrades! The default CdkSynths that come with CDK Pipelines will expect package-lock.json and yarn.lock to ensure your dependencies are the ones you expect.
  • CDK Pipelines runs on resources created in your own account, and the configuration of those resources is controlled by developers submitting code through the pipeline. Therefore, CDK Pipelines by itself cannot protect against malicious developers trying to bypass compliance checks. If your threat model includes developers writing CDK code, you should have external compliance mechanisms in place like AWS CloudFormation Hooks (preventive) or AWS Config (reactive) that the CloudFormation Execution Role does not have permissions to disable.
  • Credentials to production environments should be short-lived. After bootstrapping and the initial pipeline provisioning, there is no more need for developers to have access to any of the account credentials; all further changes can be deployed through git. Avoid the chances of credentials leaking by not having them in the first place!
Confirm permissions broadening

To keep tabs on the security impact of changes going out through your pipeline, you can insert a security check before any stage deployment. This security check will check if the upcoming deployment would add any new IAM permissions or security group rules, and if so pause the pipeline and require you to confirm the changes.

The security check will appear as two distinct actions in your pipeline: first a CodeBuild project that runs cdk diff on the stage that's about to be deployed, followed by a Manual Approval action that pauses the pipeline. If it so happens that no new IAM permissions or security group rules will be added by the deployment, the manual approval step is automatically satisfied. The pipeline will look like this:

Pipeline
├── ...
├── MyApplicationStage
│    ├── MyApplicationSecurityCheck       // Security Diff Action
│    ├── MyApplicationManualApproval      // Manual Approval Action
│    ├── Stack.Prepare
│    └── Stack.Deploy
└── ...

You can insert the security check by using a ConfirmPermissionsBroadening step:

var pipeline codePipeline

stage := NewMyApplicationStage(this, jsii.String("MyApplication"))
pipeline.AddStage(stage, &AddStageOpts{
	Pre: []step{
		pipelines.NewConfirmPermissionsBroadening(jsii.String("Check"), &PermissionsBroadeningCheckProps{
			Stage: *Stage,
		}),
	},
})

To get notified when there is a change that needs your manual approval, create an SNS Topic, subscribe your own email address, and pass it in as the notificationTopic property:

var pipeline codePipeline

topic := sns.NewTopic(this, jsii.String("SecurityChangesTopic"))
topic.AddSubscription(subscriptions.NewEmailSubscription(jsii.String("test@email.com")))

stage := NewMyApplicationStage(this, jsii.String("MyApplication"))
pipeline.AddStage(stage, &AddStageOpts{
	Pre: []step{
		pipelines.NewConfirmPermissionsBroadening(jsii.String("Check"), &PermissionsBroadeningCheckProps{
			Stage: *Stage,
			NotificationTopic: topic,
		}),
	},
})

Note: Manual Approvals notifications only apply when an application has security check enabled.

Using a different deployment engine

CDK Pipelines supports multiple deployment engines, but this module vends a construct for only one such engine: AWS CodePipeline. It is also possible to use CDK Pipelines to build pipelines backed by other deployment engines.

Here is a list of CDK Libraries that integrate CDK Pipelines with alternative deployment engines:

Troubleshooting

Here are some common errors you may encounter while using this library.

Pipeline: Internal Failure

If you see the following error during deployment of your pipeline:

CREATE_FAILED  | AWS::CodePipeline::Pipeline | Pipeline/Pipeline
Internal Failure

There's something wrong with your GitHub access token. It might be missing, or not have the right permissions to access the repository you're trying to access.

Key: Policy contains a statement with one or more invalid principals

If you see the following error during deployment of your pipeline:

CREATE_FAILED | AWS::KMS::Key | Pipeline/Pipeline/ArtifactsBucketEncryptionKey
Policy contains a statement with one or more invalid principals.

One of the target (account, region) environments has not been bootstrapped with the new bootstrap stack. Check your target environments and make sure they are all bootstrapped.

Message: no matching base directory path found for cdk.out

If you see this error during the Synth step, it means that CodeBuild is expecting to find a cdk.out directory in the root of your CodeBuild project, but the directory wasn't there. There are two common causes for this:

  • cdk synth is not being executed: cdk synth used to be run implicitly for you, but you now have to explicitly include the command. For NPM-based projects, add npx cdk synth to the end of the commands property, for other languages add npm install -g aws-cdk and cdk synth.
  • Your CDK project lives in a subdirectory: you added a cd <somedirectory> command to the list of commands; don't forget to tell the ScriptStep about the different location of cdk.out, by passing primaryOutputDirectory: '<somedirectory>/cdk.out'.
is in ROLLBACK_COMPLETE state and can not be updated

If you see the following error during execution of your pipeline:

Stack ... is in ROLLBACK_COMPLETE state and can not be updated. (Service:
AmazonCloudFormation; Status Code: 400; Error Code: ValidationError; Request
ID: ...)

The stack failed its previous deployment, and is in a non-retryable state. Go into the CloudFormation console, delete the stack, and retry the deployment.

Cannot find module 'xxxx' or its corresponding type declarations

You may see this if you are using TypeScript or other NPM-based languages, when using NPM 7 on your workstation (where you generate package-lock.json) and NPM 6 on the CodeBuild image used for synthesizing.

It looks like NPM 7 has started writing less information to package-lock.json, leading NPM 6 reading that same file to not install all required packages anymore.

Make sure you are using the same NPM version everywhere, either downgrade your workstation's version or upgrade the CodeBuild version.

Cannot find module '.../check-node-version.js' (MODULE_NOT_FOUND)

The above error may be produced by npx when executing the CDK CLI, or any project that uses the AWS SDK for JavaScript, without the target application having been installed yet. For example, it can be triggered by npx cdk synth if aws-cdk is not in your package.json.

Work around this by either installing the target application using NPM before running npx, or set the environment variable NPM_CONFIG_UNSAFE_PERM=true.

Cannot connect to the Docker daemon at unix:///var/run/docker.sock

If, in the 'Synth' action (inside the 'Build' stage) of your pipeline, you get an error like this:

stderr: docker: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?.
See 'docker run --help'.

It means that the AWS CodeBuild project for 'Synth' is not configured to run in privileged mode, which prevents Docker builds from happening. This typically happens if you use a CDK construct that bundles assets using tools run via Docker, like aws-lambda-nodejs, aws-lambda-python, aws-lambda-go and others.

Make sure you set the privileged environment variable to true in the synth definition:

sourceArtifact := codepipeline.NewArtifact()
cloudAssemblyArtifact := codepipeline.NewArtifact()
pipeline := pipelines.NewCdkPipeline(this, jsii.String("MyPipeline"), &cdkPipelineProps{
	cloudAssemblyArtifact: cloudAssemblyArtifact,
	synthAction: pipelines.simpleSynthAction_StandardNpmSynth(&standardNpmSynthOptions{
		sourceArtifact: sourceArtifact,
		cloudAssemblyArtifact: cloudAssemblyArtifact,
		environment: &BuildEnvironment{
			Privileged: jsii.Boolean(true),
		},
	}),
})

After turning on privilegedMode: true, you will need to do a one-time manual cdk deploy of your pipeline to get it going again (as with a broken 'synth' the pipeline will not be able to self update to the right state).

Not authorized to perform sts:AssumeRole on arn:aws:iam:::role/-lookup-role-*

You may get an error like the following in the Synth step:

Could not assume role in target account using current credentials (which are for account 111111111111). User:
arn:aws:sts::111111111111:assumed-role/PipelineStack-PipelineBuildSynthCdkBuildProje-..../AWSCodeBuild-....
is not authorized to perform: sts:AssumeRole on resource:
arn:aws:iam::222222222222:role/cdk-hnb659fds-lookup-role-222222222222-us-east-1.
Please make sure that this role exists in the account. If it doesn't exist, (re)-bootstrap the environment with
the right '--trust', using the latest version of the CDK CLI.

This is a sign that the CLI is trying to do Context Lookups during the Synth step, which are failing because it cannot assume the right role. We recommend you don't rely on Context Lookups in the pipeline at all, and commit a file called cdk.context.json with the right lookup values in it to source control.

If you do want to do lookups in the pipeline, the cause is one of the following:

  • The target environment has not been bootstrapped; OR
  • The target environment has been bootstrapped without the right --trust relationship; OR
  • The CodeBuild execution role does not have permissions to call sts:AssumeRole.

See the section called Context Lookups for more information on using this feature.

IAM policies: Cannot exceed quota for PoliciesPerRole / Maximum policy size exceeded

This happens as a result of having a lot of targets in the Pipeline: the IAM policies that get generated enumerate all required roles and grow too large.

Make sure you are on version 2.26.0 or higher, and that your cdk.json contains the following:

{
  "context": {
    "aws-cdk-lib/aws-iam:minimizePolicies": true
  }
}
S3 error: Access Denied

An "S3 Access Denied" error can have two causes:

  • Asset hashes have changed, but self-mutation has been disabled in the pipeline.
  • You have deleted and recreated the bootstrap stack, or changed its qualifier.
Self-mutation step has been removed

Some constructs, such as EKS clusters, generate nested stacks. When CloudFormation tries to deploy those stacks, it may fail with this error:

S3 error: Access Denied For more information check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html

This happens because the pipeline is not self-mutating and, as a consequence, the FileAssetX build projects get out-of-sync with the generated templates. To fix this, make sure the selfMutating property is set to true:

cloudAssemblyArtifact := codepipeline.NewArtifact()
pipeline := pipelines.NewCdkPipeline(this, jsii.String("MyPipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(true),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})
Bootstrap roles have been renamed or recreated

While attempting to deploy an application stage, the "Prepare" or "Deploy" stage may fail with a cryptic error like:

Action execution failed Access Denied (Service: Amazon S3; Status Code: 403; Error Code: AccessDenied; Request ID: 0123456ABCDEFGH; S3 Extended Request ID: 3hWcrVkhFGxfiMb/rTJO0Bk7Qn95x5ll4gyHiFsX6Pmk/NT+uX9+Z1moEcfkL7H3cjH7sWZfeD0=; Proxy: null)

This generally indicates that the roles necessary to deploy have been deleted (or deleted and re-created); for example, if the bootstrap stack has been deleted and re-created, this scenario will happen. Under the hood, the resources that rely on these roles (e.g., cdk-$qualifier-deploy-role-$account-$region) point to different canonical IDs than the recreated versions of these roles, which causes the errors. There are no simple solutions to this issue, and for that reason we strongly recommend that bootstrap stacks not be deleted and re-created once created.

The most automated way to solve the issue is to introduce a secondary bootstrap stack. By changing the qualifier that the pipeline stack looks for, a change will be detected and the impacted policies and resources will be updated. A hypothetical recovery workflow would look something like this:

  • First, for all impacted environments, create a secondary bootstrap stack:
$ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \
    --qualifier random1234 \
    --toolkit-stack-name CDKToolkitTemp \
    aws://111111111111/us-east-1
awscdk.NewStack(this, jsii.String("MyStack"), &StackProps{
	// Update this qualifier to match the one used above.
	Synthesizer: cdk.NewDefaultStackSynthesizer(&DefaultStackSynthesizerProps{
		Qualifier: jsii.String("random1234"),
	}),
})
  • Deploy the updated stacks. This will update the stacks to use the roles created in the new bootstrap stack.

  • (Optional) Restore back to the original state:

    • Revert the change made in step #2 above
    • Re-deploy the pipeline to use the original qualifier.
    • Delete the temporary bootstrap stack(s)
Manual Alternative

Alternatively, the errors can be resolved by finding each impacted resource and policy, and correcting the policies by replacing the canonical IDs (e.g., AROAYBRETNYCYV6ZF2R93) with the appropriate ARNs. As an example, the KMS encryption key policy for the artifacts bucket may have a statement that looks like the following:

{
  "Effect" : "Allow",
  "Principal" : {
    // "AWS" : "AROAYBRETNYCYV6ZF2R93"  // Indicates this issue; replace this value
    "AWS": "arn:aws:iam::0123456789012:role/cdk-hnb659fds-deploy-role-0123456789012-eu-west-1", // Correct value
  },
  "Action" : [ "kms:Decrypt", "kms:DescribeKey" ],
  "Resource" : "*"
}

Any resource or policy that references the qualifier (hnb659fds by default) will need to be updated.

This CDK CLI is not compatible with the CDK library used by your application

The CDK CLI version used in your pipeline is too old to read the Cloud Assembly produced by your CDK app.

Most likely this happens in the SelfMutate action, you are passing the cliVersion parameter to control the version of the CDK CLI, and you just updated the CDK framework version that your application uses. You either forgot to change the cliVersion parameter, or changed the cliVersion in the same commit in which you changed the framework version. Because a change to the pipeline settings needs a successful run of the SelfMutate step to be applied, the next iteration of the SelfMutate step still executes with the old CLI version, and that old CLI version is not able to read the cloud assembly produced by the new framework version.

Solution: change the cliVersion first, commit, push and deploy, and only then change the framework version.

We recommend you avoid specifying the cliVersion parameter at all. By default the pipeline will use the latest CLI version, which will support all cloud assembly versions.

Using Drop-in Docker Replacements

By default, the AWS CDK will build and publish Docker image assets using the docker command. However, by specifying the CDK_DOCKER environment variable, you can override the command that will be used to build and publish your assets.

In CDK Pipelines, the drop-in replacement for the docker command must be included in the CodeBuild environment and configured for your pipeline.

Adding to the default CodeBuild image

You can add a drop-in Docker replacement command to the default CodeBuild environment by adding install-phase commands that encode how to install your tooling and by adding the CDK_DOCKER environment variable to your build environment.

var source iFileSetProducer // the repository source
var synthCommands []*string // Commands to synthesize your app
var installCommands []*string
// Commands to install your toolchain

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Standard CodePipeline properties...
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: synthCommands,
	}),

	// Configure CodeBuild to use a drop-in Docker replacement.
	CodeBuildDefaults: &CodeBuildOptions{
		PartialBuildSpec: codebuild.BuildSpec_FromObject(map[string]interface{}{
			"phases": map[string]map[string][]*string{
				"install": map[string][]*string{
					// Add the shell commands to install your drop-in Docker
					// replacement to the CodeBuild environment.
					"commands": installCommands,
				},
			},
		}),
		BuildEnvironment: &BuildEnvironment{
			EnvironmentVariables: map[string]buildEnvironmentVariable{
				// Instruct the AWS CDK to use `drop-in-replacement` instead of
				// `docker` when building / publishing docker images.
				// e.g., `drop-in-replacement build . -f path/to/Dockerfile`
				"CDK_DOCKER": &buildEnvironmentVariable{
					"value": jsii.String("drop-in-replacement"),
				},
			},
		},
	},
})
Using a custom build image

If you're using a custom build image in CodeBuild, you can override the command the AWS CDK uses to build Docker images by providing CDK_DOCKER as an ENV in your Dockerfile or by providing the environment variable in the pipeline as shown below.

var source iFileSetProducer // the repository source
var synthCommands []*string
// Commands to synthesize your app

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Standard CodePipeline properties...
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: synthCommands,
	}),

	// Configure CodeBuild to use a drop-in Docker replacement.
	CodeBuildDefaults: &CodeBuildOptions{
		BuildEnvironment: &BuildEnvironment{
			// Provide a custom build image containing your toolchain and the
			// pre-installed replacement for the `docker` command.
			BuildImage: codebuild.LinuxBuildImage_FromDockerRegistry(jsii.String("your-docker-registry")),
			EnvironmentVariables: map[string]buildEnvironmentVariable{
				// If you haven't provided an `ENV` in your Dockerfile that overrides
				// `CDK_DOCKER`, then you must provide the name of the command that
				// the AWS CDK should run instead of `docker` here.
				"CDK_DOCKER": &buildEnvironmentVariable{
					"value": jsii.String("drop-in-replacement"),
				},
			},
		},
	},
})

Known Issues

There are some usability issues that are caused by underlying technology, and cannot be remedied by CDK at this point. They are reproduced here for completeness.

  • Console links to other accounts will not work: the AWS CodePipeline console will assume all links are relative to the current account. You will not be able to use the pipeline console to click through to a CloudFormation stack in a different account.
  • If a change set failed to apply, the pipeline must be restarted: if a change set failed to apply, it cannot be retried. The pipeline must be restarted from the top by clicking Release Change.
  • A stack that failed to create must be deleted manually: if a stack failed to create on the first attempt, you must delete it using the CloudFormation console before starting the pipeline again by clicking Release Change.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func CodeBuildStep_Sequence added in v2.4.0

func CodeBuildStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

func CodePipelineSource_Sequence added in v2.4.0

func CodePipelineSource_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

func CodePipeline_IsConstruct

func CodePipeline_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.

Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof`, and using this type-testing method instead.

Returns: true if `x` is an object created from a class which extends `Construct`.

func CodePipeline_IsPipeline added in v2.38.0

func CodePipeline_IsPipeline(x interface{}) *bool

Return whether the given object extends `PipelineBase`.

We do attribute detection since we can't reliably use 'instanceof'.

func ConfirmPermissionsBroadening_Sequence added in v2.4.0

func ConfirmPermissionsBroadening_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

func ManualApprovalStep_Sequence added in v2.4.0

func ManualApprovalStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

func NewArtifactMap_Override

func NewArtifactMap_Override(a ArtifactMap)

func NewCodeBuildStep_Override

func NewCodeBuildStep_Override(c CodeBuildStep, id *string, props *CodeBuildStepProps)

func NewCodePipelineSource_Override

func NewCodePipelineSource_Override(c CodePipelineSource, id *string)

func NewCodePipeline_Override

func NewCodePipeline_Override(c CodePipeline, scope constructs.Construct, id *string, props *CodePipelineProps)

func NewConfirmPermissionsBroadening_Override

func NewConfirmPermissionsBroadening_Override(c ConfirmPermissionsBroadening, id *string, props *PermissionsBroadeningCheckProps)

func NewDockerCredential_Override

func NewDockerCredential_Override(d DockerCredential, usages *[]DockerCredentialUsage)

func NewFileSet_Override

func NewFileSet_Override(f FileSet, id *string, producer Step)

func NewManualApprovalStep_Override

func NewManualApprovalStep_Override(m ManualApprovalStep, id *string, props *ManualApprovalStepProps)

func NewPipelineBase_Override

func NewPipelineBase_Override(p PipelineBase, scope constructs.Construct, id *string, props *PipelineBaseProps)

func NewShellStep_Override

func NewShellStep_Override(s ShellStep, id *string, props *ShellStepProps)

func NewStackOutputsMap_Override added in v2.60.0

func NewStackOutputsMap_Override(s StackOutputsMap, pipeline PipelineBase)

func NewStep_Override

func NewStep_Override(s Step, id *string)

func NewWave_Override

func NewWave_Override(w Wave, id *string, props *WaveProps)

func PipelineBase_IsConstruct

func PipelineBase_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.

Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof`, and using this type-testing method instead.

Returns: true if `x` is an object created from a class which extends `Construct`.

func PipelineBase_IsPipeline added in v2.38.0

func PipelineBase_IsPipeline(x interface{}) *bool

Return whether the given object extends `PipelineBase`.

We do attribute detection since we can't reliably use 'instanceof'.

func ShellStep_Sequence added in v2.4.0

func ShellStep_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

func Step_Sequence added in v2.4.0

func Step_Sequence(steps *[]Step) *[]Step

Define a sequence of steps to be executed in order.

If you need more fine-grained step ordering, use the `addStepDependency()` API. For example, if you want `secondStep` to occur after `firstStep`, call `secondStep.addStepDependency(firstStep)`.

Types

type AddStageOpts

type AddStageOpts struct {
	// Additional steps to run after all of the stacks in the stage.
	// Default: - No additional steps.
	//
	Post *[]Step `field:"optional" json:"post" yaml:"post"`
	// Additional steps to run before any of the stacks in the stage.
	// Default: - No additional steps.
	//
	Pre *[]Step `field:"optional" json:"pre" yaml:"pre"`
	// Instructions for stack level steps.
	// Default: - No additional instructions.
	//
	StackSteps *[]*StackSteps `field:"optional" json:"stackSteps" yaml:"stackSteps"`
}

Options to pass to `addStage`.

Example:

var pipeline codePipeline

preprod := NewMyApplicationStage(this, jsii.String("PreProd"))
prod := NewMyApplicationStage(this, jsii.String("Prod"))

pipeline.AddStage(preprod, &AddStageOpts{
	Post: []step{
		pipelines.NewShellStep(jsii.String("Validate Endpoint"), &ShellStepProps{
			Commands: []*string{
				jsii.String("curl -Ssf https://my.webservice.com/"),
			},
		}),
	},
})
pipeline.AddStage(prod, &AddStageOpts{
	Pre: []*step{
		pipelines.NewManualApprovalStep(jsii.String("PromoteToProd")),
	},
})

type ArtifactMap

type ArtifactMap interface {
	// Return the matching CodePipeline artifact for a FileSet.
	ToCodePipeline(x FileSet) awscodepipeline.Artifact
}

Translate FileSets to CodePipeline Artifacts.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

artifactMap := awscdk.Pipelines.NewArtifactMap()

func NewArtifactMap

func NewArtifactMap() ArtifactMap

type AssetType

type AssetType string

Type of the asset that is being published.

const (
	// A file.
	AssetType_FILE AssetType = "FILE"
	// A Docker image.
	AssetType_DOCKER_IMAGE AssetType = "DOCKER_IMAGE"
)

type CodeBuildOptions

type CodeBuildOptions struct {
	// Partial build environment, will be combined with other build environments that apply.
	// Default: - Non-privileged build, SMALL instance, LinuxBuildImage.STANDARD_7_0
	//
	BuildEnvironment *awscodebuild.BuildEnvironment `field:"optional" json:"buildEnvironment" yaml:"buildEnvironment"`
	// Caching strategy to use.
	// Default: - No cache.
	//
	Cache awscodebuild.Cache `field:"optional" json:"cache" yaml:"cache"`
	// ProjectFileSystemLocation objects for CodeBuild build projects.
	//
	// A ProjectFileSystemLocation object specifies the identifier, location, mountOptions, mountPoint,
	// and type of a file system created using Amazon Elastic File System.
	// Requires a vpc to be set and privileged to be set to true.
	// Default: - no file system locations.
	//
	FileSystemLocations *[]awscodebuild.IFileSystemLocation `field:"optional" json:"fileSystemLocations" yaml:"fileSystemLocations"`
	// Information about logs for CodeBuild projects.
	//
	// A CodeBuild project can create logs in Amazon CloudWatch Logs, an S3 bucket, or both.
	// Default: - no log configuration is set.
	//
	Logging *awscodebuild.LoggingOptions `field:"optional" json:"logging" yaml:"logging"`
	// Partial buildspec, will be combined with other buildspecs that apply.
	//
	// The BuildSpec must be available inline--it cannot reference a file
	// on disk.
	// Default: - No initial BuildSpec.
	//
	PartialBuildSpec awscodebuild.BuildSpec `field:"optional" json:"partialBuildSpec" yaml:"partialBuildSpec"`
	// Policy statements to add to role.
	// Default: - No policy statements added to CodeBuild Project Role.
	//
	RolePolicy *[]awsiam.PolicyStatement `field:"optional" json:"rolePolicy" yaml:"rolePolicy"`
	// Which security group(s) to associate with the project network interfaces.
	//
	// Only used if 'vpc' is supplied.
	// Default: - Security group will be automatically created.
	//
	SecurityGroups *[]awsec2.ISecurityGroup `field:"optional" json:"securityGroups" yaml:"securityGroups"`
	// Which subnets to use.
	//
	// Only used if 'vpc' is supplied.
	// Default: - All private subnets.
	//
	SubnetSelection *awsec2.SubnetSelection `field:"optional" json:"subnetSelection" yaml:"subnetSelection"`
	// The number of minutes after which AWS CodeBuild stops the build if it's not complete.
	//
	// For valid values, see the timeoutInMinutes field in the AWS
	// CodeBuild User Guide.
	// Default: Duration.hours(1)
	//
	Timeout awscdk.Duration `field:"optional" json:"timeout" yaml:"timeout"`
	// The VPC where to create the CodeBuild network interfaces in.
	// Default: - No VPC.
	//
	Vpc awsec2.IVpc `field:"optional" json:"vpc" yaml:"vpc"`
}

Options for customizing a single CodeBuild project.

Example:

var source iFileSetProducer // the repository source
var synthCommands []*string // Commands to synthesize your app
var installCommands []*string
// Commands to install your toolchain

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	// Standard CodePipeline properties...
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: synthCommands,
	}),

	// Configure CodeBuild to use a drop-in Docker replacement.
	CodeBuildDefaults: &CodeBuildOptions{
		PartialBuildSpec: codebuild.BuildSpec_FromObject(map[string]interface{}{
			"phases": map[string]map[string][]*string{
				"install": map[string][]*string{
					// Add the shell commands to install your drop-in Docker
					// replacement to the CodeBuild environment.
					"commands": installCommands,
				},
			},
		}),
		BuildEnvironment: &BuildEnvironment{
			EnvironmentVariables: map[string]buildEnvironmentVariable{
				// Instruct the AWS CDK to use `drop-in-replacement` instead of
				// `docker` when building / publishing docker images.
				// e.g., `drop-in-replacement build . -f path/to/Dockerfile`
				"CDK_DOCKER": &buildEnvironmentVariable{
					"value": jsii.String("drop-in-replacement"),
				},
			},
		},
	},
})

type CodeBuildStep

type CodeBuildStep interface {
	ShellStep
	// Custom execution role to be used for the Code Build Action.
	// Default: - A role is automatically created.
	//
	ActionRole() awsiam.IRole
	// Build environment.
	// Default: - No value specified at construction time, use defaults.
	//
	BuildEnvironment() *awscodebuild.BuildEnvironment
	// Caching strategy to use.
	// Default: - No cache.
	//
	Cache() awscodebuild.Cache
	// Commands to run.
	Commands() *[]*string
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Environment variables to set.
	// Default: - No environment variables.
	//
	Env() *map[string]*string
	// Set environment variables based on Stack Outputs.
	// Default: - No environment variables created from stack outputs.
	//
	EnvFromCfnOutputs() *map[string]StackOutputReference
	// ProjectFileSystemLocation objects for CodeBuild build projects.
	//
	// A ProjectFileSystemLocation object specifies the identifier, location, mountOptions, mountPoint,
	// and type of a file system created using Amazon Elastic File System.
	// Default: - no file system locations.
	//
	FileSystemLocations() *[]awscodebuild.IFileSystemLocation
	// The CodeBuild Project's principal.
	GrantPrincipal() awsiam.IPrincipal
	// Identifier for this step.
	Id() *string
	// Input FileSets.
	//
	// A list of `(FileSet, directory)` pairs, which are a copy of the
	// input properties. This list should not be modified directly.
	Inputs() *[]*FileSetLocation
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	// Default: - No installation commands.
	//
	InstallCommands() *[]*string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// Information about logs for CodeBuild projects.
	//
	// A CodeBuild project can create logs in Amazon CloudWatch Logs, an S3 bucket, or both.
	// Default: - no log configuration is set.
	//
	Logging() *awscodebuild.LoggingOptions
	// Output FileSets.
	//
	// A list of `(FileSet, directory)` pairs, which are a copy of the
	// input properties. This list should not be modified directly.
	Outputs() *[]*FileSetLocation
	// Additional configuration that can only be configured via BuildSpec.
	//
	// Contains exported variables.
	// Default: - Contains the exported variables.
	//
	PartialBuildSpec() awscodebuild.BuildSpec
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// CodeBuild Project generated for the pipeline.
	//
	// Will only be available after the pipeline has been built.
	Project() awscodebuild.IProject
	// Name for the generated CodeBuild project.
	// Default: - No value specified at construction time, use defaults.
	//
	ProjectName() *string
	// Custom execution role to be used for the CodeBuild project.
	// Default: - No value specified at construction time, use defaults.
	//
	Role() awsiam.IRole
	// Policy statements to add to role used during the synth.
	// Default: - No value specified at construction time, use defaults.
	//
	RolePolicyStatements() *[]awsiam.PolicyStatement
	// Which security group to associate with the script's project network interfaces.
	// Default: - No value specified at construction time, use defaults.
	//
	SecurityGroups() *[]awsec2.ISecurityGroup
	// Which subnets to use.
	// Default: - No value specified at construction time, use defaults.
	//
	SubnetSelection() *awsec2.SubnetSelection
	// The number of minutes after which AWS CodeBuild stops the build if it's not complete.
	//
	// For valid values, see the timeoutInMinutes field in the AWS
	// CodeBuild User Guide.
	// Default: Duration.hours(1)
	//
	Timeout() awscdk.Duration
	// The VPC where to execute the SimpleSynth.
	// Default: - No value specified at construction time, use defaults.
	//
	Vpc() awsec2.IVpc
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add an additional output FileSet based on a directory.
	//
	// After running the script, the contents of the given directory
	// will be exported as a `FileSet`. Use the `FileSet` as the
	// input to another step.
	//
	// Multiple calls with the exact same directory name string (not normalized)
	// will return the same FileSet.
	AddOutputDirectory(directory *string) FileSet
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	// Reference a CodePipeline variable defined by the CodeBuildStep.
	//
	// The variable must be set in the shell of the CodeBuild step when
	// it finishes its `post_build` phase.
	//
	// Example:
	//   // Access the output of one CodeBuildStep in another CodeBuildStep
	//   var pipeline codePipeline
	//
	//
	//   step1 := pipelines.NewCodeBuildStep(jsii.String("Step1"), &CodeBuildStepProps{
	//   	Commands: []*string{
	//   		jsii.String("export MY_VAR=hello"),
	//   	},
	//   })
	//
	//   step2 := pipelines.NewCodeBuildStep(jsii.String("Step2"), &CodeBuildStepProps{
	//   	Env: map[string]*string{
	//   		"IMPORTED_VAR": step1.exportedVariable(jsii.String("MY_VAR")),
	//   	},
	//   	Commands: []*string{
	//   		jsii.String("echo $IMPORTED_VAR"),
	//   	},
	//   })
	//
	ExportedVariable(variableName *string) *string
	// Configure the given output directory as primary output.
	//
	// If no primary output has been configured yet, this directory
	// will become the primary output of this ShellStep, otherwise this
	// method will throw if the given directory is different than the
	// currently configured primary output directory.
	PrimaryOutputDirectory(directory *string) FileSet
	// Return a string representation of this Step.
	ToString() *string
}

Run a script as a CodeBuild Project.

The BuildSpec must be available inline--it cannot reference a file on disk. If your current build instructions are in a file like `buildspec.yml` in your repository, extract them to a script (say, `build.sh`) and invoke that script as part of the build:

```ts

new pipelines.CodeBuildStep('Synth', {
  commands: ['./build.sh'],
});

```.

Example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the pipeline uses Docker image assets
	DockerEnabledForSelfMutation: jsii.Boolean(true),
})

pipeline.AddWave(jsii.String("MyWave"), &WaveOptions{
	Post: []step{
		pipelines.NewCodeBuildStep(jsii.String("RunApproval"), &CodeBuildStepProps{
			Commands: []*string{
				jsii.String("command-from-image"),
			},
			BuildEnvironment: &BuildEnvironment{
				// The user of a Docker image asset in the pipeline requires turning on
				// 'dockerEnabledForSelfMutation'.
				BuildImage: codebuild.LinuxBuildImage_FromAsset(this, jsii.String("Image"), &DockerImageAssetProps{
					Directory: jsii.String("./docker-image"),
				}),
			},
		}),
	},
})

func NewCodeBuildStep

func NewCodeBuildStep(id *string, props *CodeBuildStepProps) CodeBuildStep

type CodeBuildStepProps

type CodeBuildStepProps struct {
	// Commands to run.
	Commands *[]*string `field:"required" json:"commands" yaml:"commands"`
	// Additional FileSets to put in other directories.
	//
	// Specifies a mapping from directory name to FileSets. During the
	// script execution, the FileSets will be available in the directories
	// indicated.
	//
	// The directory names may be relative. For example, you can put
	// the main input and an additional input side-by-side with the
	// following configuration:
	//
	// ```ts
	// const script = new pipelines.ShellStep('MainScript', {
	//   commands: ['npm ci','npm run build','npx cdk synth'],
	//   input: pipelines.CodePipelineSource.gitHub('org/source1', 'main'),
	//   additionalInputs: {
	//     '../siblingdir': pipelines.CodePipelineSource.gitHub('org/source2', 'main'),
	//   }
	// });
	// ```
	// Default: - No additional inputs.
	//
	AdditionalInputs *map[string]IFileSetProducer `field:"optional" json:"additionalInputs" yaml:"additionalInputs"`
	// Environment variables to set.
	// Default: - No environment variables.
	//
	Env *map[string]*string `field:"optional" json:"env" yaml:"env"`
	// Set environment variables based on Stack Outputs.
	//
	// `ShellStep`s following stack or stage deployments may
	// access the `CfnOutput`s of those stacks to get access to
	// --for example--automatically generated resource names or
	// endpoint URLs.
	// Default: - No environment variables created from stack outputs.
	//
	EnvFromCfnOutputs *map[string]awscdk.CfnOutput `field:"optional" json:"envFromCfnOutputs" yaml:"envFromCfnOutputs"`
	// FileSet to run these scripts on.
	//
	// The files in the FileSet will be placed in the working directory when
	// the script is executed. Use `additionalInputs` to download file sets
	// to other directories as well.
	// Default: - No input specified.
	//
	Input IFileSetProducer `field:"optional" json:"input" yaml:"input"`
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	// Default: - No installation commands.
	//
	InstallCommands *[]*string `field:"optional" json:"installCommands" yaml:"installCommands"`
	// The directory that will contain the primary output fileset.
	//
	// After running the script, the contents of the given directory
	// will be treated as the primary output of this Step.
	// Default: - No primary output.
	//
	PrimaryOutputDirectory *string `field:"optional" json:"primaryOutputDirectory" yaml:"primaryOutputDirectory"`
	// Custom execution role to be used for the Code Build Action.
	// Default: - A role is automatically created.
	//
	ActionRole awsiam.IRole `field:"optional" json:"actionRole" yaml:"actionRole"`
	// Changes to environment.
	//
	// This environment will be combined with the pipeline's default
	// environment.
	// Default: - Use the pipeline's default build environment.
	//
	BuildEnvironment *awscodebuild.BuildEnvironment `field:"optional" json:"buildEnvironment" yaml:"buildEnvironment"`
	// Caching strategy to use.
	// Default: - No cache.
	//
	Cache awscodebuild.Cache `field:"optional" json:"cache" yaml:"cache"`
	// ProjectFileSystemLocation objects for CodeBuild build projects.
	//
	// A ProjectFileSystemLocation object specifies the identifier, location, mountOptions, mountPoint,
	// and type of a file system created using Amazon Elastic File System.
	// Default: - no file system locations.
	//
	FileSystemLocations *[]awscodebuild.IFileSystemLocation `field:"optional" json:"fileSystemLocations" yaml:"fileSystemLocations"`
	// Information about logs for CodeBuild projects.
	//
	// A CodeBuild project can create logs in Amazon CloudWatch Logs, an S3 bucket, or both.
	// Default: - no log configuration is set.
	//
	Logging *awscodebuild.LoggingOptions `field:"optional" json:"logging" yaml:"logging"`
	// Additional configuration that can only be configured via BuildSpec.
	//
	// You should not use this to specify output artifacts; those
	// should be supplied via the other properties of this class, otherwise
	// CDK Pipelines won't be able to inspect the artifacts.
	//
	// Set the `commands` to an empty array if you want to fully specify
	// the BuildSpec using this field.
	//
	// The BuildSpec must be available inline--it cannot reference a file
	// on disk.
	// Default: - BuildSpec completely derived from other properties.
	//
	PartialBuildSpec awscodebuild.BuildSpec `field:"optional" json:"partialBuildSpec" yaml:"partialBuildSpec"`
	// Name for the generated CodeBuild project.
	// Default: - Automatically generated.
	//
	ProjectName *string `field:"optional" json:"projectName" yaml:"projectName"`
	// Custom execution role to be used for the CodeBuild project.
	// Default: - A role is automatically created.
	//
	Role awsiam.IRole `field:"optional" json:"role" yaml:"role"`
	// Policy statements to add to role used during the synth.
	//
	// Can be used to add access to a CodeArtifact repository etc.
	// Default: - No policy statements added to CodeBuild Project Role.
	//
	RolePolicyStatements *[]awsiam.PolicyStatement `field:"optional" json:"rolePolicyStatements" yaml:"rolePolicyStatements"`
	// Which security group to associate with the script's project network interfaces.
	//
	// If no security group is identified, one will be created automatically.
	//
	// Only used if 'vpc' is supplied.
	// Default: - Security group will be automatically created.
	//
	SecurityGroups *[]awsec2.ISecurityGroup `field:"optional" json:"securityGroups" yaml:"securityGroups"`
	// Which subnets to use.
	//
	// Only used if 'vpc' is supplied.
	// Default: - All private subnets.
	//
	SubnetSelection *awsec2.SubnetSelection `field:"optional" json:"subnetSelection" yaml:"subnetSelection"`
	// The number of minutes after which AWS CodeBuild stops the build if it's not complete.
	//
	// For valid values, see the timeoutInMinutes field in the AWS
	// CodeBuild User Guide.
	// Default: Duration.hours(1)
	//
	Timeout awscdk.Duration `field:"optional" json:"timeout" yaml:"timeout"`
	// The VPC where to execute the SimpleSynth.
	// Default: - No VPC.
	//
	Vpc awsec2.IVpc `field:"optional" json:"vpc" yaml:"vpc"`
}

Construction props for a CodeBuildStep.

Example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the pipeline uses Docker image assets
	DockerEnabledForSelfMutation: jsii.Boolean(true),
})

pipeline.AddWave(jsii.String("MyWave"), &WaveOptions{
	Post: []step{
		pipelines.NewCodeBuildStep(jsii.String("RunApproval"), &CodeBuildStepProps{
			Commands: []*string{
				jsii.String("command-from-image"),
			},
			BuildEnvironment: &BuildEnvironment{
				// The user of a Docker image asset in the pipeline requires turning on
				// 'dockerEnabledForSelfMutation'.
				BuildImage: codebuild.LinuxBuildImage_FromAsset(this, jsii.String("Image"), &DockerImageAssetProps{
					Directory: jsii.String("./docker-image"),
				}),
			},
		}),
	},
})

type CodeCommitSourceOptions

type CodeCommitSourceOptions struct {
	// The action name used for this source in the CodePipeline.
	// Default: - The repository name.
	//
	ActionName *string `field:"optional" json:"actionName" yaml:"actionName"`
	// If this is set, the next CodeBuild job clones the repository (instead of CodePipeline downloading the files).
	//
	// This provides access to repository history, and retains symlinks (symlinks would otherwise be
	// removed by CodePipeline).
	//
	// **Note**: if this option is true, only CodeBuild jobs can use the output artifact.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodeCommit.html
	//
	// Default: false.
	//
	CodeBuildCloneOutput *bool `field:"optional" json:"codeBuildCloneOutput" yaml:"codeBuildCloneOutput"`
	// Role to be used by on commit event rule.
	//
	// Used only when trigger value is CodeCommitTrigger.EVENTS.
	// Default: a new role will be created.
	//
	EventRole awsiam.IRole `field:"optional" json:"eventRole" yaml:"eventRole"`
	// How should CodePipeline detect source changes for this Action.
	// Default: CodeCommitTrigger.EVENTS
	//
	Trigger awscodepipelineactions.CodeCommitTrigger `field:"optional" json:"trigger" yaml:"trigger"`
}

Configuration options for a CodeCommit source.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var role role

codeCommitSourceOptions := &CodeCommitSourceOptions{
	ActionName: jsii.String("actionName"),
	CodeBuildCloneOutput: jsii.Boolean(false),
	EventRole: role,
	Trigger: awscdk.Aws_codepipeline_actions.CodeCommitTrigger_NONE,
}

type CodePipeline

type CodePipeline interface {
	PipelineBase
	// The FileSet that contains the cloud assembly.
	//
	// This is the primary output of the synth step.
	CloudAssemblyFileSet() FileSet
	// The tree node.
	Node() constructs.Node
	// The CodePipeline pipeline that deploys the CDK app.
	//
	// Only available after the pipeline has been built.
	Pipeline() awscodepipeline.Pipeline
	// Whether SelfMutation is enabled for this CDK Pipeline.
	SelfMutationEnabled() *bool
	// The CodeBuild project that performs the SelfMutation.
	//
	// Will throw an error if this is accessed before `buildPipeline()`
	// is called, or if selfMutation has been disabled.
	SelfMutationProject() awscodebuild.IProject
	// The build step that produces the CDK Cloud Assembly.
	Synth() IFileSetProducer
	// The CodeBuild project that performs the Synth.
	//
	// Only available after the pipeline has been built.
	SynthProject() awscodebuild.IProject
	// The waves in this pipeline.
	Waves() *[]Wave
	// Deploy a single Stage by itself.
	//
	// Add a Stage to the pipeline, to be deployed in sequence with other
	// Stages added to the pipeline. All Stacks in the stage will be deployed
	// in an order automatically determined by their relative dependencies.
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
	// Add a Wave to the pipeline, for deploying multiple Stages in parallel.
	//
	// Use the return object of this method to deploy multiple stages in parallel.
	//
	// Example:
	//
	// ```ts
	// declare const pipeline: pipelines.CodePipeline;
	//
	// const wave = pipeline.addWave('MyWave');
	// wave.addStage(new MyApplicationStage(this, 'Stage1'));
	// wave.addStage(new MyApplicationStage(this, 'Stage2'));
	// ```
	AddWave(id *string, options *WaveOptions) Wave
	// Send the current pipeline definition to the engine, and construct the pipeline.
	//
	// It is not possible to modify the pipeline after calling this method.
	BuildPipeline()
	// Implemented by subclasses to do the actual pipeline construction.
	DoBuildPipeline()
	// Returns a string representation of this construct.
	ToString() *string
}

A CDK Pipeline that uses CodePipeline to deploy CDK apps.

This is a `Pipeline` with its `engine` property set to `CodePipelineEngine`, and exists for nicer ergonomics for users that don't need to switch out engines.

Example:

// Modern API
modernPipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	SelfMutation: jsii.Boolean(false),
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

// Original API
cloudAssemblyArtifact := codepipeline.NewArtifact()
originalPipeline := pipelines.NewCdkPipeline(this, jsii.String("Pipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(false),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})

func NewCodePipeline

func NewCodePipeline(scope constructs.Construct, id *string, props *CodePipelineProps) CodePipeline

type CodePipelineActionFactoryResult

type CodePipelineActionFactoryResult struct {
	// How many RunOrders were consumed.
	//
	// If you add 1 action, return the value 1 here.
	RunOrdersConsumed *float64 `field:"required" json:"runOrdersConsumed" yaml:"runOrdersConsumed"`
	// If a CodeBuild project got created, the project.
	// Default: - This factory did not create a CodeBuild project.
	//
	Project awscodebuild.IProject `field:"optional" json:"project" yaml:"project"`
}

The result of adding actions to the pipeline.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var project project

codePipelineActionFactoryResult := &CodePipelineActionFactoryResult{
	RunOrdersConsumed: jsii.Number(123),

	// the properties below are optional
	Project: project,
}

type CodePipelineFileSet

type CodePipelineFileSet interface {
	FileSet
	// Human-readable descriptor for this file set (does not need to be unique).
	Id() *string
	// The primary output of a file set producer.
	//
	// The primary output of a FileSet is itself.
	PrimaryOutput() FileSet
	// The Step that produces this FileSet.
	Producer() Step
	// Mark the given Step as the producer for this FileSet.
	//
	// This method can only be called once.
	ProducedBy(producer Step)
	// Return a string representation of this FileSet.
	ToString() *string
}

A FileSet created from a CodePipeline artifact.

You only need to use this if you want to add CDK Pipeline stages at the end of an existing CodePipeline, which should be very rare.

Example:

var codePipeline pipeline

sourceArtifact := codepipeline.NewArtifact(jsii.String("MySourceArtifact"))

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	CodePipeline: codePipeline,
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineFileSet_FromArtifact(sourceArtifact),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

func CodePipelineFileSet_FromArtifact

func CodePipelineFileSet_FromArtifact(artifact awscodepipeline.Artifact) CodePipelineFileSet

Turn a CodePipeline Artifact into a FileSet.

type CodePipelineProps

type CodePipelineProps struct {
	// The build step that produces the CDK Cloud Assembly.
	//
	// The primary output of this step needs to be the `cdk.out` directory
	// generated by the `cdk synth` command.
	//
	// If you use a `ShellStep` here and you don't configure an output directory,
	// the output directory will automatically be assumed to be `cdk.out`.
	Synth IFileSetProducer `field:"required" json:"synth" yaml:"synth"`
	// An existing S3 Bucket to use for storing the pipeline's artifact.
	// Default: - A new S3 bucket will be created.
	//
	ArtifactBucket awss3.IBucket `field:"optional" json:"artifactBucket" yaml:"artifactBucket"`
	// Additional customizations to apply to the asset publishing CodeBuild projects.
	// Default: - Only `codeBuildDefaults` are applied.
	//
	AssetPublishingCodeBuildDefaults *CodeBuildOptions `field:"optional" json:"assetPublishingCodeBuildDefaults" yaml:"assetPublishingCodeBuildDefaults"`
	// CDK CLI version to use in self-mutation and asset publishing steps.
	//
	// If you want to lock the CDK CLI version used in the pipeline, by steps
	// that are automatically generated for you, specify the version here.
	//
	// We recommend you do not specify this value, as not specifying it always
	// uses the latest CLI version which is backwards compatible with old versions.
	//
	// If you do specify it, be aware that this version should always be equal to or higher than the
	// version of the CDK framework used by the CDK app, when the CDK commands are
	// run during your pipeline execution. When you change this version, the *next
	// time* the `SelfMutate` step runs it will still be using the CLI of the
	// *previous* version that was in this property: it will only start using the
	// new version after `SelfMutate` completes successfully. That means that if
	// you want to update both framework and CLI version, you should update the
	// CLI version first, commit, push and deploy, and only then update the
	// framework version.
	// Default: - Latest version.
	//
	CliVersion *string `field:"optional" json:"cliVersion" yaml:"cliVersion"`
	// Customize the CodeBuild projects created for this pipeline.
	// Default: - All projects run non-privileged build, SMALL instance, LinuxBuildImage.STANDARD_7_0
	//
	CodeBuildDefaults *CodeBuildOptions `field:"optional" json:"codeBuildDefaults" yaml:"codeBuildDefaults"`
	// An existing Pipeline to be reused and built upon.
	//
	// [disable-awslint:ref-via-interface].
	// Default: - a new underlying pipeline is created.
	//
	CodePipeline awscodepipeline.Pipeline `field:"optional" json:"codePipeline" yaml:"codePipeline"`
	// Create KMS keys for the artifact buckets, allowing cross-account deployments.
	//
	// The artifact buckets have to be encrypted to support deploying CDK apps to
	// another account, so if you want to do that or want to have your artifact
	// buckets encrypted, be sure to set this value to `true`.
	//
	// Be aware there is a cost associated with maintaining the KMS keys.
	// Default: false.
	//
	CrossAccountKeys *bool `field:"optional" json:"crossAccountKeys" yaml:"crossAccountKeys"`
	// A map of region to S3 bucket name used for cross-region CodePipeline.
	//
	// For every Action that you specify targeting a different region than the Pipeline itself,
	// if you don't provide an explicit Bucket for that region using this property,
	// the construct will automatically create a Stack containing an S3 Bucket in that region.
	// Passed directly through to the {@link cp.Pipeline}.
	// Default: - no cross region replication buckets.
	//
	CrossRegionReplicationBuckets *map[string]awss3.IBucket `field:"optional" json:"crossRegionReplicationBuckets" yaml:"crossRegionReplicationBuckets"`
	// A list of credentials used to authenticate to Docker registries.
	//
	// Specify any credentials necessary within the pipeline to build, synth, update, or publish assets.
	// Default: [].
	//
	DockerCredentials *[]DockerCredential `field:"optional" json:"dockerCredentials" yaml:"dockerCredentials"`
	// Enable Docker for the self-mutate step.
	//
	// Set this to true if the pipeline itself uses Docker container assets
	// (for example, if you use `LinuxBuildImage.fromAsset()` as the build
	// image of a CodeBuild step in the pipeline).
	//
	// You do not need to set it if you build Docker image assets in the
	// application Stages and Stacks that are *deployed* by this pipeline.
	//
	// Configures privileged mode for the self-mutation CodeBuild action.
	//
	// If you are about to turn this on in an already-deployed Pipeline,
	// set the value to `true` first, commit and allow the pipeline to
	// self-update, and only then use the Docker asset in the pipeline.
	// Default: false.
	//
	DockerEnabledForSelfMutation *bool `field:"optional" json:"dockerEnabledForSelfMutation" yaml:"dockerEnabledForSelfMutation"`
	// Enable Docker for the 'synth' step.
	//
	// Set this to true if you are using file assets that require
	// "bundling" anywhere in your application (meaning an asset
	// compilation step will be run with the tools provided by
	// a Docker image), both for the Pipeline stack as well as the
	// application stacks.
	//
	// A common way to use bundling assets in your application is by
	// using the `aws-cdk-lib/aws-lambda-nodejs` library.
	//
	// Configures privileged mode for the synth CodeBuild action.
	//
	// If you are about to turn this on in an already-deployed Pipeline,
	// set the value to `true` first, commit and allow the pipeline to
	// self-update, and only then use the bundled asset.
	// Default: false.
	//
	DockerEnabledForSynth *bool `field:"optional" json:"dockerEnabledForSynth" yaml:"dockerEnabledForSynth"`
	// Enable KMS key rotation for the generated KMS keys.
	//
	// By default KMS key rotation is disabled, but will add
	// additional costs when enabled.
	// Default: - false (key rotation is disabled).
	//
	EnableKeyRotation *bool `field:"optional" json:"enableKeyRotation" yaml:"enableKeyRotation"`
	// The name of the CodePipeline pipeline.
	// Default: - Automatically generated.
	//
	PipelineName *string `field:"optional" json:"pipelineName" yaml:"pipelineName"`
	// Publish assets in multiple CodeBuild projects.
	//
	// If set to false, use one Project per type to publish all assets.
	//
	// Publishing in parallel improves concurrency and may reduce publishing
	// latency, but may also increase overall provisioning time of the CodeBuild
	// projects.
	//
	// Experiment and see what value works best for you.
	// Default: true.
	//
	PublishAssetsInParallel *bool `field:"optional" json:"publishAssetsInParallel" yaml:"publishAssetsInParallel"`
	// Reuse the same cross region support stack for all pipelines in the App.
	// Default: - true (Use the same support stack for all pipelines in App).
	//
	ReuseCrossRegionSupportStacks *bool `field:"optional" json:"reuseCrossRegionSupportStacks" yaml:"reuseCrossRegionSupportStacks"`
	// The IAM role to be assumed by this Pipeline.
	// Default: - A new role is created.
	//
	Role awsiam.IRole `field:"optional" json:"role" yaml:"role"`
	// Whether the pipeline will update itself.
	//
	// This needs to be set to `true` to allow the pipeline to reconfigure
	// itself when assets or stages are being added to it, and `true` is the
	// recommended setting.
	//
	// You can temporarily set this to `false` while you are iterating
	// on the pipeline itself and prefer to deploy changes using `cdk deploy`.
	// Default: true.
	//
	SelfMutation *bool `field:"optional" json:"selfMutation" yaml:"selfMutation"`
	// Additional customizations to apply to the self mutation CodeBuild projects.
	// Default: - Only `codeBuildDefaults` are applied.
	//
	SelfMutationCodeBuildDefaults *CodeBuildOptions `field:"optional" json:"selfMutationCodeBuildDefaults" yaml:"selfMutationCodeBuildDefaults"`
	// Additional customizations to apply to the synthesize CodeBuild projects.
	// Default: - Only `codeBuildDefaults` are applied.
	//
	SynthCodeBuildDefaults *CodeBuildOptions `field:"optional" json:"synthCodeBuildDefaults" yaml:"synthCodeBuildDefaults"`
	// Deploy every stack by creating a change set and executing it.
	//
	// When enabled, creates a "Prepare" and "Execute" action for each stack. Disable
	// to deploy the stack in one pipeline action.
	// Default: true.
	//
	UseChangeSets *bool `field:"optional" json:"useChangeSets" yaml:"useChangeSets"`
}

Properties for a `CodePipeline`.

Example:

// Modern API
modernPipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	SelfMutation: jsii.Boolean(false),
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

// Original API
cloudAssemblyArtifact := codepipeline.NewArtifact()
originalPipeline := pipelines.NewCdkPipeline(this, jsii.String("Pipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(false),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})

type CodePipelineSource

type CodePipelineSource interface {
	Step
	ICodePipelineActionFactory
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Identifier for this step.
	Id() *string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	GetAction(output awscodepipeline.Artifact, actionName *string, runOrder *float64, variablesNamespace *string) awscodepipelineactions.Action
	// Create the desired Action and add it to the pipeline.
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
	// Return an attribute of the current source revision.
	//
	// These values can be passed into the environment variables of pipeline steps,
	// so your steps can access information about the source revision.
	//
	// Pipeline synth step has some source attributes predefined in the environment.
	// If these suffice, you don't need to use this method for the synth step.
	//
	// Example:
	//   // Access the CommitId of a GitHub source in the synth
	//   source := pipelines.CodePipelineSource_GitHub(jsii.String("owner/repo"), jsii.String("main"))
	//
	//   pipeline := pipelines.NewCodePipeline(*scope, jsii.String("MyPipeline"), &CodePipelineProps{
	//   	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
	//   		Input: source,
	//   		Commands: []*string{
	//   		},
	//   		Env: map[string]*string{
	//   			"COMMIT_ID": source.sourceAttribute(jsii.String("CommitId")),
	//   		},
	//   	}),
	//   })
	//
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/reference-variables.html#reference-variables-list
	//
	SourceAttribute(name *string) *string
	// Return a string representation of this Step.
	ToString() *string
}

Factory for CodePipeline source steps.

This class contains a number of factory methods for the different types of sources that CodePipeline supports.

Example:

// Access the CommitId of a GitHub source in the synth
source := pipelines.CodePipelineSource_GitHub(jsii.String("owner/repo"), jsii.String("main"))

pipeline := pipelines.NewCodePipeline(*scope, jsii.String("MyPipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: source,
		Commands: []*string{
		},
		Env: map[string]*string{
			"COMMIT_ID": source.sourceAttribute(jsii.String("CommitId")),
		},
	}),
})

func CodePipelineSource_CodeCommit

func CodePipelineSource_CodeCommit(repository awscodecommit.IRepository, branch *string, props *CodeCommitSourceOptions) CodePipelineSource

Returns a CodeCommit source.

If you need access to symlinks or the repository history, be sure to set `codeBuildCloneOutput`.

Example:

var repository iRepository

pipelines.CodePipelineSource_CodeCommit(repository, jsii.String("main"))

func CodePipelineSource_Connection

func CodePipelineSource_Connection(repoString *string, branch *string, props *ConnectionSourceOptions) CodePipelineSource

Returns a CodeStar connection source.

A CodeStar connection allows AWS CodePipeline to access external resources, such as repositories in GitHub, GitHub Enterprise or BitBucket.

To use this method, you first need to create a CodeStar connection using the AWS console. In the process, you may have to sign in to the external provider -- GitHub, for example -- to authorize AWS to read and modify your repository. Once you have done this, copy the connection ARN and use it to create the source.

Example:

```ts

pipelines.CodePipelineSource.connection('owner/repo', 'main', {
  connectionArn: 'arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41', // Created using the AWS console
});

```

If you need access to symlinks or the repository history, be sure to set `codeBuildCloneOutput`. See: https://docs.aws.amazon.com/dtconsole/latest/userguide/welcome-connections.html

func CodePipelineSource_Ecr added in v2.14.0

func CodePipelineSource_Ecr(repository awsecr.IRepository, props *ECRSourceOptions) CodePipelineSource

Returns an ECR source.

Example:

var repository iRepository

pipelines.CodePipelineSource_Ecr(repository, &ECRSourceOptions{
	ImageTag: jsii.String("latest"),
})

func CodePipelineSource_GitHub

func CodePipelineSource_GitHub(repoString *string, branch *string, props *GitHubSourceOptions) CodePipelineSource

Returns a GitHub source, using OAuth tokens to authenticate with GitHub and a separate webhook to detect changes.

This is no longer the recommended method. Please consider using `connection()` instead.

Pass in the owner and repository in a single string, like this:

```ts pipelines.CodePipelineSource.gitHub('owner/repo', 'main'); ```

Authentication will be done by a secret called `github-token` in AWS Secrets Manager (unless specified otherwise).

If you rotate the value in the Secret, you must also change at least one property on the Pipeline, to force CloudFormation to re-read the secret.

The token should have these permissions:

* **repo** - to read the repository * **admin:repo_hook** - if you plan to use webhooks (true by default)

If you need access to symlinks or the repository history, use a source of type `connection` instead.

func CodePipelineSource_S3

func CodePipelineSource_S3(bucket awss3.IBucket, objectKey *string, props *S3SourceOptions) CodePipelineSource

Returns an S3 source.

Example:

var bucket bucket

pipelines.CodePipelineSource_S3(bucket, jsii.String("path/to/file.zip"))

type ConfirmPermissionsBroadening

type ConfirmPermissionsBroadening interface {
	Step
	ICodePipelineActionFactory
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Identifier for this step.
	Id() *string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	// Create the desired Action and add it to the pipeline.
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
	// Return a string representation of this Step.
	ToString() *string
}

Pause the pipeline if a deployment would add IAM permissions or Security Group rules.

This step is only supported in CodePipeline pipelines.

Example:

var pipeline codePipeline

stage := NewMyApplicationStage(this, jsii.String("MyApplication"))
pipeline.AddStage(stage, &AddStageOpts{
	Pre: []step{
		pipelines.NewConfirmPermissionsBroadening(jsii.String("Check"), &PermissionsBroadeningCheckProps{
			Stage: stage,
		}),
	},
})

type ConnectionSourceOptions

type ConnectionSourceOptions struct {
	// The ARN of the CodeStar Connection created in the AWS console that has permissions to access this GitHub or BitBucket repository.
	//
	// Example:
	//   "arn:aws:codestar-connections:us-east-1:123456789012:connection/12345678-abcd-12ab-34cdef5678gh"
	//
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/connections-create.html
	//
	ConnectionArn *string `field:"required" json:"connectionArn" yaml:"connectionArn"`
	// The action name used for this source in the CodePipeline.
	// Default: - The repository string.
	//
	ActionName *string `field:"optional" json:"actionName" yaml:"actionName"`
	// If this is set, the next CodeBuild job clones the repository (instead of CodePipeline downloading the files).
	//
	// This provides access to repository history, and retains symlinks (symlinks would otherwise be
	// removed by CodePipeline).
	//
	// **Note**: if this option is true, only CodeBuild jobs can use the output artifact.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodestarConnectionSource.html#action-reference-CodestarConnectionSource-config
	//
	// Default: false.
	//
	CodeBuildCloneOutput *bool `field:"optional" json:"codeBuildCloneOutput" yaml:"codeBuildCloneOutput"`
	// Controls automatically starting your pipeline when a new commit is made on the configured repository and branch.
	//
	// If unspecified,
	// the default value is true, and the field does not display by default.
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference-CodestarConnectionSource.html
	//
	// Default: true.
	//
	TriggerOnPush *bool `field:"optional" json:"triggerOnPush" yaml:"triggerOnPush"`
}

Configuration options for CodeStar source.

Example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the pipeline uses Docker image assets
	DockerEnabledForSelfMutation: jsii.Boolean(true),
})

pipeline.AddWave(jsii.String("MyWave"), &WaveOptions{
	Post: []step{
		pipelines.NewCodeBuildStep(jsii.String("RunApproval"), &CodeBuildStepProps{
			Commands: []*string{
				jsii.String("command-from-image"),
			},
			BuildEnvironment: &BuildEnvironment{
				// The user of a Docker image asset in the pipeline requires turning on
				// 'dockerEnabledForSelfMutation'.
				BuildImage: codebuild.LinuxBuildImage_FromAsset(this, jsii.String("Image"), &DockerImageAssetProps{
					Directory: jsii.String("./docker-image"),
				}),
			},
		}),
	},
})

type DockerCredential

type DockerCredential interface {
	Usages() *[]DockerCredentialUsage
	// Grant read-only access to the registry credentials.
	//
	// This grants read access to any secrets, and pull access to any repositories.
	GrantRead(grantee awsiam.IGrantable, usage DockerCredentialUsage)
}

Represents credentials used to access a Docker registry.

Example:

dockerHubSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("DHSecret"), jsii.String("arn:aws:..."))
customRegSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("CRSecret"), jsii.String("arn:aws:..."))
repo1 := ecr.Repository_FromRepositoryArn(this, jsii.String("Repo"), jsii.String("arn:aws:ecr:eu-west-1:0123456789012:repository/Repo1"))
repo2 := ecr.Repository_FromRepositoryArn(this, jsii.String("Repo"), jsii.String("arn:aws:ecr:eu-west-1:0123456789012:repository/Repo2"))

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	DockerCredentials: []dockerCredential{
		pipelines.*dockerCredential_DockerHub(dockerHubSecret),
		pipelines.*dockerCredential_CustomRegistry(jsii.String("dockerregistry.example.com"), customRegSecret),
		pipelines.*dockerCredential_Ecr([]iRepository{
			repo1,
			repo2,
		}),
	},
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

func DockerCredential_CustomRegistry

func DockerCredential_CustomRegistry(registryDomain *string, secret awssecretsmanager.ISecret, opts *ExternalDockerCredentialOptions) DockerCredential

Creates a DockerCredential for a registry, based on its domain name (e.g., 'www.example.com').

func DockerCredential_DockerHub

func DockerCredential_DockerHub(secret awssecretsmanager.ISecret, opts *ExternalDockerCredentialOptions) DockerCredential

Creates a DockerCredential for DockerHub.

Convenience method for `customRegistry('https://index.docker.io/v1/', opts)`.

func DockerCredential_Ecr

func DockerCredential_Ecr(repositories *[]awsecr.IRepository, opts *EcrDockerCredentialOptions) DockerCredential

Creates a DockerCredential for one or more ECR repositories.

NOTE - All ECR repositories in the same account and region share a domain name (e.g., 0123456789012.dkr.ecr.eu-west-1.amazonaws.com), and can only have one associated set of credentials (and DockerCredential). Attempting to associate one set of credentials with one ECR repo and another with another ECR repo in the same account and region will result in failures when using these credentials in the pipeline.

type DockerCredentialUsage

type DockerCredentialUsage string

Defines which stages of a pipeline require the specified credentials.

Example:

dockerHubSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("DHSecret"), jsii.String("arn:aws:..."))
// Only the image asset publishing actions will be granted read access to the secret.
creds := pipelines.DockerCredential_DockerHub(dockerHubSecret, &ExternalDockerCredentialOptions{
	Usages: []dockerCredentialUsage{
		pipelines.*dockerCredentialUsage_ASSET_PUBLISHING,
	},
})
const (
	// Synth/Build.
	DockerCredentialUsage_SYNTH DockerCredentialUsage = "SYNTH"
	// Self-update.
	DockerCredentialUsage_SELF_UPDATE DockerCredentialUsage = "SELF_UPDATE"
	// Asset publishing.
	DockerCredentialUsage_ASSET_PUBLISHING DockerCredentialUsage = "ASSET_PUBLISHING"
)

type ECRSourceOptions added in v2.14.0

type ECRSourceOptions struct {
	// The action name used for this source in the CodePipeline.
	// Default: - The repository name.
	//
	ActionName *string `field:"optional" json:"actionName" yaml:"actionName"`
	// The image tag that will be checked for changes.
	// Default: latest.
	//
	ImageTag *string `field:"optional" json:"imageTag" yaml:"imageTag"`
}

Options for ECR sources.

Example:

var repository iRepository

pipelines.CodePipelineSource_Ecr(repository, &ECRSourceOptions{
	ImageTag: jsii.String("latest"),
})

type EcrDockerCredentialOptions

type EcrDockerCredentialOptions struct {
	// An IAM role to assume prior to accessing the secret.
	// Default: - none. The current execution role will be used.
	//
	AssumeRole awsiam.IRole `field:"optional" json:"assumeRole" yaml:"assumeRole"`
	// Defines which stages of the pipeline should be granted access to these credentials.
	// Default: - all relevant stages (synth, self-update, asset publishing) are granted access.
	//
	Usages *[]DockerCredentialUsage `field:"optional" json:"usages" yaml:"usages"`
}

Options for defining access for a Docker Credential composed of ECR repos.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var role role

ecrDockerCredentialOptions := &EcrDockerCredentialOptions{
	AssumeRole: role,
	Usages: []dockerCredentialUsage{
		awscdk.Pipelines.*dockerCredentialUsage_SYNTH,
	},
}

type ExternalDockerCredentialOptions

type ExternalDockerCredentialOptions struct {
	// An IAM role to assume prior to accessing the secret.
	// Default: - none. The current execution role will be used.
	//
	AssumeRole awsiam.IRole `field:"optional" json:"assumeRole" yaml:"assumeRole"`
	// The name of the JSON field of the secret which contains the secret/password.
	// Default: 'secret'.
	//
	SecretPasswordField *string `field:"optional" json:"secretPasswordField" yaml:"secretPasswordField"`
	// The name of the JSON field of the secret which contains the user/login name.
	// Default: 'username'.
	//
	SecretUsernameField *string `field:"optional" json:"secretUsernameField" yaml:"secretUsernameField"`
	// Defines which stages of the pipeline should be granted access to these credentials.
	// Default: - all relevant stages (synth, self-update, asset publishing) are granted access.
	//
	Usages *[]DockerCredentialUsage `field:"optional" json:"usages" yaml:"usages"`
}

Options for defining credentials for a Docker Credential.

Example:

dockerHubSecret := secretsmanager.Secret_FromSecretCompleteArn(this, jsii.String("DHSecret"), jsii.String("arn:aws:..."))
// Only the image asset publishing actions will be granted read access to the secret.
creds := pipelines.DockerCredential_DockerHub(dockerHubSecret, &ExternalDockerCredentialOptions{
	Usages: []dockerCredentialUsage{
		pipelines.*dockerCredentialUsage_ASSET_PUBLISHING,
	},
})

type FileSet

type FileSet interface {
	IFileSetProducer
	// Human-readable descriptor for this file set (does not need to be unique).
	Id() *string
	// The primary output of a file set producer.
	//
	// The primary output of a FileSet is itself.
	PrimaryOutput() FileSet
	// The Step that produces this FileSet.
	Producer() Step
	// Mark the given Step as the producer for this FileSet.
	//
	// This method can only be called once.
	ProducedBy(producer Step)
	// Return a string representation of this FileSet.
	ToString() *string
}

A set of files traveling through the deployment pipeline.

Individual steps in the pipeline produce or consume `FileSet`s.

Example:

type myJenkinsStep struct {
	step
}

func newMyJenkinsStep(provider jenkinsProvider, input fileSet) *myJenkinsStep {
	this := &myJenkinsStep{}
	pipelines.NewStep_Override(this, jsii.String("MyJenkinsStep"))

	// This is necessary if your step accepts parameters, like environment variables,
	// that may contain outputs from other steps. It doesn't matter what the
	// structure is, as long as it contains the values that may contain outputs.
	this.DiscoverReferencedOutputs(map[string]map[string]interface{}{
		"env": map[string]interface{}{
		},
	})
	return this
}

func (this *myJenkinsStep) produceAction(stage iStage, options produceActionOptions) codePipelineActionFactoryResult {

	// This is where you control what type of Action gets added to the
	// CodePipeline
	*stage.AddAction(cpactions.NewJenkinsAction(&JenkinsActionProps{
		// Copy 'actionName' and 'runOrder' from the options
		ActionName: options.ActionName,
		RunOrder: options.RunOrder,

		// Jenkins-specific configuration
		Type: cpactions.JenkinsActionType_TEST,
		JenkinsProvider: this.provider,
		ProjectName: jsii.String("MyJenkinsProject"),

		// Translate the FileSet into a codepipeline.Artifact
		Inputs: []artifact{
			options.Artifacts.ToCodePipeline(this.input),
		},
	}))

	return &codePipelineActionFactoryResult{
		RunOrdersConsumed: jsii.Number(1),
	}
}

func NewFileSet

func NewFileSet(id *string, producer Step) FileSet

type FileSetLocation

type FileSetLocation struct {
	// The (relative) directory where the FileSet is found.
	Directory *string `field:"required" json:"directory" yaml:"directory"`
	// The FileSet object.
	FileSet FileSet `field:"required" json:"fileSet" yaml:"fileSet"`
}

Location of a FileSet consumed or produced by a ShellStep.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var fileSet fileSet

fileSetLocation := &FileSetLocation{
	Directory: jsii.String("directory"),
	FileSet: fileSet,
}

type GitHubSourceOptions

type GitHubSourceOptions struct {
	// The action name used for this source in the CodePipeline.
	// Default: - The repository string.
	//
	ActionName *string `field:"optional" json:"actionName" yaml:"actionName"`
	// A GitHub OAuth token to use for authentication.
	//
	// It is recommended to use a Secrets Manager `Secret` to obtain the token:
	//
	// ```ts
	// const oauth = cdk.SecretValue.secretsManager('my-github-token');
	// ```
	//
	// The GitHub Personal Access Token should have these scopes:
	//
	// * **repo** - to read the repository
	// * **admin:repo_hook** - if you plan to use webhooks (true by default).
	// See: https://docs.aws.amazon.com/codepipeline/latest/userguide/GitHub-create-personal-token-CLI.html
	//
	// Default: - SecretValue.secretsManager('github-token')
	//
	Authentication awscdk.SecretValue `field:"optional" json:"authentication" yaml:"authentication"`
	// How AWS CodePipeline should be triggered.
	//
	// With the default value "WEBHOOK", a webhook is created in GitHub that triggers the action.
	// With "POLL", CodePipeline periodically checks the source for changes.
	// With "None", the action is not triggered through changes in the source.
	//
	// To use `WEBHOOK`, your GitHub Personal Access Token should have
	// **admin:repo_hook** scope (in addition to the regular **repo** scope).
	// Default: GitHubTrigger.WEBHOOK
	//
	Trigger awscodepipelineactions.GitHubTrigger `field:"optional" json:"trigger" yaml:"trigger"`
}

Options for GitHub sources.

Example:

pipelines.CodePipelineSource_GitHub(jsii.String("org/repo"), jsii.String("branch"), &GitHubSourceOptions{
	// This is optional
	Authentication: cdk.SecretValue_SecretsManager(jsii.String("my-token")),
})

type ICodePipelineActionFactory

type ICodePipelineActionFactory interface {
	// Create the desired Action and add it to the pipeline.
	ProduceAction(stage awscodepipeline.IStage, options *ProduceActionOptions) *CodePipelineActionFactoryResult
}

Factory for explicit CodePipeline Actions.

If you have specific types of Actions you want to add to a CodePipeline, write a subclass of `Step` that implements this interface, and add the action or actions you want in the `produce` method.

There needs to be a level of indirection here, because some aspects of the Action creation need to be controlled by the workflow engine (name and runOrder). All the rest of the properties are controlled by the factory.

type IFileSetProducer

type IFileSetProducer interface {
	// The `FileSet` produced by this file set producer.
	// Default: - This producer doesn't produce any file set.
	//
	PrimaryOutput() FileSet
}

Any class that produces, or is itself, a `FileSet`.

Steps implicitly produce a primary FileSet as an output.

type ManualApprovalStep

type ManualApprovalStep interface {
	Step
	// The comment associated with this manual approval.
	// Default: - No comment.
	//
	Comment() *string
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Identifier for this step.
	Id() *string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	// Return a string representation of this Step.
	ToString() *string
}

A manual approval step.

If this step is added to a Pipeline, the Pipeline will be paused waiting for a human to resume it

Only engines that support pausing the deployment will support this step type.

Example:

var pipeline codePipeline

preprod := NewMyApplicationStage(this, jsii.String("PreProd"))
prod := NewMyApplicationStage(this, jsii.String("Prod"))

pipeline.AddStage(preprod, &AddStageOpts{
	Post: []step{
		pipelines.NewShellStep(jsii.String("Validate Endpoint"), &ShellStepProps{
			Commands: []*string{
				jsii.String("curl -Ssf https://my.webservice.com/"),
			},
		}),
	},
})
pipeline.AddStage(prod, &AddStageOpts{
	Pre: []step{
		pipelines.NewManualApprovalStep(jsii.String("PromoteToProd")),
	},
})

func NewManualApprovalStep

func NewManualApprovalStep(id *string, props *ManualApprovalStepProps) ManualApprovalStep

type ManualApprovalStepProps

type ManualApprovalStepProps struct {
	// The comment to display with this manual approval.
	// Default: - No comment.
	//
	Comment *string `field:"optional" json:"comment" yaml:"comment"`
}

Construction properties for a `ManualApprovalStep`.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

manualApprovalStepProps := &ManualApprovalStepProps{
	Comment: jsii.String("comment"),
}

type PermissionsBroadeningCheckProps

type PermissionsBroadeningCheckProps struct {
	// The CDK Stage object to check the stacks of.
	//
	// This should be the same Stage object you are passing to `addStage()`.
	Stage awscdk.Stage `field:"required" json:"stage" yaml:"stage"`
	// Topic to send notifications when a human needs to give manual confirmation.
	// Default: - no notification.
	//
	NotificationTopic awssns.ITopic `field:"optional" json:"notificationTopic" yaml:"notificationTopic"`
}

Properties for a `PermissionsBroadeningCheck`.

Example:

var pipeline codePipeline

stage := NewMyApplicationStage(this, jsii.String("MyApplication"))
pipeline.AddStage(stage, &AddStageOpts{
	Pre: []step{
		pipelines.NewConfirmPermissionsBroadening(jsii.String("Check"), &PermissionsBroadeningCheckProps{
			Stage: stage,
		}),
	},
})

type PipelineBase

type PipelineBase interface {
	constructs.Construct
	// The FileSet that contains the cloud assembly.
	//
	// This is the primary output of the synth step.
	CloudAssemblyFileSet() FileSet
	// The tree node.
	Node() constructs.Node
	// The build step that produces the CDK Cloud Assembly.
	Synth() IFileSetProducer
	// The waves in this pipeline.
	Waves() *[]Wave
	// Deploy a single Stage by itself.
	//
	// Add a Stage to the pipeline, to be deployed in sequence with other
	// Stages added to the pipeline. All Stacks in the stage will be deployed
	// in an order automatically determined by their relative dependencies.
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
	// Add a Wave to the pipeline, for deploying multiple Stages in parallel.
	//
	// Use the return object of this method to deploy multiple stages in parallel.
	//
	// Example:
	//
	// ```ts
	// declare const pipeline: pipelines.CodePipeline;
	//
	// const wave = pipeline.addWave('MyWave');
	// wave.addStage(new MyApplicationStage(this, 'Stage1'));
	// wave.addStage(new MyApplicationStage(this, 'Stage2'));
	// ```
	AddWave(id *string, options *WaveOptions) Wave
	// Send the current pipeline definition to the engine, and construct the pipeline.
	//
	// It is not possible to modify the pipeline after calling this method.
	BuildPipeline()
	// Implemented by subclasses to do the actual pipeline construction.
	DoBuildPipeline()
	// Returns a string representation of this construct.
	ToString() *string
}

A generic CDK Pipelines pipeline.

Different deployment systems will provide subclasses of `Pipeline` that generate the deployment infrastructure necessary to deploy CDK apps, specific to that system.

This library comes with the `CodePipeline` class, which uses AWS CodePipeline to deploy CDK apps.

The actual pipeline infrastructure is constructed (by invoking the engine) when `buildPipeline()` is called, or when `app.synth()` is called (whichever happens first).

type PipelineBaseProps

type PipelineBaseProps struct {
	// The build step that produces the CDK Cloud Assembly.
	//
	// The primary output of this step needs to be the `cdk.out` directory
	// generated by the `cdk synth` command.
	//
	// If you use a `ShellStep` here and you don't configure an output directory,
	// the output directory will automatically be assumed to be `cdk.out`.
	Synth IFileSetProducer `field:"required" json:"synth" yaml:"synth"`
}

Properties for a `Pipeline`.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var fileSetProducer iFileSetProducer

pipelineBaseProps := &PipelineBaseProps{
	Synth: fileSetProducer,
}

type ProduceActionOptions

type ProduceActionOptions struct {
	// Name the action should get.
	ActionName *string `field:"required" json:"actionName" yaml:"actionName"`
	// Helper object to translate FileSets to CodePipeline Artifacts.
	Artifacts ArtifactMap `field:"required" json:"artifacts" yaml:"artifacts"`
	// The pipeline the action is being generated for.
	Pipeline CodePipeline `field:"required" json:"pipeline" yaml:"pipeline"`
	// RunOrder the action should get.
	RunOrder *float64 `field:"required" json:"runOrder" yaml:"runOrder"`
	// Scope in which to create constructs.
	Scope constructs.Construct `field:"required" json:"scope" yaml:"scope"`
	// Helper object to produce variables exported from stack deployments.
	//
	// If your step references outputs from a stack deployment, use
	// this to map the output references to Codepipeline variable names.
	//
	// Note - Codepipeline variables can only be referenced in action
	// configurations.
	StackOutputsMap StackOutputsMap `field:"required" json:"stackOutputsMap" yaml:"stackOutputsMap"`
	// Whether or not this action is inserted before self mutation.
	//
	// If it is, the action should take care to reflect some part of
	// its own definition in the pipeline action definition, to
	// trigger a restart after self-mutation (if necessary).
	// Default: false.
	//
	BeforeSelfMutation *bool `field:"optional" json:"beforeSelfMutation" yaml:"beforeSelfMutation"`
	// If this action factory creates a CodeBuild step, default options to inherit.
	// Default: - No CodeBuild project defaults.
	//
	CodeBuildDefaults *CodeBuildOptions `field:"optional" json:"codeBuildDefaults" yaml:"codeBuildDefaults"`
	// An input artifact that CodeBuild projects that don't actually need an input artifact can use.
	//
	// CodeBuild Projects MUST have an input artifact in order to be added to the Pipeline. If
	// the Project doesn't actually care about its input (it can be anything), it can use the
	// Artifact passed here.
	// Default: - A fallback artifact does not exist.
	//
	FallbackArtifact awscodepipeline.Artifact `field:"optional" json:"fallbackArtifact" yaml:"fallbackArtifact"`
	// If this step is producing outputs, the variables namespace assigned to it.
	//
	// Pass this on to the Action you are creating.
	// Default: - Step doesn't produce any outputs.
	//
	VariablesNamespace *string `field:"optional" json:"variablesNamespace" yaml:"variablesNamespace"`
}

Options for the `CodePipelineActionFactory.produce()` method.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import constructs "github.com/aws/constructs-go/constructs"

var artifact artifact
var artifactMap artifactMap
var bucket bucket
var buildImage iBuildImage
var buildSpec buildSpec
var cache cache
var codePipeline codePipeline
var construct construct
var fileSystemLocation iFileSystemLocation
var logGroup logGroup
var policyStatement policyStatement
var securityGroup securityGroup
var stackOutputsMap stackOutputsMap
var subnet subnet
var subnetFilter subnetFilter
var value interface{}
var vpc vpc

produceActionOptions := &ProduceActionOptions{
	ActionName: jsii.String("actionName"),
	Artifacts: artifactMap,
	Pipeline: codePipeline,
	RunOrder: jsii.Number(123),
	Scope: construct,
	StackOutputsMap: stackOutputsMap,

	// the properties below are optional
	BeforeSelfMutation: jsii.Boolean(false),
	CodeBuildDefaults: &CodeBuildOptions{
		BuildEnvironment: &BuildEnvironment{
			BuildImage: buildImage,
			Certificate: &BuildEnvironmentCertificate{
				Bucket: bucket,
				ObjectKey: jsii.String("objectKey"),
			},
			ComputeType: awscdk.Aws_codebuild.ComputeType_SMALL,
			EnvironmentVariables: map[string]buildEnvironmentVariable{
				"environmentVariablesKey": &buildEnvironmentVariable{
					"value": value,

					// the properties below are optional
					"type": awscdk.*Aws_codebuild.BuildEnvironmentVariableType_PLAINTEXT,
				},
			},
			Privileged: jsii.Boolean(false),
		},
		Cache: cache,
		FileSystemLocations: []*iFileSystemLocation{
			fileSystemLocation,
		},
		Logging: &LoggingOptions{
			CloudWatch: &CloudWatchLoggingOptions{
				Enabled: jsii.Boolean(false),
				LogGroup: logGroup,
				Prefix: jsii.String("prefix"),
			},
			S3: &S3LoggingOptions{
				Bucket: bucket,

				// the properties below are optional
				Enabled: jsii.Boolean(false),
				Encrypted: jsii.Boolean(false),
				Prefix: jsii.String("prefix"),
			},
		},
		PartialBuildSpec: buildSpec,
		RolePolicy: []*policyStatement{
			policyStatement,
		},
		SecurityGroups: []iSecurityGroup{
			securityGroup,
		},
		SubnetSelection: &SubnetSelection{
			AvailabilityZones: []*string{
				jsii.String("availabilityZones"),
			},
			OnePerAz: jsii.Boolean(false),
			SubnetFilters: []*subnetFilter{
				subnetFilter,
			},
			SubnetGroupName: jsii.String("subnetGroupName"),
			Subnets: []iSubnet{
				subnet,
			},
			SubnetType: awscdk.Aws_ec2.SubnetType_PRIVATE_ISOLATED,
		},
		Timeout: cdk.Duration_Minutes(jsii.Number(30)),
		Vpc: vpc,
	},
	FallbackArtifact: artifact,
	VariablesNamespace: jsii.String("variablesNamespace"),
}

type S3SourceOptions

type S3SourceOptions struct {
	// The action name used for this source in the CodePipeline.
	// Default: - The bucket name.
	//
	ActionName *string `field:"optional" json:"actionName" yaml:"actionName"`
	// The role that will be assumed by the pipeline prior to executing the `S3Source` action.
	// Default: - a new role will be generated.
	//
	Role awsiam.IRole `field:"optional" json:"role" yaml:"role"`
	// How should CodePipeline detect source changes for this Action.
	//
	// Note that if this is S3Trigger.EVENTS, you need to make sure to include the source Bucket in a CloudTrail Trail,
	// as otherwise the CloudWatch Events will not be emitted.
	// See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/log-s3-data-events.html
	//
	// Default: S3Trigger.POLL
	//
	Trigger awscodepipelineactions.S3Trigger `field:"optional" json:"trigger" yaml:"trigger"`
}

Options for S3 sources.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var role role

s3SourceOptions := &S3SourceOptions{
	ActionName: jsii.String("actionName"),
	Role: role,
	Trigger: awscdk.Aws_codepipeline_actions.S3Trigger_NONE,
}

type ShellStep

type ShellStep interface {
	Step
	// Commands to run.
	Commands() *[]*string
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Environment variables to set.
	// Default: - No environment variables.
	//
	Env() *map[string]*string
	// Set environment variables based on Stack Outputs.
	// Default: - No environment variables created from stack outputs.
	//
	EnvFromCfnOutputs() *map[string]StackOutputReference
	// Identifier for this step.
	Id() *string
	// Input FileSets.
	//
	// A list of `(FileSet, directory)` pairs, which are a copy of the
	// input properties. This list should not be modified directly.
	Inputs() *[]*FileSetLocation
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	// Default: - No installation commands.
	//
	InstallCommands() *[]*string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// Output FileSets.
	//
	// A list of `(FileSet, directory)` pairs, which are a copy of the
	// input properties. This list should not be modified directly.
	Outputs() *[]*FileSetLocation
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add an additional output FileSet based on a directory.
	//
	// After running the script, the contents of the given directory
	// will be exported as a `FileSet`. Use the `FileSet` as the
	// input to another step.
	//
	// Multiple calls with the exact same directory name string (not normalized)
	// will return the same FileSet.
	AddOutputDirectory(directory *string) FileSet
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	// Configure the given output directory as primary output.
	//
	// If no primary output has been configured yet, this directory
	// will become the primary output of this ShellStep, otherwise this
	// method will throw if the given directory is different than the
	// currently configured primary output directory.
	PrimaryOutputDirectory(directory *string) FileSet
	// Return a string representation of this Step.
	ToString() *string
}

Run shell script commands in the pipeline.

This is a generic step designed to be deployment engine agnostic.

Example:

// Modern API
modernPipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	SelfMutation: jsii.Boolean(false),
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

// Original API
cloudAssemblyArtifact := codepipeline.NewArtifact()
originalPipeline := pipelines.NewCdkPipeline(this, jsii.String("Pipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(false),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})

func NewShellStep

func NewShellStep(id *string, props *ShellStepProps) ShellStep

type ShellStepProps

type ShellStepProps struct {
	// Commands to run.
	Commands *[]*string `field:"required" json:"commands" yaml:"commands"`
	// Additional FileSets to put in other directories.
	//
	// Specifies a mapping from directory name to FileSets. During the
	// script execution, the FileSets will be available in the directories
	// indicated.
	//
	// The directory names may be relative. For example, you can put
	// the main input and an additional input side-by-side with the
	// following configuration:
	//
	// ```ts
	// const script = new pipelines.ShellStep('MainScript', {
	//   commands: ['npm ci','npm run build','npx cdk synth'],
	//   input: pipelines.CodePipelineSource.gitHub('org/source1', 'main'),
	//   additionalInputs: {
	//     '../siblingdir': pipelines.CodePipelineSource.gitHub('org/source2', 'main'),
	//   }
	// });
	// ```.
	// Default: - No additional inputs.
	//
	AdditionalInputs *map[string]IFileSetProducer `field:"optional" json:"additionalInputs" yaml:"additionalInputs"`
	// Environment variables to set.
	// Default: - No environment variables.
	//
	Env *map[string]*string `field:"optional" json:"env" yaml:"env"`
	// Set environment variables based on Stack Outputs.
	//
	// `ShellStep`s following stack or stage deployments may
	// access the `CfnOutput`s of those stacks to get access to
	// --for example--automatically generated resource names or
	// endpoint URLs.
	// Default: - No environment variables created from stack outputs.
	//
	EnvFromCfnOutputs *map[string]awscdk.CfnOutput `field:"optional" json:"envFromCfnOutputs" yaml:"envFromCfnOutputs"`
	// FileSet to run these scripts on.
	//
	// The files in the FileSet will be placed in the working directory when
	// the script is executed. Use `additionalInputs` to download file sets
	// to other directories as well.
	// Default: - No input specified.
	//
	Input IFileSetProducer `field:"optional" json:"input" yaml:"input"`
	// Installation commands to run before the regular commands.
	//
	// For deployment engines that support it, install commands will be classified
	// differently in the job history from the regular `commands`.
	// Default: - No installation commands.
	//
	InstallCommands *[]*string `field:"optional" json:"installCommands" yaml:"installCommands"`
	// The directory that will contain the primary output fileset.
	//
	// After running the script, the contents of the given directory
	// will be treated as the primary output of this Step.
	// Default: - No primary output.
	//
	PrimaryOutputDirectory *string `field:"optional" json:"primaryOutputDirectory" yaml:"primaryOutputDirectory"`
}

Construction properties for a `ShellStep`.

Example:

// Modern API
modernPipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	SelfMutation: jsii.Boolean(false),
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),
})

// Original API
cloudAssemblyArtifact := codepipeline.NewArtifact()
originalPipeline := pipelines.NewCdkPipeline(this, jsii.String("Pipeline"), &cdkPipelineProps{
	selfMutating: jsii.Boolean(false),
	cloudAssemblyArtifact: cloudAssemblyArtifact,
})

type StackAsset

type StackAsset struct {
	// Asset identifier.
	AssetId *string `field:"required" json:"assetId" yaml:"assetId"`
	// Absolute asset manifest path.
	//
	// This needs to be made relative at a later point in time, but when this
	// information is parsed we don't know about the root cloud assembly yet.
	AssetManifestPath *string `field:"required" json:"assetManifestPath" yaml:"assetManifestPath"`
	// Asset selector to pass to `cdk-assets`.
	AssetSelector *string `field:"required" json:"assetSelector" yaml:"assetSelector"`
	// Type of asset to publish.
	AssetType AssetType `field:"required" json:"assetType" yaml:"assetType"`
	// Does this asset represent the CloudFormation template for the stack.
	// Default: false.
	//
	IsTemplate *bool `field:"required" json:"isTemplate" yaml:"isTemplate"`
	// Role ARN to assume to publish.
	// Default: - No need to assume any role.
	//
	AssetPublishingRoleArn *string `field:"optional" json:"assetPublishingRoleArn" yaml:"assetPublishingRoleArn"`
}

An asset used by a Stack.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

stackAsset := &StackAsset{
	AssetId: jsii.String("assetId"),
	AssetManifestPath: jsii.String("assetManifestPath"),
	AssetSelector: jsii.String("assetSelector"),
	AssetType: awscdk.Pipelines.AssetType_FILE,
	IsTemplate: jsii.Boolean(false),

	// the properties below are optional
	AssetPublishingRoleArn: jsii.String("assetPublishingRoleArn"),
}

type StackDeployment

type StackDeployment interface {
	// Template path on disk to CloudAssembly.
	AbsoluteTemplatePath() *string
	// Account where the stack should be deployed.
	// Default: - Pipeline account.
	//
	Account() *string
	// Assets referenced by this stack.
	Assets() *[]*StackAsset
	// Role to assume before deploying this stack.
	// Default: - Don't assume any role.
	//
	AssumeRoleArn() *string
	// Steps that take place after stack is prepared but before stack deploys.
	//
	// Your pipeline engine may not disable `prepareStep`.
	ChangeSet() *[]Step
	// Construct path for this stack.
	ConstructPath() *string
	// Execution role to pass to CloudFormation.
	// Default: - No execution role.
	//
	ExecutionRoleArn() *string
	// Steps to execute after stack deploys.
	Post() *[]Step
	// Steps that take place before stack is prepared.
	//
	// If your pipeline engine disables 'prepareStep', then this will happen before stack deploys.
	Pre() *[]Step
	// Region where the stack should be deployed.
	// Default: - Pipeline region.
	//
	Region() *string
	// Artifact ID for this stack.
	StackArtifactId() *string
	// Other stacks this stack depends on.
	StackDependencies() *[]StackDeployment
	// Name for this stack.
	StackName() *string
	// Tags to apply to the stack.
	Tags() *map[string]*string
	// The asset that represents the CloudFormation template for this stack.
	TemplateAsset() *StackAsset
	// The S3 URL which points to the template asset location in the publishing bucket.
	//
	// This is `undefined` if the stack template is not published. Use the
	// `DefaultStackSynthesizer` to ensure it is.
	//
	// Example value: `https://bucket.s3.amazonaws.com/object/key`
	TemplateUrl() *string
	// Add a dependency on another stack.
	AddStackDependency(stackDeployment StackDeployment)
	// Adds steps to each phase of the stack.
	AddStackSteps(pre *[]Step, changeSet *[]Step, post *[]Step)
}

Deployment of a single Stack.

You don't need to instantiate this class -- it will be automatically instantiated as necessary when you add a `Stage` to a pipeline.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var cloudFormationStackArtifact cloudFormationStackArtifact

stackDeployment := awscdk.Pipelines.StackDeployment_FromArtifact(cloudFormationStackArtifact)

func StackDeployment_FromArtifact

func StackDeployment_FromArtifact(stackArtifact cxapi.CloudFormationStackArtifact) StackDeployment

Build a `StackDeployment` from a Stack Artifact in a Cloud Assembly.

type StackDeploymentProps

type StackDeploymentProps struct {
	// Template path on disk to cloud assembly (cdk.out).
	AbsoluteTemplatePath *string `field:"required" json:"absoluteTemplatePath" yaml:"absoluteTemplatePath"`
	// Construct path for this stack.
	ConstructPath *string `field:"required" json:"constructPath" yaml:"constructPath"`
	// Artifact ID for this stack.
	StackArtifactId *string `field:"required" json:"stackArtifactId" yaml:"stackArtifactId"`
	// Name for this stack.
	StackName *string `field:"required" json:"stackName" yaml:"stackName"`
	// Account where the stack should be deployed.
	// Default: - Pipeline account.
	//
	Account *string `field:"optional" json:"account" yaml:"account"`
	// Assets referenced by this stack.
	// Default: - No assets.
	//
	Assets *[]*StackAsset `field:"optional" json:"assets" yaml:"assets"`
	// Role to assume before deploying this stack.
	// Default: - Don't assume any role.
	//
	AssumeRoleArn *string `field:"optional" json:"assumeRoleArn" yaml:"assumeRoleArn"`
	// Execution role to pass to CloudFormation.
	// Default: - No execution role.
	//
	ExecutionRoleArn *string `field:"optional" json:"executionRoleArn" yaml:"executionRoleArn"`
	// Region where the stack should be deployed.
	// Default: - Pipeline region.
	//
	Region *string `field:"optional" json:"region" yaml:"region"`
	// Tags to apply to the stack.
	// Default: - No tags.
	//
	Tags *map[string]*string `field:"optional" json:"tags" yaml:"tags"`
	// The S3 URL which points to the template asset location in the publishing bucket.
	// Default: - Stack template is not published.
	//
	TemplateS3Uri *string `field:"optional" json:"templateS3Uri" yaml:"templateS3Uri"`
}

Properties for a `StackDeployment`.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

stackDeploymentProps := &StackDeploymentProps{
	AbsoluteTemplatePath: jsii.String("absoluteTemplatePath"),
	ConstructPath: jsii.String("constructPath"),
	StackArtifactId: jsii.String("stackArtifactId"),
	StackName: jsii.String("stackName"),

	// the properties below are optional
	Account: jsii.String("account"),
	Assets: []stackAsset{
		&stackAsset{
			AssetId: jsii.String("assetId"),
			AssetManifestPath: jsii.String("assetManifestPath"),
			AssetSelector: jsii.String("assetSelector"),
			AssetType: awscdk.Pipelines.AssetType_FILE,
			IsTemplate: jsii.Boolean(false),

			// the properties below are optional
			AssetPublishingRoleArn: jsii.String("assetPublishingRoleArn"),
		},
	},
	AssumeRoleArn: jsii.String("assumeRoleArn"),
	ExecutionRoleArn: jsii.String("executionRoleArn"),
	Region: jsii.String("region"),
	Tags: map[string]*string{
		"tagsKey": jsii.String("tags"),
	},
	TemplateS3Uri: jsii.String("templateS3Uri"),
}

type StackOutputReference

type StackOutputReference interface {
	// Output name of the producing stack.
	OutputName() *string
	// A human-readable description of the producing stack.
	StackDescription() *string
	// Whether or not this stack output is being produced by the given Stack deployment.
	IsProducedBy(stack StackDeployment) *bool
}

A Reference to a Stack Output.

Example:

type myLambdaStep struct {
	step
	stackOutputReference stackOutputReference
}

func newMyLambdaStep(fn function, stackOutput cfnOutput) *myLambdaStep {
	this := &myLambdaStep{}
	pipelines.NewStep_Override(this, jsii.String("MyLambdaStep"))
	this.stackOutputReference = pipelines.stackOutputReference_FromCfnOutput(stackOutput)
	return this
}

func (this *myLambdaStep) produceAction(stage iStage, options produceActionOptions) codePipelineActionFactoryResult {

	*stage.AddAction(cpactions.NewLambdaInvokeAction(&LambdaInvokeActionProps{
		ActionName: options.ActionName,
		RunOrder: options.RunOrder,
		// Map the reference to the variable name the CDK has generated for you.
		UserParameters: map[string]interface{}{
			"stackOutput": options.stackOutputsMap.toCodePipeline(this.stackOutputReference),
		},
		Lambda: this.fn,
	}))

	return &codePipelineActionFactoryResult{
		RunOrdersConsumed: jsii.Number(1),
	}
}

func (this *myLambdaStep) consumedStackOutputs() *[]pipelines.StackOutputReference {
	return &[]pipelines.StackOutputReference{this.stackOutputReference}
}

func StackOutputReference_FromCfnOutput

func StackOutputReference_FromCfnOutput(output awscdk.CfnOutput) StackOutputReference

Create a StackOutputReference that references the given CfnOutput.

type StackOutputsMap added in v2.60.0

type StackOutputsMap interface {
	// Return the matching variable reference string for a StackOutputReference.
	ToCodePipeline(x StackOutputReference) *string
}

Translate stack outputs to CodePipeline variable references.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var pipelineBase pipelineBase

stackOutputsMap := awscdk.Pipelines.NewStackOutputsMap(pipelineBase)

func NewStackOutputsMap added in v2.60.0

func NewStackOutputsMap(pipeline PipelineBase) StackOutputsMap

type StackSteps

type StackSteps struct {
	// The stack you want the steps to run in.
	Stack awscdk.Stack `field:"required" json:"stack" yaml:"stack"`
	// Steps that execute after stack is prepared but before stack is deployed.
	// Default: - no additional steps.
	//
	ChangeSet *[]Step `field:"optional" json:"changeSet" yaml:"changeSet"`
	// Steps that execute after stack is deployed.
	// Default: - no additional steps.
	//
	Post *[]Step `field:"optional" json:"post" yaml:"post"`
	// Steps that execute before stack is prepared.
	// Default: - no additional steps.
	//
	Pre *[]Step `field:"optional" json:"pre" yaml:"pre"`
}

Instructions for additional steps that are run at stack level.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var stack stack
var step step

stackSteps := &StackSteps{
	Stack: stack,

	// the properties below are optional
	ChangeSet: []*step{
		step,
	},
	Post: []*step{
		step,
	},
	Pre: []*step{
		step,
	},
}

type StageDeployment

type StageDeployment interface {
	// Additional steps that are run after all of the stacks in the stage.
	Post() *[]Step
	// Additional steps that are run before any of the stacks in the stage.
	Pre() *[]Step
	// Determine if all stacks in stage should be deployed with prepare step or not.
	PrepareStep() *bool
	// The stacks deployed in this stage.
	Stacks() *[]StackDeployment
	// Instructions for additional steps that are run at stack level.
	StackSteps() *[]*StackSteps
	// The display name of this stage.
	StageName() *string
	// Add an additional step to run after all of the stacks in this stage.
	AddPost(steps ...Step)
	// Add an additional step to run before any of the stacks in this stage.
	AddPre(steps ...Step)
}

Deployment of a single `Stage`.

A `Stage` consists of one or more `Stacks`, which will be deployed in dependency order.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var stack stack
var stage stage
var step step

stageDeployment := awscdk.Pipelines.StageDeployment_FromStage(stage, &StageDeploymentProps{
	Post: []*step{
		step,
	},
	Pre: []*step{
		step,
	},
	StackSteps: []stackSteps{
		&stackSteps{
			Stack: stack,

			// the properties below are optional
			ChangeSet: []*step{
				step,
			},
			Post: []*step{
				step,
			},
			Pre: []*step{
				step,
			},
		},
	},
	StageName: jsii.String("stageName"),
})

func StageDeployment_FromStage

func StageDeployment_FromStage(stage awscdk.Stage, props *StageDeploymentProps) StageDeployment

Create a new `StageDeployment` from a `Stage`.

Synthesizes the target stage, and deploys the stacks found inside in dependency order.

type StageDeploymentProps

type StageDeploymentProps struct {
	// Additional steps to run after all of the stacks in the stage.
	// Default: - No additional steps.
	//
	Post *[]Step `field:"optional" json:"post" yaml:"post"`
	// Additional steps to run before any of the stacks in the stage.
	// Default: - No additional steps.
	//
	Pre *[]Step `field:"optional" json:"pre" yaml:"pre"`
	// Instructions for additional steps that are run at the stack level.
	// Default: - No additional instructions.
	//
	StackSteps *[]*StackSteps `field:"optional" json:"stackSteps" yaml:"stackSteps"`
	// Stage name to use in the pipeline.
	// Default: - Use Stage's construct ID.
	//
	StageName *string `field:"optional" json:"stageName" yaml:"stageName"`
}

Properties for a `StageDeployment`.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import cdk "github.com/aws/aws-cdk-go/awscdk"
import "github.com/aws/aws-cdk-go/awscdk"

var stack stack
var step step

stageDeploymentProps := &StageDeploymentProps{
	Post: []*step{
		step,
	},
	Pre: []*step{
		step,
	},
	StackSteps: []stackSteps{
		&stackSteps{
			Stack: stack,

			// the properties below are optional
			ChangeSet: []*step{
				step,
			},
			Post: []*step{
				step,
			},
			Pre: []*step{
				step,
			},
		},
	},
	StageName: jsii.String("stageName"),
}

type Step

type Step interface {
	IFileSetProducer
	// StackOutputReferences this step consumes.
	ConsumedStackOutputs() *[]StackOutputReference
	// Return the steps this step depends on, based on the FileSets it requires.
	Dependencies() *[]Step
	// The list of FileSets consumed by this Step.
	DependencyFileSets() *[]FileSet
	// Identifier for this step.
	Id() *string
	// Whether or not this is a Source step.
	//
	// What it means to be a Source step depends on the engine.
	IsSource() *bool
	// The primary FileSet produced by this Step.
	//
	// Not all steps produce an output FileSet--if they do,
	// you can substitute the `Step` object for the `FileSet` object.
	PrimaryOutput() FileSet
	// Add an additional FileSet to the set of file sets required by this step.
	//
	// This will lead to a dependency on the producer of that file set.
	AddDependencyFileSet(fs FileSet)
	// Add a dependency on another step.
	AddStepDependency(step Step)
	// Configure the given FileSet as the primary output of this step.
	ConfigurePrimaryOutput(fs FileSet)
	// Crawl the given structure for references to StepOutputs and add dependencies on all steps found.
	//
	// Should be called in the constructor of subclasses based on what the user
	// passes in as construction properties. The format of the structure passed in
	// here does not have to correspond exactly to what gets rendered into the
	// engine, it just needs to contain the same data.
	DiscoverReferencedOutputs(structure interface{})
	// Return a string representation of this Step.
	ToString() *string
}

A generic Step which can be added to a Pipeline.

Steps can be used to add Sources, Build Actions and Validations to your pipeline.

This class is abstract. See specific subclasses of Step for useful steps to add to your Pipeline.

Example:

type myJenkinsStep struct {
	step
}

func newMyJenkinsStep(provider jenkinsProvider, input fileSet) *myJenkinsStep {
	this := &myJenkinsStep{}
	pipelines.NewStep_Override(this, jsii.String("MyJenkinsStep"))

	// This is necessary if your step accepts parameters, like environment variables,
	// that may contain outputs from other steps. It doesn't matter what the
	// structure is, as long as it contains the values that may contain outputs.
	this.DiscoverReferencedOutputs(map[string]map[string]interface{}{
		"env": map[string]interface{}{
		},
	})
	return this
}

func (this *myJenkinsStep) produceAction(stage iStage, options produceActionOptions) codePipelineActionFactoryResult {

	// This is where you control what type of Action gets added to the
	// CodePipeline
	*stage.AddAction(cpactions.NewJenkinsAction(&JenkinsActionProps{
		// Copy 'actionName' and 'runOrder' from the options
		ActionName: options.ActionName,
		RunOrder: options.RunOrder,

		// Jenkins-specific configuration
		Type: cpactions.JenkinsActionType_TEST,
		JenkinsProvider: this.provider,
		ProjectName: jsii.String("MyJenkinsProject"),

		// Translate the FileSet into a codepipeline.Artifact
		Inputs: []artifact{
			options.Artifacts.ToCodePipeline(this.input),
		},
	}))

	return &codePipelineActionFactoryResult{
		RunOrdersConsumed: jsii.Number(1),
	}
}

type Wave

type Wave interface {
	// Identifier for this Wave.
	Id() *string
	// Additional steps that are run after all of the stages in the wave.
	Post() *[]Step
	// Additional steps that are run before any of the stages in the wave.
	Pre() *[]Step
	// The stages that are deployed in this wave.
	Stages() *[]StageDeployment
	// Add an additional step to run after all of the stages in this wave.
	AddPost(steps ...Step)
	// Add an additional step to run before any of the stages in this wave.
	AddPre(steps ...Step)
	// Add a Stage to this wave.
	//
	// It will be deployed in parallel with all other stages in this
	// wave.
	AddStage(stage awscdk.Stage, options *AddStageOpts) StageDeployment
}

Multiple stages that are deployed in parallel.

Example:

var pipeline codePipeline

europeWave := pipeline.AddWave(jsii.String("Europe"))
europeWave.AddStage(NewMyApplicationStage(this, jsii.String("Ireland"), &stageProps{
	Env: &Environment{
		Region: jsii.String("eu-west-1"),
	},
}))
europeWave.AddStage(NewMyApplicationStage(this, jsii.String("Germany"), &stageProps{
	Env: &Environment{
		Region: jsii.String("eu-central-1"),
	},
}))

func NewWave

func NewWave(id *string, props *WaveProps) Wave

type WaveOptions

type WaveOptions struct {
	// Additional steps to run after all of the stages in the wave.
	// Default: - No additional steps.
	//
	Post *[]Step `field:"optional" json:"post" yaml:"post"`
	// Additional steps to run before any of the stages in the wave.
	// Default: - No additional steps.
	//
	Pre *[]Step `field:"optional" json:"pre" yaml:"pre"`
}

Options to pass to `addWave`.

Example:

pipeline := pipelines.NewCodePipeline(this, jsii.String("Pipeline"), &CodePipelineProps{
	Synth: pipelines.NewShellStep(jsii.String("Synth"), &ShellStepProps{
		Input: pipelines.CodePipelineSource_Connection(jsii.String("my-org/my-app"), jsii.String("main"), &ConnectionSourceOptions{
			ConnectionArn: jsii.String("arn:aws:codestar-connections:us-east-1:222222222222:connection/7d2469ff-514a-4e4f-9003-5ca4a43cdc41"),
		}),
		Commands: []*string{
			jsii.String("npm ci"),
			jsii.String("npm run build"),
			jsii.String("npx cdk synth"),
		},
	}),

	// Turn this on because the pipeline uses Docker image assets
	DockerEnabledForSelfMutation: jsii.Boolean(true),
})

pipeline.AddWave(jsii.String("MyWave"), &WaveOptions{
	Post: []step{
		pipelines.NewCodeBuildStep(jsii.String("RunApproval"), &CodeBuildStepProps{
			Commands: []*string{
				jsii.String("command-from-image"),
			},
			BuildEnvironment: &BuildEnvironment{
				// The user of a Docker image asset in the pipeline requires turning on
				// 'dockerEnabledForSelfMutation'.
				BuildImage: codebuild.LinuxBuildImage_FromAsset(this, jsii.String("Image"), &DockerImageAssetProps{
					Directory: jsii.String("./docker-image"),
				}),
			},
		}),
	},
})

type WaveProps

type WaveProps struct {
	// Additional steps to run after all of the stages in the wave.
	// Default: - No additional steps.
	//
	Post *[]Step `field:"optional" json:"post" yaml:"post"`
	// Additional steps to run before any of the stages in the wave.
	// Default: - No additional steps.
	//
	Pre *[]Step `field:"optional" json:"pre" yaml:"pre"`
}

Construction properties for a `Wave`.

Example:

// The code below shows an example of how to instantiate this type.
// The values are placeholders you should change.
import "github.com/aws/aws-cdk-go/awscdk"

var step step

waveProps := &WaveProps{
	Post: []*step{
		step,
	},
	Pre: []*step{
		step,
	},
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL