fix(core): asset bundling skipped when using --exclusively with custom stack name (#21248)

Fixes #19927

A previous PR (#19950) attempted this fix, but it introduced another issue: the default pattern (`['*']`, used by `cdk synth`/`cdk deploy` when no pattern is supplied) does not match stacks under a stage (`Stage/Stack`), so bundling was skipped for those stacks. The default pattern previously matched all stacks because it was compared against `stackName`, which never contains `/`. See the discussion in #19927 for more details.
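
For reference, a minimal standalone sketch of the glob behaviour described above (not part of this commit; it assumes the `minimatch` package, whose import style varies by version):

```ts
import * as minimatch from 'minimatch';

// '*' does not cross path separators, so once patterns were compared against
// hierarchical ids, the old default pattern only matched top-level stacks.
minimatch('Stack1', '*');        // true
minimatch('Stage/Stack1', '*');  // false -- staged stacks were skipped

// '**' matches across '/', which is why it is used as the new default.
minimatch('Stage/Stack1', '**'); // true
```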

The first commit (5556b60) adds special handling for the default pattern so that it always matches, bypassing minimatch, which would return false for a stack nested under a stage. The second commit (a9a48a0) removes the duplication of this pattern across the codebase by defaulting `BUNDLING_STACKS` to `undefined` and having `bundlingRequired` return true in that case.
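
A hypothetical illustration of the values being compared before and after this change (construct ids and stack names are examples only, not taken from this PR):

```ts
import { App, Stack, Stage } from '@aws-cdk/core';
import * as minimatch from 'minimatch';

const app = new App();
const stage = new Stage(app, 'Stage');
const stack = new Stack(stage, 'Stack1', { stackName: 'unrelated-stack1-name' });

// Before: patterns were matched against the stack name, so an '--exclusively'
// pattern like 'Stage/Stack1' (converted to 'Stage-Stack1') never matched a
// custom stack name and bundling was skipped (#19927).
minimatch(stack.stackName, 'Stage/Stack1'.replace('/', '-')); // false

// After: patterns are matched against the construct path, the same value the
// CLI uses for stack selection, and the default pattern is '**'.
minimatch(stack.node.path, 'Stage/Stack1'); // true
minimatch(stack.node.path, '**');           // true
```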

----

### All Submissions:

* [ ] Have you followed the guidelines in our [Contributing guide?](https://github.com/aws/aws-cdk/blob/main/CONTRIBUTING.md)

### Adding new Unconventional Dependencies:

* [ ] This PR adds new unconventional dependencies following the process described [here](https://github.com/aws/aws-cdk/blob/main/CONTRIBUTING.md/#adding-new-unconventional-dependencies)

### New Features

* [ ] Have you added the new feature to an [integration test](https://github.com/aws/aws-cdk/blob/main/INTEGRATION_TESTS.md)?
	* [ ] Did you use `yarn integ` to deploy the infrastructure and generate the snapshot (i.e. `yarn integ` without `--dry-run`)?

*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license*
stevehodgkiss committed Oct 10, 2022
1 parent 456a2c7 commit 209ddea
Showing 6 changed files with 162 additions and 14 deletions.
7 changes: 3 additions & 4 deletions packages/@aws-cdk/core/lib/stack.ts
@@ -1184,12 +1184,11 @@ export class Stack extends Construct implements ITaggable {
* Indicates whether the stack requires bundling or not
*/
public get bundlingRequired() {
- const bundlingStacks: string[] = this.node.tryGetContext(cxapi.BUNDLING_STACKS) ?? ['*'];
+ const bundlingStacks: string[] = this.node.tryGetContext(cxapi.BUNDLING_STACKS) ?? ['**'];

- // bundlingStacks is of the form `Stage/Stack`, convert it to `Stage-Stack` before comparing to stack name
return bundlingStacks.some(pattern => minimatch(
- this.stackName,
- pattern.replace('/', '-'),
+ this.node.path, // use the same value for pattern matching as the aws-cdk CLI (displayName / hierarchicalId)
+ pattern,
));
}
}
151 changes: 150 additions & 1 deletion packages/@aws-cdk/core/test/staging.test.ts
@@ -930,6 +930,155 @@ describe('staging', () => {
expect(dockerStubInput).not.toMatch(DockerStubCommand.MULTIPLE_FILES);
});

test('correctly skips bundling with stack under stage and custom stack name', () => {
// GIVEN
const app = new App();

const stage = new Stage(app, 'Stage');
stage.node.setContext(cxapi.BUNDLING_STACKS, ['Stage/Stack1']);

const stack1 = new Stack(stage, 'Stack1', { stackName: 'unrelated-stack1-name' });
const stack2 = new Stack(stage, 'Stack2', { stackName: 'unrelated-stack2-name' });
const directory = path.join(__dirname, 'fs', 'fixtures', 'test1');

// WHEN
new AssetStaging(stack1, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.SUCCESS],
},
});

new AssetStaging(stack2, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.MULTIPLE_FILES],
},
});

// THEN
const dockerStubInput = readDockerStubInputConcat();
// Docker ran for the asset in Stack1
expect(dockerStubInput).toMatch(DockerStubCommand.SUCCESS);
// Docker did not run for the asset in Stack2
expect(dockerStubInput).not.toMatch(DockerStubCommand.MULTIPLE_FILES);
});

test('correctly bundles with stack under stage and the default stack pattern', () => {
// GIVEN
const app = new App();

const stage = new Stage(app, 'Stage');

const stack1 = new Stack(stage, 'Stack1');
const stack2 = new Stack(stage, 'Stack2');
const directory = path.join(__dirname, 'fs', 'fixtures', 'test1');

// WHEN
new AssetStaging(stack1, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.SUCCESS],
},
});

new AssetStaging(stack2, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.MULTIPLE_FILES],
},
});

// THEN
const dockerStubInput = readDockerStubInputConcat();
// Docker ran for the asset in Stack1
expect(dockerStubInput).toMatch(DockerStubCommand.SUCCESS);
// Docker ran for the asset in Stack2
expect(dockerStubInput).toMatch(DockerStubCommand.MULTIPLE_FILES);
});

test('correctly bundles with stack under stage and partial globstar wildcard', () => {
// GIVEN
const app = new App();

const stage = new Stage(app, 'Stage');
stage.node.setContext(cxapi.BUNDLING_STACKS, ['**/Stack1']); // a single wildcard prefix ('*Stack1') won't match

const stack1 = new Stack(stage, 'Stack1');
const stack2 = new Stack(stage, 'Stack2');
const directory = path.join(__dirname, 'fs', 'fixtures', 'test1');

// WHEN
new AssetStaging(stack1, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.SUCCESS],
},
});

new AssetStaging(stack2, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.MULTIPLE_FILES],
},
});

// THEN
const dockerStubInput = readDockerStubInputConcat();
// Docker ran for the asset in Stack1
expect(dockerStubInput).toMatch(DockerStubCommand.SUCCESS);
// Docker did not run for the asset in Stack2
expect(dockerStubInput).not.toMatch(DockerStubCommand.MULTIPLE_FILES);
});

test('correctly bundles selected stacks nested in Stack/Stage/Stack', () => {
// GIVEN
const app = new App();

const topStack = new Stack(app, 'TopStack');
topStack.node.setContext(cxapi.BUNDLING_STACKS, ['TopStack/MiddleStage/BottomStack']);

const middleStage = new Stage(topStack, 'MiddleStage');
const bottomStack = new Stack(middleStage, 'BottomStack');
const directory = path.join(__dirname, 'fs', 'fixtures', 'test1');

// WHEN
new AssetStaging(bottomStack, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.SUCCESS],
},
});
new AssetStaging(topStack, 'Asset', {
sourcePath: directory,
assetHashType: AssetHashType.OUTPUT,
bundling: {
image: DockerImage.fromRegistry('alpine'),
command: [DockerStubCommand.MULTIPLE_FILES],
},
});

const dockerStubInput = readDockerStubInputConcat();
// Docker ran for the asset in BottomStack
expect(dockerStubInput).toMatch(DockerStubCommand.SUCCESS);
// Docker did not run for the asset in TopStack
expect(dockerStubInput).not.toMatch(DockerStubCommand.MULTIPLE_FILES);
});

test('bundling still occurs with partial wildcard', () => {
// GIVEN
const app = new App();
@@ -954,7 +1103,7 @@ describe('staging', () => {
expect(asset.assetHash).toEqual('33cbf2cae5432438e0f046bc45ba8c3cef7b6afcf47b59d1c183775c1918fb1f'); // hash of MyStack/Asset
});

- test('bundling still occurs with full wildcard', () => {
+ test('bundling still occurs with a single wildcard', () => {
// GIVEN
const app = new App();
const stack = new Stack(app, 'MyStack');
2 changes: 1 addition & 1 deletion packages/aws-cdk/README.md
@@ -288,7 +288,7 @@ are written to the same output file where each stack artifact ID is a key in the


```console
- $ cdk deploy '*' --outputs-file "/Users/code/myproject/outputs.json"
+ $ cdk deploy '**' --outputs-file "/Users/code/myproject/outputs.json"
```

Example `outputs.json` after deployment of multiple stacks
2 changes: 1 addition & 1 deletion packages/aws-cdk/lib/api/cxapp/exec.ts
@@ -44,7 +44,7 @@ export async function execProgram(aws: SdkProvider, config: Configuration): Prom
context[cxapi.DISABLE_ASSET_STAGING_CONTEXT] = true;
}

- const bundlingStacks = config.settings.get(['bundlingStacks']) ?? ['*'];
+ const bundlingStacks = config.settings.get(['bundlingStacks']) ?? ['**'];
context[cxapi.BUNDLING_STACKS] = bundlingStacks;

debug('context:', context);
4 changes: 2 additions & 2 deletions packages/aws-cdk/lib/settings.ts
@@ -256,8 +256,8 @@ export class Settings {
// If we deploy, diff, synth or watch a list of stacks exclusively we skip
// bundling for all other stacks.
bundlingStacks = argv.exclusively
- ? argv.STACKS ?? ['*']
- : ['*'];
+ ? argv.STACKS ?? ['**']
+ : ['**'];
} else { // Skip bundling for all stacks
bundlingStacks = [];
}
10 changes: 5 additions & 5 deletions packages/aws-cdk/test/settings.test.ts
@@ -90,24 +90,24 @@ test('bundling stacks defaults to an empty list', () => {
expect(settings.get(['bundlingStacks'])).toEqual([]);
});

- test('bundling stacks defaults to * for deploy', () => {
+ test('bundling stacks defaults to ** for deploy', () => {
// GIVEN
const settings = Settings.fromCommandLineArguments({
_: [Command.DEPLOY],
});

// THEN
- expect(settings.get(['bundlingStacks'])).toEqual(['*']);
+ expect(settings.get(['bundlingStacks'])).toEqual(['**']);
});

- test('bundling stacks defaults to * for watch', () => {
+ test('bundling stacks defaults to ** for watch', () => {
// GIVEN
const settings = Settings.fromCommandLineArguments({
_: [Command.WATCH],
});

// THEN
- expect(settings.get(['bundlingStacks'])).toEqual(['*']);
+ expect(settings.get(['bundlingStacks'])).toEqual(['**']);
});

test('bundling stacks with deploy exclusively', () => {
@@ -154,4 +154,4 @@ test('providing a build arg', () => {

// THEN
expect(settings.get(['build'])).toEqual('mvn package');
- });
+ });
