feat: templategen command e2e integration tests #13984

Closed · wants to merge 3 commits
1 change: 1 addition & 0 deletions .eslint-dictionary.json
@@ -388,6 +388,7 @@
"syncable",
"tablename",
"tailwindcss",
"templategen",
"testother",
"testschemadeployer",
"textract",
1 change: 1 addition & 0 deletions packages/amplify-migration-e2e/package.json
@@ -10,6 +10,7 @@
"@aws-sdk/client-appsync": "^3.666.0",
"@aws-sdk/client-cloudcontrol": "^3.658.1",
"@aws-sdk/client-cognito-identity": "^3.670.0",
"@aws-sdk/client-s3": "^3.674.0",
"execa": "^5.1.1",
"fs-extra": "^8.1.0",
"lodash": "^4.17.21"
@@ -61,7 +61,7 @@ void describe('Codegen E2E tests', () => {
await copyFunctionFile(projRoot, 'function', gen1FunctionName);
await copyGen1Schema(projRoot, projName);
// TODO: replace below line with correct package version
await updatePackageDependency(projRoot, '@aws-amplify/backend', '0.0.0-test-20241003180022');
await updatePackageDependency(projRoot, '@aws-amplify/backend');
await npmInstall(projRoot);
const gen2StackName = await runGen2SandboxCommand(projRoot);
await assertAuthResource(projRoot, gen1UserPoolId, gen1ClientIds, gen1IdentityPoolId, gen1Region);
@@ -79,7 +79,7 @@ void describe('Codegen E2E tests', () => {
await copyFunctionFile(projRoot, 'auth', gen1FunctionName);
await removeErrorThrowsFromAuthResourceFile(projRoot);
// TODO: replace below line with correct package version
await updatePackageDependency(projRoot, '@aws-amplify/backend', '0.0.0-test-20241003180022');
await updatePackageDependency(projRoot, '@aws-amplify/backend');
await npmInstall(projRoot);
await toggleSandboxSecrets(projRoot, 'set');
const gen2StackName = await runGen2SandboxCommand(projRoot);
@@ -95,7 +95,7 @@ void describe('Codegen E2E tests', () => {
);
await runCodegenCommand(projRoot);
// TODO: replace below line with correct package version
await updatePackageDependency(projRoot, '@aws-amplify/backend', '0.0.0-test-20241003180022');
await updatePackageDependency(projRoot, '@aws-amplify/backend');
await npmInstall(projRoot);
await runGen2SandboxCommand(projRoot);
await assertAuthResource(projRoot, gen1UserPoolId, gen1ClientIds, gen1IdentityPoolId, gen1Region);
@@ -0,0 +1,48 @@
import path from 'node:path';
import assert from 'node:assert';
import { createNewProjectDir, npmInstall, deleteS3Bucket } from '@aws-amplify/amplify-e2e-core';
import { assertDefaultGen1Setup } from '../assertions';
import { setupAndPushDefaultGen1Project, runCodegenCommand, runGen2SandboxCommand, cleanupProjects } from '..';
import { copyFunctionFile } from '../function_utils';
import { copyGen1Schema } from '../api_utils';
import { updatePackageDependency } from '../updatePackageJson';
import { createS3Bucket } from '../sdk_calls';
import { runTemplategenCommand, stackRefactor } from '../templategen';

void describe('Templategen E2E tests', () => {
void describe('Full Migration Templategen Flow', () => {
let projRoot: string;
let projName: string;
let bucketName: string;

beforeEach(async () => {
const baseDir = process.env.INIT_CWD ?? process.cwd();
projRoot = await createNewProjectDir('templategen_e2e_flow_test', path.join(baseDir, '..', '..'));
projName = `test${Math.floor(Math.random() * 1000000)}`;
bucketName = `testbucket${Math.floor(Math.random() * 1000000)}`;
});

afterEach(async () => {
await cleanupProjects(projRoot);
await deleteS3Bucket(bucketName);
});

void it('should init a project & add auth, function, storage, api with defaults & perform full migration templategen flow', async () => {
await setupAndPushDefaultGen1Project(projRoot, projName);
const { gen1StackName, gen1FunctionName, gen1Region } = await assertDefaultGen1Setup(projRoot);
await createS3Bucket(bucketName, gen1Region);
assert(gen1StackName);
await runCodegenCommand(projRoot);
await copyFunctionFile(projRoot, 'function', gen1FunctionName);
await copyGen1Schema(projRoot, projName);
// TODO: replace below line with correct package version
await updatePackageDependency(projRoot, '@aws-amplify/backend');
await npmInstall(projRoot);
const gen2StackName = await runGen2SandboxCommand(projRoot);
assert(gen2StackName);
await runTemplategenCommand(projRoot, gen1StackName, gen2StackName);
await stackRefactor(projRoot, 'auth', bucketName);
await stackRefactor(projRoot, 'storage', bucketName);
});
});
});
12 changes: 11 additions & 1 deletion packages/amplify-migration-e2e/src/assertions.ts
@@ -88,6 +88,7 @@ async function assertUserPoolGroups(gen1Meta: $TSAny) {

export async function assertDefaultGen1Setup(projRoot: string) {
const gen1Meta = getProjectMeta(projRoot);
const gen1StackName = gen1Meta.providers.awscloudformation.StackName;
const gen1Region = gen1Meta.providers.awscloudformation.Region;
const { gen1UserPoolId } = await assertUserPool(gen1Meta, gen1Region);
const { gen1FunctionName } = await assertFunction(gen1Meta, gen1Region);
@@ -96,7 +97,16 @@ export async function assertDefaultGen1Setup(projRoot: string) {
const { gen1GraphqlApiId } = await assertAPI(gen1Meta, gen1Region);
const { gen1IdentityPoolId } = await assertIdentityPool(gen1Meta, gen1Region);
const { gen1ClientIds } = await assertUserPoolClients(gen1Meta, gen1Region);
return { gen1UserPoolId, gen1ClientIds, gen1IdentityPoolId, gen1FunctionName, gen1BucketName, gen1GraphqlApiId, gen1Region };
return {
gen1StackName,
gen1UserPoolId,
gen1ClientIds,
gen1IdentityPoolId,
gen1FunctionName,
gen1BucketName,
gen1GraphqlApiId,
gen1Region,
};
}

export async function assertAuthWithMaxOptionsGen1Setup(projRoot: string) {
1 change: 1 addition & 0 deletions packages/amplify-migration-e2e/src/index.ts
@@ -27,6 +27,7 @@ export * from './sdk_calls';
export * from './assertions';
export * from './projectOutputs';
export * from './updatePackageJson';
export * from './templategen';

const pushTimeoutMS = 1000 * 60 * 20; // 20 minutes;

12 changes: 12 additions & 0 deletions packages/amplify-migration-e2e/src/sdk_calls.ts
@@ -1,7 +1,19 @@
import { CloudControlClient, GetResourceCommand } from '@aws-sdk/client-cloudcontrol';
import { AppSyncClient, GetDataSourceCommand } from '@aws-sdk/client-appsync';
import { CognitoIdentityClient, DescribeIdentityPoolCommand } from '@aws-sdk/client-cognito-identity';
import { S3Client, CreateBucketCommand, BucketLocationConstraint } from '@aws-sdk/client-s3';

export async function createS3Bucket(bucketName: string, region: string) {
const client = new S3Client({ region });
const command = new CreateBucketCommand({
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: region as BucketLocationConstraint,
},
});
const response = await client.send(command);
return response;
}
export async function getAppSyncDataSource(apiId: string, dataSourceName: string, region: string) {
const client = new AppSyncClient({ region });
const command = new GetDataSourceCommand({
180 changes: 180 additions & 0 deletions packages/amplify-migration-e2e/src/templategen.ts
@@ -0,0 +1,180 @@
import execa from 'execa';
import path from 'node:path';
import * as fs from 'fs-extra';
import { getNpxPath, retry, RetrySettings } from '@aws-amplify/amplify-e2e-core';
import { runGen2SandboxCommand } from '.';

export type EnvVariableAction = 'SET' | 'DELETE';
export type RefactorCategory = 'auth' | 'storage';

const RETRY_CONFIG: RetrySettings = {
times: 50,
delayMS: 1000, // 1 second
timeoutMS: 1000 * 60 * 5, // 5 minutes
stopOnError: true,
};

const STATUS_COMPLETE = 'COMPLETE';
const STATUS_IN_PROGRESS = 'IN_PROGRESS';
const STATUS_FAILED = 'FAILED';

export function runTemplategenCommand(cwd: string, gen1StackName: string, gen2StackName: string) {
const parentDir = path.resolve(cwd, '..');
const processResult = execa.sync(
getNpxPath(),
['--prefix', parentDir, '@aws-amplify/migrate', 'to-gen-2', 'generate-templates', '--from', gen1StackName, '--to', gen2StackName],
{
cwd,
env: { ...process.env, npm_config_user_agent: 'npm' },
encoding: 'utf-8',
},
);

if (processResult.exitCode !== 0) {
throw new Error(`Templategen command exit code: ${processResult.exitCode}, message: ${processResult.stderr}`);
}
}

function uncommentBucketNameLineFromBackendFile(projRoot: string) {
const backendFilePath = path.join(projRoot, 'amplify', 'backend.ts');
const backendFileContent = fs.readFileSync(backendFilePath, 'utf8');
const regex = /^\s*\/\/\s*(s3Bucket\.bucketName)/m;
const updatedBackendFileContent = backendFileContent.replace(regex, '$1');
fs.writeFileSync(backendFilePath, updatedBackendFileContent);
}

function toggleEnvVariable(name: string, option: EnvVariableAction, value?: string) {
if (option === 'SET') {
process.env[name] = value;
} else if (option === 'DELETE') {
delete process.env[name];
}
}

function extractContent(readmeContent: string, startRegex: string, endRegex: string) {
Contributor: nice!

const pattern = new RegExp(`${startRegex}([\\s\\S]*?)${endRegex}`, 'i');
const match = readmeContent.match(pattern);

if (match && match[1]) {
return match[1].trim();
}
throw new Error('README file parsing failed to get the stack refactor commands');
}

function extractCommands(readmeContent: string) {
Contributor: nice!

const pattern = /```([\s\S]*?)```/g;
const matches = readmeContent.matchAll(pattern);
const commands = [];

for (const match of matches) {
if (match[1]) {
commands.push(match[1].trim());
}
}
if (commands.length === 0) {
throw new Error('README file parsing failed to get the stack refactor commands');
}
return commands;
}

function getCommandsFromReadme(readmeContent: string) {

Contributor: I think the README parsing logic is difficult to follow here. Unless a code reviewer knows the logic beforehand, it is not trivial to know what commands 1, 2, 3 are. Some options:

  1. Black box: parse the README to get the list of commands and execute them one by one without caring what each one is. The test only cares about the end state of the stacks (i.e. stack in UPDATE_COMPLETE) and that resources are moved/renamed to the right stacks. (Note: you can use the stack state to know whether the update/refactor operation is complete; you don't have to fetch the refactor object or changeset.)
  2. White box: parse the README and assume knowledge of the migration logic, e.g. command 1: upload to S3, command 2: update stack, command 3: refactor, and so on:
     const stackMigrationHandler = StackMigrationHandler.fromTemplateGenReadme(<readme path>)
     stackMigrationHandler.uploadToS3()
     stackMigrationHandler.updateStack()
     stackMigrationHandler.refactorStacks()
     ......

  I would suggest black-box testing for simplicity.

Contributor: That's a great idea. We can do both kinds of testing depending on what we are testing. For the happy path, we can do black-box testing since we only care about the end state; we already have getCommandsFromReadme, so we can just execute the commands one by one. For the rollback scenario, we need to extract rollback-specific commands, so that can be its own test.

Contributor Author: Sounds good, will keep the getCommandsFromReadme logic and execute the commands one by one as is. Should the commands be stored in variables, or is accessing them by indices okay for this approach?

Contributor (@abhi7cr, Oct 21, 2024): Yeah, in black-box mode we don't need the names. Iterating over them one by one would suffice.

(A sketch of this black-box approach appears after the templategen.ts listing below.)

const step1Content = extractContent(readmeContent, '### STEP 1', '#### Rollback step');
const step2Content = extractContent(readmeContent, '### STEP 2', '#### Rollback step');
const step3Content = extractContent(readmeContent, '### STEP 3', '#### Rollback step');
const step1Commands = extractCommands(step1Content);
const step2commands = extractCommands(step2Content);
const step3Commands = extractCommands(step3Content);
return { step1Commands, step2commands, step3Commands };
}

async function executeCommand(command: string, cwd?: string) {
cwd = cwd ?? process.cwd();
const processResult = execa.sync(command, {
cwd,
env: { ...process.env, npm_config_user_agent: 'npm' },
encoding: 'utf-8',
shell: true,
});

if (processResult.exitCode === 0) {
return processResult.stdout;
} else {
throw new Error(`Command exit code: ${processResult.exitCode}, message: ${processResult.stderr}`);
}
}

async function executeCreateStackRefactorCallCommand(command: string, cwd: string) {
const processResult = JSON.parse(await executeCommand(command, cwd));
const stackRefactorId = processResult.StackRefactorId;
return stackRefactorId;
}

async function assertStepCompletion(command: string) {
const processResult = JSON.parse(await executeCommand(command));
return processResult.Stacks[0].StackStatus;
}

async function assertRefactorStepCompletion(command: string) {
const processResult = JSON.parse(await executeCommand(command));
return processResult.Status;
}

async function executeStep1(cwd: string, commands: string[]) {
await executeCommand(commands[0], cwd);
await retry(
() => assertStepCompletion(commands[1]),
(status) => status.includes(STATUS_COMPLETE) && !status.includes(STATUS_IN_PROGRESS),
RETRY_CONFIG,
(status) => status.includes(STATUS_FAILED),
);
}

async function executeStep2(cwd: string, commands: string[]) {
await executeCommand(commands[0], cwd);
await retry(
() => assertStepCompletion(commands[1]),
(status) => status.includes(STATUS_COMPLETE) && !status.includes(STATUS_IN_PROGRESS),
RETRY_CONFIG,
(status) => status.includes(STATUS_FAILED),
);
}

async function executeStep3(cwd: string, commands: string[], bucketName: string) {
toggleEnvVariable('BUCKET_NAME', 'SET', bucketName);
await executeCommand(commands[1], cwd);
Contributor (@abhi7cr, Oct 21, 2024): Can we name the commands instead of accessing them by indices for easier understanding, e.g. const [uploadSourceTemplate, uploadDestinationTemplate, ...] = commands?

Contributor Author (@Sanayshah2, Oct 21, 2024): If we follow the black-box testing approach, this is out of scope then?

Contributor: Yup!

await executeCommand(commands[2], cwd);
const stackRefactorId = await executeCreateStackRefactorCallCommand(commands[3], cwd);
toggleEnvVariable('STACK_REFACTOR_ID', 'SET', stackRefactorId);
await retry(
() => assertRefactorStepCompletion(commands[5]),
(status) => status.includes(STATUS_COMPLETE) && !status.includes(STATUS_IN_PROGRESS),
RETRY_CONFIG,
(status) => status.includes(STATUS_FAILED),
);
await executeCommand(commands[6], cwd);
await retry(
() => assertRefactorStepCompletion(commands[7]),
(status) => status.includes(STATUS_COMPLETE) && !status.includes(STATUS_IN_PROGRESS),
RETRY_CONFIG,
(status) => status.includes(STATUS_FAILED),
);
}

export async function stackRefactor(projRoot: string, category: RefactorCategory, bucketName: string) {
const readmeFilePath = path.join(projRoot, '.amplify', 'migration', 'templates', category, 'MIGRATION_README.md');
const readmeContent = fs.readFileSync(readmeFilePath, 'utf-8');
const { step1Commands, step2commands, step3Commands } = getCommandsFromReadme(readmeContent);

await executeStep1(projRoot, step1Commands);
await executeStep2(projRoot, step2commands);
await executeStep3(projRoot, step3Commands, bucketName);

if (category === 'storage') {
await uncommentBucketNameLineFromBackendFile(projRoot);
}

await runGen2SandboxCommand(projRoot);

toggleEnvVariable('BUCKET_NAME', 'DELETE');
toggleEnvVariable('STACK_REFACTOR_ID', 'DELETE');
}
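
Editorial sketch of the black-box approach suggested in the review discussion above (not part of this PR's diff): parse the README, run every extracted command in order, and assert only the end state. The helper name runReadmeCommandsBlackBox is hypothetical; it assumes the extractCommands and executeCommand helpers defined in templategen.ts, and that the README commands can be executed verbatim (the real steps also set BUCKET_NAME and STACK_REFACTOR_ID, as executeStep3 shows).

// Hypothetical black-box variant: every fenced command in MIGRATION_README.md is treated
// as opaque, executed in document order, and verification is left to end-state assertions.
async function runReadmeCommandsBlackBox(projRoot: string, category: RefactorCategory): Promise<void> {
  const readmeFilePath = path.join(projRoot, '.amplify', 'migration', 'templates', category, 'MIGRATION_README.md');
  const readmeContent = fs.readFileSync(readmeFilePath, 'utf-8');
  const commands = extractCommands(readmeContent); // every ```...``` block, in order
  for (const command of commands) {
    await executeCommand(command, projRoot); // opaque execution; no per-command knowledge
  }
  // A test using this helper would then assert only the end state, e.g. both stacks in
  // UPDATE_COMPLETE and the refactored resources present in the expected stacks.
}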
2 changes: 1 addition & 1 deletion packages/amplify-migration-e2e/src/updatePackageJson.ts
@@ -2,7 +2,7 @@ import { readJsonFile } from '@aws-amplify/amplify-e2e-core';
import * as fs from 'fs-extra';
import path from 'node:path';

export function updatePackageDependency(cwd: string, dependencyName: string, version: string) {
export function updatePackageDependency(cwd: string, dependencyName: string, version = '0.0.0-test-20241018150827') {
const packageJsonPath = path.join(cwd, 'package.json');
const packageJson = readJsonFile(packageJsonPath);
