ci: Split spec improvements for cypress ci runs (#26774)

> Pull Request Template
>
> Use this template to quickly create a well written pull request.
Delete all quotes before creating the pull request.
>
## Description
> Add a TL;DR when description is extra long (helps content team)
>
> Please include a summary of the changes and which issue has been
fixed. Please also include relevant motivation
> and context. List any dependencies that are required for this change
>
> Links to Notion, Figma or any other documents that might be relevant
to the PR
>
>
#### PR fixes following issue(s)
Fixes # (issue number)
> if no issue exists, please create an issue and ask the maintainers
about this first
>
>
#### Media
> A video or a GIF is preferred. When using Loom, don’t embed it, because it
looks like it’s a GIF. Instead, just link to the video
>
>
#### Type of change
> Please delete options that are not relevant.
- Bug fix (non-breaking change which fixes an issue)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing
functionality to not work as expected)
- Chore (housekeeping or task changes that don't impact user perception)
- This change requires a documentation update
>
>
>
## Testing
>
#### How Has This Been Tested?
> Please describe the tests that you ran to verify your changes. Also
list any relevant details for your test configuration.
> Delete anything that is not relevant
- [ ] Manual
- [ ] JUnit
- [ ] Jest
- [ ] Cypress
>
>
#### Test Plan
> Add Testsmith test cases links that relate to this PR
>
>
#### Issues raised during DP testing
> Link issues raised during DP testing for better visibility and tracking
(copy link from comments dropped on this PR)
>
>
>
## Checklist:
#### Dev activity
- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my
feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] PR is being merged under a feature flag


#### QA activity:
- [ ] [Speedbreak
features](https://github.com/appsmithorg/TestSmith/wiki/Guidelines-for-test-plans#speedbreakers-)
have been covered
- [ ] Test plan covers all impacted features and [areas of
interest](https://github.com/appsmithorg/TestSmith/wiki/Guidelines-for-test-plans#areas-of-interest-)
- [ ] Test plan has been peer reviewed by project stakeholders and other
QA members
- [ ] Manually tested functionality on DP
- [ ] We had an implementation alignment call with stakeholders post QA
Round 2
- [ ] Cypress test cases have been added and approved by SDET/manual QA
- [ ] Added `Test Plan Approved` label after Cypress tests were reviewed
- [ ] Added `Test Plan Approved` label after JUnit tests were reviewed
This commit is contained in:
Saroj 2023-09-25 09:49:21 +05:30 committed by GitHub
parent 0a066981c0
commit 9895ee217e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 1022 additions and 533 deletions

View File

@ -186,45 +186,63 @@ jobs:
run:
shell: bash
steps:
# Deleting the existing dir's if any
- name: Delete existing directories
- name: Setup node
if: needs.ci-test-limited.result != 'success'
run: |
rm -f ~/failed_spec_ci
rm -f ~/combined_failed_spec_ci
# Force store previous cypress dashboard url from cache
- name: Store the previous cypress dashboard url
if: success()
uses: actions/cache@v3
uses: actions/setup-node@v3
with:
path: |
~/cypress_url
key: ${{ github.run_id }}-dashboard-url-${{ github.run_attempt }}
restore-keys: |
${{ github.run_id }}-dashboard-url
node-version: 18
- name: Print cypress dashboard url
id: dashboard_url
run: |
cypress_url="https://internal.appsmith.com/app/cypressdashboard/rundetails-64ec3df0c632e24c00764938?branch=master&workflowId=${{ github.run_id }}&attempt=${{ github.run_attempt }}"
echo "dashboard_url=$cypress_url" >> $GITHUB_OUTPUT
# Download failed_spec list for all jobs
- uses: actions/download-artifact@v3
- name: install pg
if: needs.ci-test-limited.result != 'success'
id: download_ci
run : npm install pg
- name: Fetch the failed specs
if: needs.ci-test-limited.result != 'success'
id: failed_specs
env:
DB_HOST: ${{ secrets.CYPRESS_DB_HOST }}
DB_NAME: ${{ secrets.CYPRESS_DB_NAME }}
DB_USER: ${{ secrets.CYPRESS_DB_USER }}
DB_PWD: ${{ secrets.CYPRESS_DB_PWD }}
RUN_ID: ${{ github.run_id }}
ATTEMPT_NUMBER: ${{ github.run_attempt }}
uses: actions/github-script@v6
with:
name: failed-spec-ci-${{github.run_attempt}}
path: ~/failed_spec_ci
script: |
const { Pool } = require("pg");
const { DB_HOST, DB_NAME, DB_USER, DB_PWD, RUN_ID, ATTEMPT_NUMBER } = process.env
const client = await new Pool({
user: DB_USER,
host: DB_HOST,
database: DB_NAME,
password: DB_PWD,
port: 5432,
connectionTimeoutMillis: 60000,
}).connect();
const result = await client.query(
`SELECT DISTINCT name FROM public."specs"
WHERE "matrixId" IN
(SELECT id FROM public."matrix"
WHERE "attemptId" = (
SELECT id FROM public."attempt" WHERE "workflowId" = $1 and "attempt" = $2
)
) AND status = 'fail'`,
[RUN_ID, ATTEMPT_NUMBER],
);
client.release();
return result.rows.map((spec) => spec.name);
# In case for any ci job failure, create combined failed spec
- name: "combine all specs for CI"
- name: combine all specs for CI
id: combine_ci
if: needs.ci-test-limited.result != 'success'
run: |
echo "Debugging: failed specs in ~/failed_spec_ci/failed_spec_ci*"
cat ~/failed_spec_ci/failed_spec_ci*
cat ~/failed_spec_ci/failed_spec_ci* | sort -u >> ~/combined_failed_spec_ci
failed_specs=$(echo ${{steps.test.outputs.result}} | sed 's/\[\|\]//g' | tr -d ' ' | tr ',' '\n')
while read -r line; do
echo "$line" >> ~/combined_failed_spec_ci
done <<< "$failed_specs"
# Upload combined failed CI spec list to a file
# This is done for debugging.
@ -285,45 +303,63 @@ jobs:
run:
shell: bash
steps:
# Deleting the existing dir's if any
- name: Delete existing directories
- name: Setup node
if: needs.ci-test-limited-existing-docker-image.result != 'success'
run: |
rm -f ~/failed_spec_ci
rm -f ~/combined_failed_spec_ci
# Force store previous cypress dashboard url from cache
- name: Store the previous cypress dashboard url
if: success()
uses: actions/cache@v3
uses: actions/setup-node@v3
with:
path: |
~/cypress_url
key: ${{ github.run_id }}-dashboard-url-${{ github.run_attempt }}
restore-keys: |
${{ github.run_id }}-dashboard-url
node-version: 18
- name: Print cypress dashboard url
id: dashboard_url
run: |
cypress_url="https://internal.appsmith.com/app/cypressdashboard/rundetails-64ec3df0c632e24c00764938?branch=master&workflowId=${{ github.run_id }}&attempt=${{ github.run_attempt }}"
echo "dashboard_url=$cypress_url" >> $GITHUB_OUTPUT
# Download failed_spec list for all jobs
- uses: actions/download-artifact@v3
- name: install pg
if: needs.ci-test-limited-existing-docker-image.result != 'success'
id: download_ci
run : npm install pg
- name: Fetch the failed specs
if: needs.ci-test-limited-existing-docker-image.result != 'success'
id: failed_specs
env:
DB_HOST: ${{ secrets.CYPRESS_DB_HOST }}
DB_NAME: ${{ secrets.CYPRESS_DB_NAME }}
DB_USER: ${{ secrets.CYPRESS_DB_USER }}
DB_PWD: ${{ secrets.CYPRESS_DB_PWD }}
RUN_ID: ${{ github.run_id }}
ATTEMPT_NUMBER: ${{ github.run_attempt }}
uses: actions/github-script@v6
with:
name: failed-spec-ci-${{github.run_attempt}}
path: ~/failed_spec_ci
script: |
const { Pool } = require("pg");
const { DB_HOST, DB_NAME, DB_USER, DB_PWD, RUN_ID, ATTEMPT_NUMBER } = process.env
const client = await new Pool({
user: DB_USER,
host: DB_HOST,
database: DB_NAME,
password: DB_PWD,
port: 5432,
connectionTimeoutMillis: 60000,
}).connect();
const result = await client.query(
`SELECT DISTINCT name FROM public."specs"
WHERE "matrixId" IN
(SELECT id FROM public."matrix"
WHERE "attemptId" = (
SELECT id FROM public."attempt" WHERE "workflowId" = $1 and "attempt" = $2
)
) AND status = 'fail'`,
[RUN_ID, ATTEMPT_NUMBER],
);
client.release();
return result.rows.map((spec) => spec.name);
# In case for any ci job failure, create combined failed spec
- name: "combine all specs for CI"
- name: combine all specs for CI
id: combine_ci
if: needs.ci-test-limited-existing-docker-image.result != 'success'
run: |
echo "Debugging: failed specs in ~/failed_spec_ci/failed_spec_ci*"
cat ~/failed_spec_ci/failed_spec_ci*
cat ~/failed_spec_ci/failed_spec_ci* | sort -u >> ~/combined_failed_spec_ci
failed_specs=$(echo ${{steps.test.outputs.result}} | sed 's/\[\|\]//g' | tr -d ' ' | tr ',' '\n')
while read -r line; do
echo "$line" >> ~/combined_failed_spec_ci
done <<< "$failed_specs"
# Upload combined failed CI spec list to a file
# This is done for debugging.

View File

@ -95,48 +95,63 @@ jobs:
PAYLOAD_CONTEXT: ${{ toJson(github.event.client_payload) }}
run: echo "$PAYLOAD_CONTEXT"
# Deleting the existing dir's if any
- name: Delete existing directories
- name: Setup node
if: needs.ci-test.result != 'success'
run: |
rm -f ~/failed_spec_ci
rm -f ~/combined_failed_spec_ci
# Force store previous cypress dashboard url from cache
- name: Store the previous cypress dashboard url
continue-on-error: true
if: success()
uses: actions/cache@v3
uses: actions/setup-node@v3
with:
path: |
~/cypress_url
key: ${{ github.run_id }}-dashboard-url-${{ github.run_attempt }}
restore-keys: |
${{ github.run_id }}-dashboard-url
node-version: 18
- name: Print cypress dashboard url
continue-on-error: true
id: dashboard_url
run: |
cypress_url="https://internal.appsmith.com/app/cypressdashboard/rundetails-64ec3df0c632e24c00764938?branch=master&workflowId=${{ github.run_id }}&attempt=${{ github.run_attempt }}"
echo "dashboard_url=$cypress_url" >> $GITHUB_OUTPUT
# Download failed_spec list for all jobs
- uses: actions/download-artifact@v3
- name: install pg
if: needs.ci-test.result != 'success'
id: download_ci
run : npm install pg
- name: Fetch the failed specs
if: needs.ci-test.result != 'success'
id: failed_specs
env:
DB_HOST: ${{ secrets.CYPRESS_DB_HOST }}
DB_NAME: ${{ secrets.CYPRESS_DB_NAME }}
DB_USER: ${{ secrets.CYPRESS_DB_USER }}
DB_PWD: ${{ secrets.CYPRESS_DB_PWD }}
RUN_ID: ${{ github.run_id }}
ATTEMPT_NUMBER: ${{ github.run_attempt }}
uses: actions/github-script@v6
with:
name: failed-spec-ci-${{github.run_attempt}}
path: ~/failed_spec_ci
script: |
const { Pool } = require("pg");
const { DB_HOST, DB_NAME, DB_USER, DB_PWD, RUN_ID, ATTEMPT_NUMBER } = process.env
const client = await new Pool({
user: DB_USER,
host: DB_HOST,
database: DB_NAME,
password: DB_PWD,
port: 5432,
connectionTimeoutMillis: 60000,
}).connect();
const result = await client.query(
`SELECT DISTINCT name FROM public."specs"
WHERE "matrixId" IN
(SELECT id FROM public."matrix"
WHERE "attemptId" = (
SELECT id FROM public."attempt" WHERE "workflowId" = $1 and "attempt" = $2
)
) AND status = 'fail'`,
[RUN_ID, ATTEMPT_NUMBER],
);
client.release();
return result.rows.map((spec) => spec.name);
# In case for any ci job failure, create combined failed spec
- name: combine all specs for CI
id: combine_ci
if: needs.ci-test.result != 'success'
run: |
echo "Debugging: failed specs in ~/failed_spec_ci/failed_spec_ci*"
cat ~/failed_spec_ci/failed_spec_ci*
cat ~/failed_spec_ci/failed_spec_ci* | sort -u >> ~/combined_failed_spec_ci
failed_specs=$(echo ${{steps.test.outputs.result}} | sed 's/\[\|\]//g' | tr -d ' ' | tr ',' '\n')
while read -r line; do
echo "$line" >> ~/combined_failed_spec_ci
done <<< "$failed_specs"
if [[ -z $(grep '[^[:space:]]' ~/combined_failed_spec_ci) ]] ; then
echo "specs_failed=0" >> $GITHUB_OUTPUT
else
@ -156,7 +171,7 @@ jobs:
shell: bash
run: |
curl --request POST --url https://yatin-s-workspace-jk8ru5.us-east-1.xata.sh/db/CypressKnownFailures:main/tables/CypressKnownFailuires/query --header 'Authorization: Bearer ${{ secrets.XATA_TOKEN }}' --header 'Content-Type: application/json'|jq -r |grep Spec|cut -d ':' -f 2 2> /dev/null|sed 's/"//g'|sed 's/,//g' > ~/knownfailures
# Verify CI test failures against known failures
- name: Verify CI test failures against known failures
if: needs.ci-test.result != 'success'
@ -181,7 +196,18 @@ jobs:
To know the list of identified flaky tests - <a href="https://app.appsmith.com/applications/613868bedd7786286ddd4a6a/pages/63ec710e8e503f763651791a" target="_blank">Refer here</a>
- name: Add a comment on the PR when ci-test is success
if: needs.ci-test.result == 'success' || steps.combine_ci.outputs.specs_failed == '0'
if: needs.ci-test.result != 'success' && steps.combine_ci.outputs.specs_failed == '0'
uses: peter-evans/create-or-update-comment@v1
with:
issue-number: ${{ github.event.client_payload.pull_request.number }}
body: |
Workflow run: <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}>.
Commit: `${{ github.event.client_payload.slash_command.args.named.sha }}`.
Cypress dashboard url: <a href="https://internal.appsmith.com/app/cypressdashboard/rundetails-64ec3df0c632e24c00764938?branch=master&workflowId=${{ github.run_id }}&attempt=${{ github.run_attempt }}" target="_blank">Click here!</a>
It seems like there are some failures 😔. We are not able to recognize it, please check this manually <a href="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" target="_blank">here.</a>
- name: Add a comment on the PR when ci-test is success
if: needs.ci-test.result == 'success' && steps.combine_ci.outputs.specs_failed == '0'
uses: peter-evans/create-or-update-comment@v1
with:
issue-number: ${{ github.event.client_payload.pull_request.number }}

View File

@ -1,172 +0,0 @@
/* eslint-disable no-console */
import globby from "globby";
import minimatch from "minimatch";
import { exec } from "child_process";
const fs = require("fs/promises");
// Options bag shared by the getEnv* helpers below.
type GetEnvOptions = {
  // When true, a missing environment variable raises instead of defaulting.
  required?: boolean;
};
// used to roughly determine how many tests are in a file: counts `it(`
// occurrences preceded by start-of-string or whitespace.
// NOTE(review): variants such as `it.only(` / `it.skip(` are not counted —
// confirm that is intended.
const testPattern = /(^|\s)(it)\(/g;
// Expands `specPattern` (glob or list of globs) into concrete spec file
// paths, excluding anything matched by `ignoreTestFiles`.
async function getSpecFilePaths(
  specPattern: any,
  ignoreTestFiles: any,
): Promise<string[]> {
  const files = globby.sync(specPattern, {
    ignore: ignoreTestFiles,
  });
  // Filter the expanded list against the ignore patterns a second time.
  // NOTE(review): globby's `ignore` option above should already exclude
  // these; this pass looks like belt-and-braces — confirm.
  const ignorePatterns = [...(ignoreTestFiles || [])];
  // Returns true when `file` matches NONE of the ignore patterns.
  const doesNotMatchAllIgnoredPatterns = (file: string) => {
    // {dot: true} so that folders with a '.' in them are matched;
    // {matchBase: true} lets basename-only patterns match in any directory.
    const MINIMATCH_OPTIONS = { dot: true, matchBase: true };
    return ignorePatterns.every((pattern) => {
      return !minimatch(file, pattern, MINIMATCH_OPTIONS);
    });
  };
  const filtered = files.filter(doesNotMatchAllIgnoredPatterns);
  return filtered;
}
// Roughly counts the tests in one spec file by counting `it(` occurrences
// (see `testPattern`); files that cannot be matched count as zero.
async function getTestCount(filePath: string): Promise<number> {
  const source = await fs.readFile(filePath, "utf8");
  const matches = source.match(testPattern);
  return matches === null ? 0 : matches.length;
}
// Orders spec files by descending test count so that chunking across
// runners is stable and roughly balanced. Duplicate paths collapse to a
// single entry (keyed by path).
async function sortSpecFilesByTestCount(
  specPathsOriginal: string[],
): Promise<string[]> {
  const testPerSpec: Record<string, number> = {};
  for (const specPath of [...specPathsOriginal]) {
    testPerSpec[specPath] = await getTestCount(specPath);
  }
  const ranked = Object.entries(testPerSpec).sort(
    (left, right) => right[1] - left[1],
  );
  return ranked.map(([specPath]) => specPath);
}
// Assigns this runner its disjoint share of the spec list: the specs whose
// index is congruent to `currentRunner` modulo `totalRunnersCount`.
function splitSpecs(
  specs: string[],
  totalRunnersCount: number,
  currentRunner: number,
): string[] {
  const selected: string[] = [];
  specs.forEach((spec, index) => {
    if (index % totalRunnersCount === currentRunner) {
      selected.push(spec);
    }
  });
  return selected;
}
// This function will finally get the specs as a comma separated string to pass the specs to the command
async function getSpecsToRun(
totalRunnersCount = 0,
currentRunner = 0,
specPattern: string | string[] = "cypress/e2e/**/**/*.{js,ts}",
ignorePattern: string | string[],
): Promise<string[]> {
try {
const specFilePaths = await sortSpecFilesByTestCount(
await getSpecFilePaths(specPattern, ignorePattern),
);
if (!specFilePaths.length) {
throw Error("No spec files found.");
}
const specsToRun = splitSpecs(
specFilePaths,
totalRunnersCount,
currentRunner,
);
return specsToRun;
} catch (err) {
console.error(err);
process.exit(1);
}
}
// Reads an environment variable and converts it to a number. Throws when a
// required variable is unset, or when the value does not parse as a number
// (including the unset-but-not-required case, where Number(undefined) is NaN).
function getEnvNumber(
  varName: string,
  { required = false }: GetEnvOptions = {},
): number {
  const raw = process.env[varName];
  if (required && raw === undefined) {
    throw Error(`${varName} is not set.`);
  }
  const parsed = Number(raw);
  if (Number.isNaN(parsed)) {
    throw Error(`${varName} is not a number.`);
  }
  return parsed;
}
// Reads an environment variable as a string. Throws when a required
// variable is unset; otherwise an unset variable yields "".
function getEnvValue(
  varName: string,
  { required = false }: GetEnvOptions = {},
) {
  const raw = process.env[varName];
  if (required && raw === undefined) {
    throw Error(`${varName} is not set.`);
  }
  return raw === undefined ? "" : raw;
}
// Gathers the runner-related CI environment variables in a single object;
// each resolves to "" when unset.
function getArgs() {
  const optional = { required: false };
  return {
    totalRunners: getEnvValue("TOTAL_RUNNERS", optional),
    thisRunner: getEnvValue("THIS_RUNNER", optional),
    cypressSpecs: getEnvValue("CYPRESS_SPECS", optional),
  };
}
// Cypress plugin entry point: narrows `config.specPattern` down to the
// subset of specs this CI runner should execute, based on the
// TOTAL_RUNNERS / THIS_RUNNER / CYPRESS_SPECS environment variables.
// Returns the mutated config (Cypress expects the config back).
export async function cypressSplit(on: any, config: any) {
  try {
    let currentRunner = 0;
    let allRunners = 1;
    // NOTE(review): these `await`s on plain config fields look redundant —
    // confirm they are intentional.
    let specPattern = await config.specPattern;
    const ignorePattern = await config.excludeSpecPattern;
    const { cypressSpecs, thisRunner, totalRunners } = getArgs();
    // An explicit CYPRESS_SPECS list overrides the discovered pattern.
    if (cypressSpecs != "")
      specPattern = cypressSpecs?.split(",").filter((val) => val !== "");
    if (totalRunners != "") {
      currentRunner = Number(thisRunner);
      allRunners = Number(totalRunners);
    }
    const specs = await getSpecsToRun(
      allRunners,
      currentRunner,
      specPattern,
      ignorePattern,
    );
    if (specs.length > 0) {
      config.specPattern = specs.length == 1 ? specs[0] : specs;
    } else {
      // No specs fell to this runner: point Cypress at a no-op spec so the
      // run still completes successfully.
      config.specPattern = "cypress/scripts/no_spec.ts";
    }
    return config;
  } catch (err) {
    // NOTE(review): on failure this logs and returns undefined, so the
    // caller receives no config — confirm this fallback is intended.
    console.log(err);
  }
}

View File

@ -12,7 +12,7 @@ const {
} = require("cypress-image-snapshot/plugin");
const { tagify } = require("cypress-tags");
const { cypressHooks } = require("../scripts/cypress-hooks");
const { cypressSplit } = require("../cypress-split");
const { cypressSplit } = require("../scripts/cypress-split");
// ***********************************************************
// This example plugins/index.js can be used to load plugins
//
@ -216,7 +216,7 @@ module.exports = async (on, config) => {
});
if (process.env["RUNID"]) {
config = await cypressSplit(on, config);
config = await new cypressSplit().splitSpecs(on, config);
cypressHooks(on, config);
}

View File

@ -1,244 +0,0 @@
const { Pool } = require("pg");
const os = require("os");
const AWS = require("aws-sdk");
const fs = require("fs");
// CommonJS export; function hoisting makes the later declaration visible here.
exports.cypressHooks = cypressHooks;
// Reads an environment variable; throws a combined guidance error when a
// required variable is missing, and falls back to "Cypress test" for any
// unset optional variable.
function getEnvValue(varName, { required = true }) {
  const raw = process.env[varName];
  if (required && raw === undefined) {
    throw Error(
      `Please check some or all the following ENV variables are not set properly [ RUNID, ATTEMPT_NUMBER, REPOSITORY, COMMITTER, TAG, BRANCH, THIS_RUNNER, CYPRESS_DB_USER, CYPRESS_DB_HOST, CYPRESS_DB_NAME, CYPRESS_DB_PWD, CYPRESS_S3_ACCESS, CYPRESS_S3_SECRET ].`,
    );
  }
  return raw === undefined ? "Cypress test" : raw;
}
// Builds a pg connection Pool for the cypress-dashboard database using CI
// env credentials. Callers connect()/release() around each query batch.
function configureDbClient() {
  const dbConfig = {
    user: getEnvValue("CYPRESS_DB_USER", { required: true }),
    host: getEnvValue("CYPRESS_DB_HOST", { required: true }),
    database: getEnvValue("CYPRESS_DB_NAME", { required: true }),
    password: getEnvValue("CYPRESS_DB_PWD", { required: true }),
    port: 5432,
    // Give slow CI networks up to 60s to establish a connection.
    connectionTimeoutMillis: 60000,
    ssl: true,
    keepalives: 0,
  };
  const dbClient = new Pool(dbConfig);
  return dbClient;
}
// Creates an AWS S3 client (ap-south-1) authenticated via CI env secrets,
// used to upload failure screenshots and videos.
function configureS3() {
  AWS.config.update({ region: "ap-south-1" });
  const s3client = new AWS.S3({
    credentials: {
      accessKeyId: getEnvValue("CYPRESS_S3_ACCESS", { required: true }),
      secretAccessKey: getEnvValue("CYPRESS_S3_SECRET", { required: true }),
    },
  });
  return s3client;
}
// Uploads the file at `filePath` to the internal cypress-dashboard bucket
// under `key`; returns the in-flight upload promise.
function uploadToS3(s3Client, filePath, key) {
  const uploadRequest = {
    Bucket: "appsmith-internal-cy-db",
    Key: key,
    Body: fs.readFileSync(filePath),
  };
  return s3Client.upload(uploadRequest).promise();
}
// Cypress plugin hooks that persist run/spec/test results into the internal
// cypress-dashboard Postgres DB and upload failure artifacts to S3.
// `on` is Cypress's event registrar; `config` is accepted but not read here.
async function cypressHooks(on, config) {
  const s3 = configureS3();
  // Static metadata about this workflow run, sourced from CI env vars.
  const runData = {
    commitMsg: getEnvValue("COMMIT_INFO_MESSAGE", { required: false }),
    workflowId: getEnvValue("RUNID", { required: true }),
    attempt: getEnvValue("ATTEMPT_NUMBER", { required: true }),
    os: os.type(),
    repo: getEnvValue("REPOSITORY", { required: true }),
    committer: getEnvValue("COMMITTER", { required: true }),
    type: getEnvValue("TAG", { required: true }),
    branch: getEnvValue("BRANCH", { required: true }),
  };
  const matrix = {
    matrixId: getEnvValue("THIS_RUNNER", { required: true }),
    matrixStatus: "started",
  };
  // Mutable state shared between the before:spec and after:spec callbacks.
  const specData = {};
  // NOTE(review): `on(...)` registers a handler; the `await` on the
  // registration calls below looks unnecessary — confirm it is intentional.
  await on("before:run", async (runDetails) => {
    runData.browser = runDetails.browser.name;
    const dbClient = await configureDbClient().connect();
    try {
      // Upsert the attempt row; ON CONFLICT DO NOTHING means a concurrent
      // runner may have inserted it first, handled by the else branch below.
      const runResponse = await dbClient.query(
        `INSERT INTO public.attempt ("workflowId", "attempt", "browser", "os", "repo", "committer", "type", "commitMsg", "branch")
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
ON CONFLICT ("workflowId", attempt) DO NOTHING
RETURNING id;`,
        [
          runData.workflowId,
          runData.attempt,
          runData.browser,
          runData.os,
          runData.repo,
          runData.committer,
          runData.type,
          runData.commitMsg,
          runData.branch,
        ],
      );
      if (runResponse.rows.length > 0) {
        runData.attemptId = runResponse.rows[0].id; // Save the inserted attempt ID for later updates
      } else {
        const res = await dbClient.query(
          `SELECT id FROM public.attempt WHERE "workflowId" = $1 AND attempt = $2`,
          [runData.workflowId, runData.attempt],
        );
        runData.attemptId = res.rows[0].id;
      }
      const matrixResponse = await dbClient.query(
        `INSERT INTO public.matrix ("workflowId", "matrixId", "status", "attemptId")
VALUES ($1, $2, $3, $4)
ON CONFLICT ("matrixId", "attemptId") DO NOTHING
RETURNING id;`,
        [
          runData.workflowId,
          matrix.matrixId,
          matrix.matrixStatus,
          runData.attemptId,
        ],
      );
      // NOTE(review): if the matrix row already existed, DO NOTHING returns
      // no rows and this throws (caught below), leaving matrix.id unset.
      matrix.id = matrixResponse.rows[0].id; // Save the inserted matrix ID for later updates
    } catch (err) {
      console.log(err);
    } finally {
      await dbClient.release();
    }
  });
  await on("before:spec", async (spec) => {
    specData.name = spec.relative;
    specData.matrixId = matrix.id;
    const dbClient = await configureDbClient().connect();
    try {
      // The placeholder no_spec.ts run is never reported.
      if (!specData.name.includes("no_spec.ts")) {
        const specResponse = await dbClient.query(
          'INSERT INTO public.specs ("name", "matrixId") VALUES ($1, $2) RETURNING id',
          [specData.name, matrix.id],
        );
        specData.specId = specResponse.rows[0].id; // Save the inserted spec ID for later updates
      }
    } catch (err) {
      console.log(err);
    } finally {
      await dbClient.release();
    }
  });
  await on("after:spec", async (spec, results) => {
    // Snapshot the spec-level stats for the DB update below.
    specData.testCount = results.stats.tests;
    specData.passes = results.stats.passes;
    specData.failed = results.stats.failures;
    specData.pending = results.stats.pending;
    specData.skipped = results.stats.skipped;
    specData.status = results.stats.failures > 0 ? "fail" : "pass";
    specData.duration = results.stats.wallClockDuration;
    const dbClient = await configureDbClient().connect();
    try {
      if (!specData.name.includes("no_spec.ts")) {
        await dbClient.query(
          'UPDATE public.specs SET "testCount" = $1, "passes" = $2, "failed" = $3, "skipped" = $4, "pending" = $5, "status" = $6, "duration" = $7 WHERE id = $8',
          [
            results.stats.tests,
            results.stats.passes,
            results.stats.failures,
            results.stats.skipped,
            results.stats.pending,
            specData.status,
            specData.duration,
            specData.specId,
          ],
        );
        // Record each test of the spec, including its retry attempts.
        for (const test of results.tests) {
          const testResponse = await dbClient.query(
            `INSERT INTO public.tests ("name", "specId", "status", "retries", "retryData") VALUES ($1, $2, $3, $4, $5) RETURNING id`,
            [
              test.title[1],
              specData.specId,
              test.state,
              test.attempts.length,
              JSON.stringify(test.attempts),
            ],
          );
          if (
            test.attempts.some((attempt) => attempt.state === "failed") &&
            results.screenshots
          ) {
            const out = results.screenshots.filter(
              (scr) => scr.testId === test.testId,
            );
            console.log("Uploading screenshots...");
            for (const scr of out) {
              const key = `${testResponse.rows[0].id}_${specData.specId}_${
                scr.testAttemptIndex + 1
              }`;
              // Fire-and-forget upload; failures are logged, not awaited.
              Promise.all([uploadToS3(s3, scr.path, key)]).catch((error) => {
                console.log("Error in uploading screenshots:", error);
              });
            }
          }
        }
        if (
          results.tests.some((test) =>
            test.attempts.some((attempt) => attempt.state === "failed"),
          ) &&
          results.video
        ) {
          console.log("Uploading video...");
          const key = `${specData.specId}`;
          // Fire-and-forget upload; failures are logged, not awaited.
          Promise.all([uploadToS3(s3, results.video, key)]).catch((error) => {
            console.log("Error in uploading video:", error);
          });
        }
      }
    } catch (err) {
      console.log(err);
    } finally {
      await dbClient.release();
    }
  });
  on("after:run", async (runDetails) => {
    const dbClient = await configureDbClient().connect();
    try {
      // Mark this runner's matrix row done and stamp the attempt's end time.
      await dbClient.query(
        `UPDATE public.matrix SET "status" = $1 WHERE id = $2`,
        ["done", matrix.id],
      );
      await dbClient.query(
        `UPDATE public.attempt SET "endTime" = $1 WHERE "id" = $2`,
        [new Date(), runData.attemptId],
      );
    } catch (err) {
      console.log(err);
    } finally {
      await dbClient.end();
    }
  });
}

View File

@ -0,0 +1,147 @@
import os from "os";
import util from "./util";
/**
 * Registers Cypress lifecycle hooks that mirror run progress into the
 * internal cypress-dashboard Postgres DB and push failure artifacts
 * (screenshots, videos) to S3.
 *
 * The attempt/matrix/spec rows are expected to be created up front by the
 * spec-splitting step; these hooks only UPDATE/SELECT them, plus inserting
 * per-test results.
 *
 * @param on     Cypress plugin event registrar.
 * @param config Resolved Cypress plugin config (accepted but unused here).
 */
export async function cypressHooks(
  on: Cypress.PluginEvents,
  config: Cypress.PluginConfigOptions,
) {
  const _ = new util();
  const s3 = _.configureS3();
  const dbClient = _.configureDbClient();
  // Identity of this workflow run, taken from CI env vars via util.
  const runData: any = {
    workflowId: _.getVars().runId,
    attempt: _.getVars().attempt_number,
    os: os.type(),
  };
  const matrix: any = {
    matrixId: _.getVars().thisRunner,
    matrixStatus: "started",
  };
  // Mutable state shared between before:spec and after:spec.
  const specData: any = {};
  on("before:run", async (runDetails: Cypress.BeforeRunDetails) => {
    runData.browser = runDetails.browser?.name;
    const client = await dbClient.connect();
    try {
      // Record browser/os on the pre-created attempt row and remember its
      // id. NOTE(review): throws (caught below) if that row is missing.
      const attemptRes = await client.query(
        `UPDATE public."attempt" SET "browser" = $1, "os" = $2 WHERE "workflowId" = $3 AND attempt = $4 RETURNING id`,
        [runData.browser, runData.os, runData.workflowId, runData.attempt],
      );
      runData.attemptId = attemptRes.rows[0].id;
      const matrixRes = await client.query(
        `SELECT id FROM public."matrix" WHERE "attemptId" = $1 AND "matrixId" = $2`,
        [runData.attemptId, matrix.matrixId],
      );
      matrix.id = matrixRes.rowCount > 0 ? matrixRes.rows[0].id : "";
    } catch (err) {
      console.log(err);
    } finally {
      client.release();
    }
  });
  on("before:spec", async (spec: Cypress.Spec) => {
    specData.name = spec.relative;
    specData.matrixId = matrix.id;
    const client = await dbClient.connect();
    try {
      // The placeholder no_spec.ts run is never reported.
      if (!specData.name.includes("no_spec.ts")) {
        const specResponse = await client.query(
          `UPDATE public."specs" SET "status" = $1 WHERE "name" = $2 AND "matrixId" = $3 RETURNING id`,
          ["in-progress", specData.name, matrix.id],
        );
        // NOTE(review): throws (caught below) if the spec row was not
        // pre-inserted, leaving specData.specId unset.
        specData.specId = specResponse.rows[0].id;
      }
    } catch (err) {
      console.log(err);
    } finally {
      client.release();
    }
  });
  on(
    "after:spec",
    async (spec: Cypress.Spec, results: CypressCommandLine.RunResult) => {
      const client = await dbClient.connect();
      try {
        if (!specData.name.includes("no_spec.ts")) {
          // Persist spec-level stats on the spec row.
          await client.query(
            'UPDATE public.specs SET "testCount" = $1, "passes" = $2, "failed" = $3, "skipped" = $4, "pending" = $5, "status" = $6, "duration" = $7 WHERE id = $8',
            [
              results.stats.tests,
              results.stats.passes,
              results.stats.failures,
              results.stats.skipped,
              results.stats.pending,
              results.stats.failures > 0 ? "fail" : "pass",
              results.stats.duration,
              specData.specId,
            ],
          );
          // Record each test of the spec, including its retry count.
          for (const test of results.tests) {
            const testResponse = await client.query(
              `INSERT INTO public.tests ("name", "specId", "status", "retries", "retryData") VALUES ($1, $2, $3, $4, $5) RETURNING id`,
              [
                test.title[1],
                specData.specId,
                test.state,
                test.attempts.length,
                test.displayError,
              ],
            );
            if (
              test.attempts.some((attempt) => attempt.state === "failed") &&
              results.screenshots.length > 0
            ) {
              // Screenshots are matched to the test by path substring since
              // RunResult screenshots carry no test id here.
              const out = results.screenshots.filter((scr) =>
                scr.path.includes(test.title[1]),
              );
              console.log("Uploading screenshots...");
              for (const scr of out) {
                // Infer the retry attempt from Cypress's "attempt 2" suffix.
                const attempt = scr.path.includes("attempt 2") ? 2 : 1;
                const key = `${testResponse.rows[0].id}_${specData.specId}_${attempt}`;
                await _.uploadToS3(s3, scr.path, key);
              }
            }
          }
          if (
            results.tests.some((test) =>
              test.attempts.some((attempt) => attempt.state === "failed"),
            ) &&
            results.video
          ) {
            console.log("Uploading video...");
            const key = `${specData.specId}`;
            await _.uploadToS3(s3, results.video, key);
          }
        }
      } catch (err) {
        console.log(err);
      } finally {
        client.release();
      }
    },
  );
  on("after:run", async (runDetails) => {
    const client = await dbClient.connect();
    try {
      // Guard added: when no spec ever started, specData.name is undefined
      // and calling .includes() on it would throw a TypeError.
      if (specData.name && !specData.name.includes("no_spec.ts")) {
        await client.query(
          `UPDATE public.matrix SET "status" = $1 WHERE id = $2`,
          ["done", matrix.id],
        );
        await client.query(
          `UPDATE public.attempt SET "endTime" = $1 WHERE "id" = $2`,
          [new Date(), runData.attemptId],
        );
      }
    } catch (err) {
      console.log(err);
    } finally {
      client.release();
      // The run is over: dispose of the whole pool.
      await dbClient.end();
    }
  });
}

View File

@ -0,0 +1,325 @@
/* eslint-disable no-console */
import type { DataItem } from "./util";
import util from "./util";
import globby from "globby";
import minimatch from "minimatch";
import cypress from "cypress";
export class cypressSplit {
util = new util();
dbClient = this.util.configureDbClient();
// Expands `specPattern` into concrete spec file paths, dropping anything
// matched by `ignoreTestFiles`.
private async getSpecFilePaths(
  specPattern: any,
  ignoreTestFiles: any,
): Promise<string[]> {
  const files = globby.sync(specPattern, {
    ignore: ignoreTestFiles,
  });
  // Filter the expanded list against the ignore patterns a second time.
  // NOTE(review): globby's `ignore` option above should already exclude
  // these; this pass looks like belt-and-braces — confirm.
  const ignorePatterns = [...(ignoreTestFiles || [])];
  // Returns true when `file` matches NONE of the ignore patterns.
  const doesNotMatchAllIgnoredPatterns = (file: string) => {
    // {dot: true} so that folders with a '.' in them are matched;
    // {matchBase: true} lets basename-only patterns match in any directory.
    const MINIMATCH_OPTIONS = { dot: true, matchBase: true };
    return ignorePatterns.every((pattern) => {
      return !minimatch(file, pattern, MINIMATCH_OPTIONS);
    });
  };
  const filtered = files.filter(doesNotMatchAllIgnoredPatterns);
  return filtered;
}
// Attaches a historical average duration to each spec (3 min default when
// unseen) and splits them into duration-balanced groups across the runners
// that have not yet registered work for this attempt in the DB.
// Returns undefined when the DB query fails (error is only logged).
private async getSpecsWithTime(specs: string[], attemptId: number) {
  const client = await this.dbClient.connect();
  // Fallback duration (ms) for specs with no historical average.
  const defaultDuration = 180000;
  const specsMap = new Map();
  try {
    const queryRes = await client.query(
      'SELECT * FROM public."spec_avg_duration" ORDER BY duration DESC',
    );
    queryRes.rows.forEach((obj) => {
      specsMap.set(obj.name, obj);
    });
    const allSpecsWithDuration = specs.map((spec) => {
      const match = specsMap.get(spec);
      return match ? match : { name: spec, duration: defaultDuration };
    });
    const activeRunners = await this.util.getActiveRunners();
    const activeRunnersFromDb = await this.getActiveRunnersFromDb(attemptId);
    // Remaining capacity = configured runners minus those already registered.
    return await this.util.divideSpecsIntoBalancedGroups(
      allSpecsWithDuration,
      Number(activeRunners) - Number(activeRunnersFromDb),
    );
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
// Resolves the spec list for this runner: expand the pattern, balance by
// historical duration, and take the first balanced group.
// NOTE(review): only group [0] is returned — presumably this runner's
// share; confirm against divideSpecsIntoBalancedGroups' contract.
// Any error here terminates the process with exit code 1.
private async getSpecsToRun(
  specPattern: string | string[] = "cypress/e2e/**/**/*.{js,ts}",
  ignorePattern: string | string[],
  attemptId: number,
): Promise<string[]> {
  try {
    const specFilePaths = await this.getSpecFilePaths(
      specPattern,
      ignorePattern,
    );
    const specsToRun = await this.getSpecsWithTime(specFilePaths, attemptId);
    // getSpecsWithTime returns undefined on DB failure; treat as "no specs".
    return specsToRun === undefined
      ? []
      : specsToRun[0].map((spec) => spec.name);
  } catch (err) {
    console.error(err);
    process.exit(1);
  }
}
// Counts the matrix rows already registered for this attempt — i.e. how
// many runners have claimed a share of the specs so far.
// Returns undefined when the query fails (error is only logged).
private async getActiveRunnersFromDb(attemptId: number) {
  const client = await this.dbClient.connect();
  try {
    const matrixRes = await client.query(
      `SELECT * FROM public."matrix" WHERE "attemptId" = $1`,
      [attemptId],
    );
    return matrixRes.rowCount;
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Fetches the names of specs already claimed by any runner of the current
 * workflow attempt (identified by the env-provided runId / attempt_number).
 * Returns undefined if the query fails (logged).
 */
private async getAlreadyRunningSpecs() {
  const client = await this.dbClient.connect();
  try {
    const result = await client.query(
      `SELECT name FROM public."specs"
WHERE "matrixId" IN
(SELECT id FROM public."matrix"
WHERE "attemptId" = (
SELECT id FROM public."attempt" WHERE "workflowId" = $1 and "attempt" = $2
)
)`,
      [this.util.getVars().runId, this.util.getVars().attempt_number],
    );
    // map over an empty rows array already yields [], so no length guard.
    const names: string[] = result.rows.map((row) => row.name);
    return names;
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Upserts the attempt row for this workflow run and returns its id.
 * The INSERT uses ON CONFLICT DO NOTHING, so when another runner created
 * the row first we fall back to selecting the existing id.
 * Returns undefined if a query fails (logged).
 */
private async createAttempt() {
  const client = await this.dbClient.connect();
  try {
    const vars = this.util.getVars();
    const inserted = await client.query(
      `INSERT INTO public."attempt" ("workflowId", "attempt", "repo", "committer", "type", "commitMsg", "branch")
VALUES ($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT ("workflowId", attempt) DO NOTHING
RETURNING id;`,
      [
        vars.runId,
        vars.attempt_number,
        vars.repository,
        vars.committer,
        vars.tag,
        vars.commitMsg,
        vars.branch,
      ],
    );
    if (inserted.rows.length > 0) {
      return inserted.rows[0].id;
    }
    // Conflict: the attempt already exists, so look up its id instead.
    const existing = await client.query(
      `SELECT id FROM public."attempt" WHERE "workflowId" = $1 AND attempt = $2`,
      [vars.runId, vars.attempt_number],
    );
    return existing.rows[0].id;
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Registers this runner (THIS_RUNNER) in the matrix table for the given
 * attempt with status "started" and returns the matrix row id.
 *
 * Bug fix: the INSERT uses ON CONFLICT ("matrixId", "attemptId") DO NOTHING,
 * so on a re-run the statement returns ZERO rows and the old
 * `matrixResponse.rows[0].id` threw a TypeError that was swallowed by the
 * catch, making the method resolve to undefined. We now mirror
 * createAttempt and fall back to selecting the existing row's id.
 */
private async createMatrix(attemptId: number) {
  const client = await this.dbClient.connect();
  try {
    const matrixResponse = await client.query(
      `INSERT INTO public."matrix" ("workflowId", "matrixId", "status", "attemptId")
VALUES ($1, $2, $3, $4)
ON CONFLICT ("matrixId", "attemptId") DO NOTHING
RETURNING id;`,
      [
        this.util.getVars().runId,
        this.util.getVars().thisRunner,
        "started",
        attemptId,
      ],
    );
    if (matrixResponse.rows.length > 0) {
      return matrixResponse.rows[0].id;
    }
    // Conflict: this runner already has a matrix row for this attempt.
    const existing = await client.query(
      `SELECT id FROM public."matrix" WHERE "matrixId" = $1 AND "attemptId" = $2`,
      [this.util.getVars().thisRunner, attemptId],
    );
    return existing.rows[0].id;
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Fetches spec names from the previous attempt that did not complete
 * successfully (status fail / queued / in-progress), so a re-run can
 * execute only those. Defaults to "this run id, previous attempt number".
 * Returns undefined if the query fails (logged).
 */
private async getFailedSpecsFromPreviousRun(
  workflowId = Number(this.util.getVars().runId),
  attempt_number = Number(this.util.getVars().attempt_number) - 1,
) {
  const client = await this.dbClient.connect();
  try {
    const result = await client.query(
      `SELECT name FROM public."specs"
WHERE "matrixId" IN
(SELECT id FROM public."matrix"
WHERE "attemptId" = (
SELECT id FROM public."attempt" WHERE "workflowId" = $1 and "attempt" = $2
)
) AND status IN ('fail', 'queued', 'in-progress')`,
      [workflowId, attempt_number],
    );
    // map over an empty rows array already yields [], so no length guard.
    const names: string[] = result.rows.map((row) => row.name);
    return names;
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Inserts one "queued" row per spec for the given matrix row.
 *
 * Fix: the insert result was bound to an unused local (`res`); the binding
 * is removed. Inserts remain sequential to preserve existing behavior.
 * Errors are logged and swallowed, matching the sibling methods.
 */
private async addSpecsToMatrix(matrixId: number, specs: string[]) {
  const client = await this.dbClient.connect();
  try {
    for (const spec of specs) {
      await client.query(
        `INSERT INTO public."specs" ("name", "matrixId", "status") VALUES ($1, $2, $3) RETURNING id`,
        [spec, matrixId, "queued"],
      );
    }
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Acquires the attempt-level advisory lock (polling every 5s), then — while
 * holding the lock — computes this runner's spec list, excluding specs that
 * other runners have already claimed.
 *
 * The lock is NOT released here; the caller releases it via
 * updateTheSpecsAndReleaseLock after persisting its spec assignment.
 * Returns undefined if a query fails (logged).
 */
private async addLockGetTheSpecs(
  attemptId: number,
  specPattern: string | string[],
  ignorePattern: string | string[],
) {
  const client = await this.dbClient.connect();
  try {
    // Poll until this runner wins the lock (is_locked false -> true).
    for (;;) {
      const lockAttempt = await client.query(
        `UPDATE public."attempt" SET is_locked = true WHERE id = $1 AND is_locked = false RETURNING id`,
        [attemptId],
      );
      if (lockAttempt.rows.length !== 1) {
        await this.sleep(5000);
        continue;
      }
      // Lock held: fold already-claimed specs into the ignore patterns.
      const runningSpecs = (await this.getAlreadyRunningSpecs()) ?? [];
      const mergedIgnore: string[] =
        typeof ignorePattern === "string"
          ? [...runningSpecs, ignorePattern]
          : [...runningSpecs, ...ignorePattern];
      return await this.getSpecsToRun(specPattern, mergedIgnore, attemptId);
    }
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/**
 * Persists this runner's spec assignment (matrix row + queued spec rows)
 * and then releases the attempt-level lock taken in addLockGetTheSpecs.
 */
private async updateTheSpecsAndReleaseLock(
  attemptId: number,
  specs: string[],
) {
  const client = await this.dbClient.connect();
  try {
    const matrixId = await this.createMatrix(attemptId);
    await this.addSpecsToMatrix(matrixId, specs);
    // Flip the lock back only if we still hold it (is_locked = true).
    await client.query(
      `UPDATE public."attempt" SET is_locked = false WHERE id = $1 AND is_locked = true RETURNING id`,
      [attemptId],
    );
  } catch (err) {
    console.log(err);
  } finally {
    client.release();
  }
}
/** Resolves after the given number of milliseconds. */
private sleep(ms: number): Promise<void> {
  return new Promise<void>((done) => {
    setTimeout(() => done(), ms);
  });
}
/**
 * Cypress plugin entry point: decides which specs this runner executes.
 *
 * Pattern precedence: CYPRESS_SPECS env override, then (if CYPRESS_RERUN
 * is "true") the failed/unfinished specs of the previous attempt, else the
 * configured specPattern. The chosen specs are claimed in the DB under an
 * attempt-level lock; if nothing remains, a no-op spec is run instead.
 * Always closes the DB pool before returning.
 */
public async splitSpecs(
  on: Cypress.PluginEvents,
  config: Cypress.PluginConfigOptions,
) {
  try {
    const noOpSpec = "cypress/scripts/no_spec.ts";
    const { cypressSpecs, cypressRerun } = this.util.getVars();
    let specPattern: string | string[] = config.specPattern;
    const ignorePattern: string | string[] = config.excludeSpecPattern;
    if (cypressSpecs != "") {
      specPattern = cypressSpecs?.split(",").filter((val) => val !== "");
    }
    if (cypressRerun === "true") {
      specPattern = (await this.getFailedSpecsFromPreviousRun()) ?? noOpSpec;
    }
    const attempt = await this.createAttempt();
    const specs =
      (await this.addLockGetTheSpecs(attempt, specPattern, ignorePattern)) ??
      [];
    if (specs.length === 0 || specs.includes(noOpSpec)) {
      config.specPattern = noOpSpec;
    } else {
      config.specPattern = specs.length === 1 ? specs[0] : specs;
      await this.updateTheSpecsAndReleaseLock(attempt, specs);
    }
    return config;
  } catch (err) {
    console.log(err);
  } finally {
    this.dbClient.end();
  }
}
}

View File

@ -0,0 +1,149 @@
import { Pool } from "pg";
import AWS from "aws-sdk";
import fs from "fs";
import { Octokit } from "@octokit/rest";
import fetch from "node-fetch";
export interface DataItem {
name: string;
duration: string;
}
/**
 * Shared helpers for the CI spec-splitting tooling: environment access,
 * DB/S3 client setup, GitHub Actions queries, and spec load balancing.
 */
export default class util {
  /**
   * Collects every environment variable the tooling needs in one place.
   * Required vars throw (via getEnvValue) when missing; optional ones
   * fall back to "". githubToken is read directly and may be undefined.
   */
  public getVars() {
    return {
      runId: this.getEnvValue("RUNID", { required: true }),
      attempt_number: this.getEnvValue("ATTEMPT_NUMBER", { required: true }),
      repository: this.getEnvValue("REPOSITORY", { required: true }),
      committer: this.getEnvValue("COMMITTER", { required: true }),
      tag: this.getEnvValue("TAG", { required: true }),
      branch: this.getEnvValue("BRANCH", { required: true }),
      cypressDbUser: this.getEnvValue("CYPRESS_DB_USER", { required: true }),
      cypressDbHost: this.getEnvValue("CYPRESS_DB_HOST", { required: true }),
      cypressDbName: this.getEnvValue("CYPRESS_DB_NAME", { required: true }),
      cypressDbPwd: this.getEnvValue("CYPRESS_DB_PWD", { required: true }),
      cypressS3Access: this.getEnvValue("CYPRESS_S3_ACCESS", {
        required: true,
      }),
      cypressS3Secret: this.getEnvValue("CYPRESS_S3_SECRET", {
        required: true,
      }),
      githubToken: process.env["GITHUB_TOKEN"],
      commitMsg: this.getEnvValue("COMMIT_INFO_MESSAGE", { required: false }),
      totalRunners: this.getEnvValue("TOTAL_RUNNERS", { required: false }),
      thisRunner: this.getEnvValue("THIS_RUNNER", { required: true }),
      cypressSpecs: this.getEnvValue("CYPRESS_SPECS", { required: false }),
      cypressRerun: this.getEnvValue("CYPRESS_RERUN", { required: false }),
    };
  }

  /**
   * Greedily distributes specs into `numberOfGroups` groups so total
   * durations stay balanced: each item goes to the currently-lightest
   * group (ties resolve to the lowest index, as before).
   *
   * Fixes over the previous version:
   * - Every group's total was recomputed for every item
   *   (O(items x groups x groupSize)); running totals are now maintained,
   *   making this O(items x groups) with identical output.
   * - numberOfGroups < 1 produced an empty `groups` array and the
   *   subsequent push crashed with a TypeError; the count is now clamped
   *   to at least one group.
   *
   * Declared async only to preserve the existing Promise-returning
   * interface; the computation itself is synchronous.
   */
  public async divideSpecsIntoBalancedGroups(
    data: DataItem[],
    numberOfGroups: number,
  ): Promise<DataItem[][]> {
    const groupCount = Math.max(1, numberOfGroups);
    const groups: DataItem[][] = Array.from({ length: groupCount }, () => []);
    const totals: number[] = new Array(groupCount).fill(0);
    data.forEach((item) => {
      // Find the group with the smallest running total (first wins ties).
      let lightest = 0;
      for (let i = 1; i < groupCount; i++) {
        if (totals[i] < totals[lightest]) {
          lightest = i;
        }
      }
      groups[lightest].push(item);
      totals[lightest] += Number(item.duration);
    });
    return groups;
  }

  /**
   * Reads an environment variable. Throws when a required variable is
   * undefined; otherwise returns its value, or "" when unset.
   */
  public getEnvValue(varName: string, { required = true }): string {
    if (required && process.env[varName] === undefined) {
      throw Error(
        `${varName} is not defined.
Please check all the following environment variables are defined properly
[ RUNID, ATTEMPT_NUMBER, REPOSITORY, COMMITTER, TAG, BRANCH, THIS_RUNNER, CYPRESS_DB_USER, CYPRESS_DB_HOST, CYPRESS_DB_NAME, CYPRESS_DB_PWD, CYPRESS_S3_ACCESS, CYPRESS_S3_SECRET ].`,
      );
    }
    return process.env[varName] ?? "";
  }

  // This is to setup the db client
  /** Builds a pg Pool from the CYPRESS_DB_* environment variables. */
  public configureDbClient() {
    const dbConfig = {
      user: this.getVars().cypressDbUser,
      host: this.getVars().cypressDbHost,
      database: this.getVars().cypressDbName,
      password: this.getVars().cypressDbPwd,
      port: 5432,
      connectionTimeoutMillis: 60000,
      ssl: true,
      // NOTE(review): "keepalives" does not appear to be a node-postgres
      // Pool option (it is a libpq parameter) — verify it has any effect.
      keepalives: 30,
    };
    const dbClient = new Pool(dbConfig);
    return dbClient;
  }

  // This is to setup the AWS client
  /** Builds an S3 client (ap-south-1) from the CYPRESS_S3_* credentials. */
  public configureS3() {
    AWS.config.update({ region: "ap-south-1" });
    const s3client = new AWS.S3({
      credentials: {
        accessKeyId: this.getVars().cypressS3Access,
        secretAccessKey: this.getVars().cypressS3Secret,
      },
    });
    return s3client;
  }

  // This is to upload files to s3 when required
  /** Uploads a local file to the fixed CI bucket under the given key. */
  public async uploadToS3(s3Client: AWS.S3, filePath: string, key: string) {
    const fileContent = fs.readFileSync(filePath);
    const params = {
      Bucket: "appsmith-internal-cy-db",
      Key: key,
      Body: fileContent,
    };
    return await s3Client.upload(params).promise();
  }

  /**
   * Counts the jobs of this workflow run that are in_progress or queued
   * for the current attempt, via the GitHub Actions jobs API.
   * Returns undefined on API error (logged).
   *
   * NOTE(review): only the first page (per_page: 100) is inspected; runs
   * with more than 100 jobs would be under-counted — confirm acceptable.
   */
  public async getActiveRunners() {
    const octokit = new Octokit({
      auth: this.getVars().githubToken,
      request: {
        fetch: fetch,
      },
    });
    try {
      const repo: string[] = this.getVars().repository.split("/");
      const response = await octokit.request(
        "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
        {
          owner: repo[0],
          repo: repo[1],
          run_id: Number(this.getVars().runId),
          per_page: 100,
          headers: {
            "X-GitHub-Api-Version": "2022-11-28",
          },
        },
      );
      const active_runners = response.data.jobs.filter(
        (job) =>
          (job.status === "in_progress" || job.status === "queued") &&
          job.run_attempt === Number(this.getVars().attempt_number),
      );
      return active_runners.length;
    } catch (error) {
      console.error("Error:", error);
    }
  }
}

View File

@ -6,7 +6,7 @@ export default defineConfig({
requestTimeout: 60000,
responseTimeout: 60000,
pageLoadTimeout: 60000,
videoUploadOnPasses: false,
video: true,
numTestsKeptInMemory: 5,
experimentalMemoryManagement: true,
reporter: "cypress-mochawesome-reporter",

View File

@ -7,7 +7,7 @@ export default defineConfig({
responseTimeout: 60000,
pageLoadTimeout: 60000,
videoCompression: false,
videoUploadOnPasses: false,
video: true,
numTestsKeptInMemory: 5,
experimentalMemoryManagement: true,
reporter: "cypress-mochawesome-reporter",

View File

@ -140,7 +140,6 @@
"normalizr": "^3.3.0",
"object-hash": "^3.0.0",
"path-to-regexp": "^6.2.0",
"pg": "^8.11.3",
"popper.js": "^1.15.0",
"prismjs": "^1.27.0",
"proxy-memoize": "^1.2.0",
@ -231,6 +230,7 @@
"@babel/helper-string-parser": "^7.19.4",
"@craco/craco": "^7.0.0",
"@faker-js/faker": "^7.4.0",
"@octokit/rest": "^20.0.1",
"@peculiar/webcrypto": "^1.4.3",
"@redux-saga/testing-utils": "^1.1.5",
"@sentry/webpack-plugin": "^1.18.9",
@ -252,6 +252,7 @@
"@types/node": "^10.12.18",
"@types/node-forge": "^0.10.0",
"@types/object-hash": "^2.2.1",
"@types/pg": "^8.10.2",
"@types/prismjs": "^1.16.1",
"@types/react": "^17.0.2",
"@types/react-beautiful-dnd": "^11.0.4",
@ -288,7 +289,7 @@
"compression-webpack-plugin": "^10.0.0",
"cra-bundle-analyzer": "^0.1.0",
"cy-verify-downloads": "^0.0.5",
"cypress": "^12.17.4",
"cypress": "13.2.0",
"cypress-file-upload": "^4.1.1",
"cypress-image-snapshot": "^4.0.1",
"cypress-mochawesome-reporter": "^3.5.1",
@ -322,6 +323,7 @@
"json5": "^2.2.3",
"lint-staged": "^13.2.0",
"msw": "^0.28.0",
"pg": "^8.11.3",
"plop": "^3.1.1",
"postcss": "^8.4.30",
"postcss-at-rules-variables": "^0.3.0",

View File

@ -2265,9 +2265,9 @@ __metadata:
languageName: node
linkType: hard
"@cypress/request@npm:2.88.12":
version: 2.88.12
resolution: "@cypress/request@npm:2.88.12"
"@cypress/request@npm:^3.0.0":
version: 3.0.0
resolution: "@cypress/request@npm:3.0.0"
dependencies:
aws-sign2: ~0.7.0
aws4: ^1.8.0
@ -2287,7 +2287,7 @@ __metadata:
tough-cookie: ^4.1.3
tunnel-agent: ^0.6.0
uuid: ^8.3.2
checksum: 2c6fbf7f3127d41bffca8374beaa8cf95450495a8a077b00309ea9d94dd2a4da450a77fe038e8ad26c97cdd7c39b65c53c850f8338ce9bc2dbe23ce2e2b48329
checksum: 8ec81075b800b84df8a616dce820a194d562a35df251da234f849344022979f3675baa0b82988843f979a488e39bc1eec6204cfe660c75ace9bf4d2951edec43
languageName: node
linkType: hard
@ -3941,6 +3941,133 @@ __metadata:
languageName: node
linkType: hard
"@octokit/auth-token@npm:^4.0.0":
version: 4.0.0
resolution: "@octokit/auth-token@npm:4.0.0"
checksum: d78f4dc48b214d374aeb39caec4fdbf5c1e4fd8b9fcb18f630b1fe2cbd5a880fca05445f32b4561f41262cb551746aeb0b49e89c95c6dd99299706684d0cae2f
languageName: node
linkType: hard
"@octokit/core@npm:^5.0.0":
version: 5.0.0
resolution: "@octokit/core@npm:5.0.0"
dependencies:
"@octokit/auth-token": ^4.0.0
"@octokit/graphql": ^7.0.0
"@octokit/request": ^8.0.2
"@octokit/request-error": ^5.0.0
"@octokit/types": ^11.0.0
before-after-hook: ^2.2.0
universal-user-agent: ^6.0.0
checksum: 1a5d1112a2403d146aa1db7aaf81a31192ef6b0310a1e6f68c3e439fded22bd4b3a930f5071585e6ca0f2f5e7fc4a1aac68910525b71b03732c140e362d26a33
languageName: node
linkType: hard
"@octokit/endpoint@npm:^9.0.0":
version: 9.0.0
resolution: "@octokit/endpoint@npm:9.0.0"
dependencies:
"@octokit/types": ^11.0.0
is-plain-object: ^5.0.0
universal-user-agent: ^6.0.0
checksum: 0e402c4d0fbe5b8053630cedb30dde5074bb6410828a05dc93d7e0fdd6c17f9a44b66586ef1a4e4ee0baa8d34ef7d6f535e2f04d9ea42909b7fc7ff55ce56a48
languageName: node
linkType: hard
"@octokit/graphql@npm:^7.0.0":
version: 7.0.1
resolution: "@octokit/graphql@npm:7.0.1"
dependencies:
"@octokit/request": ^8.0.1
"@octokit/types": ^11.0.0
universal-user-agent: ^6.0.0
checksum: 7ee907987b1b8312c6f870c44455cbd3eed805bb1a4095038f4e7e62ee2e006bd766f2a71dfbe56b870cd8f7558309c602f00d3e252fe59578f4acf6249a4f17
languageName: node
linkType: hard
"@octokit/openapi-types@npm:^18.0.0":
version: 18.0.0
resolution: "@octokit/openapi-types@npm:18.0.0"
checksum: d487d6c6c1965e583eee417d567e4fe3357a98953fc49bce1a88487e7908e9b5dbb3e98f60dfa340e23b1792725fbc006295aea071c5667a813b9c098185b56f
languageName: node
linkType: hard
"@octokit/plugin-paginate-rest@npm:^8.0.0":
version: 8.0.0
resolution: "@octokit/plugin-paginate-rest@npm:8.0.0"
dependencies:
"@octokit/types": ^11.0.0
peerDependencies:
"@octokit/core": ">=5"
checksum: b5d7cee50523862c6ce7be057f7200e14ee4dcded462f27304c822c960a37efa23ed51080ea879f5d1e56e78f74baa17d2ce32eed5d726794abc35755777e32c
languageName: node
linkType: hard
"@octokit/plugin-request-log@npm:^4.0.0":
version: 4.0.0
resolution: "@octokit/plugin-request-log@npm:4.0.0"
peerDependencies:
"@octokit/core": ">=5"
checksum: 2a8a6619640942092009a9248ceeb163ce01c978e2d7b2a7eb8686bd09a04b783c4cd9071eebb16652d233587abcde449a02ce4feabc652f0a171615fb3e9946
languageName: node
linkType: hard
"@octokit/plugin-rest-endpoint-methods@npm:^9.0.0":
version: 9.0.0
resolution: "@octokit/plugin-rest-endpoint-methods@npm:9.0.0"
dependencies:
"@octokit/types": ^11.0.0
peerDependencies:
"@octokit/core": ">=5"
checksum: 8795cb29be042c839098886a03c2ec6051e3fd7a29f16f4f8a487aa2d85ceb00df8a4432499a43af550369bd730ce9b1b9d7eeff768745b80a3e67698ca9a5dd
languageName: node
linkType: hard
"@octokit/request-error@npm:^5.0.0":
version: 5.0.0
resolution: "@octokit/request-error@npm:5.0.0"
dependencies:
"@octokit/types": ^11.0.0
deprecation: ^2.0.0
once: ^1.4.0
checksum: 2012eca66f6b8fa4038b3bfe81d65a7134ec58e2caf45d229aca13b9653ab260abd95229bd1a8c11180ee0bcf738e2556831a85de28f39b175175653c3b79fdd
languageName: node
linkType: hard
"@octokit/request@npm:^8.0.1, @octokit/request@npm:^8.0.2":
version: 8.1.1
resolution: "@octokit/request@npm:8.1.1"
dependencies:
"@octokit/endpoint": ^9.0.0
"@octokit/request-error": ^5.0.0
"@octokit/types": ^11.1.0
is-plain-object: ^5.0.0
universal-user-agent: ^6.0.0
checksum: dec3ba2cba14739159cd8d1653ad8ac6d58095e4ac294d312d20ce2c63c60c3cad2e5499137244dba3d681fd5cd7f74b4b5d4df024a19c0ee1831204e5a3a894
languageName: node
linkType: hard
"@octokit/rest@npm:^20.0.1":
version: 20.0.1
resolution: "@octokit/rest@npm:20.0.1"
dependencies:
"@octokit/core": ^5.0.0
"@octokit/plugin-paginate-rest": ^8.0.0
"@octokit/plugin-request-log": ^4.0.0
"@octokit/plugin-rest-endpoint-methods": ^9.0.0
checksum: 9fb2e154a498e00598379b09d76cc7b67b3801e9c97d753f1a76e1163924188bf4cb1411ec152a038ae91e97b86d7146ff220b05adfb6e500e2300c87e14100a
languageName: node
linkType: hard
"@octokit/types@npm:^11.0.0, @octokit/types@npm:^11.1.0":
version: 11.1.0
resolution: "@octokit/types@npm:11.1.0"
dependencies:
"@octokit/openapi-types": ^18.0.0
checksum: 72627a94ddaf7bc14db06572bcde67649aad608cd86548818380db9305f4c0ca9ca078a62dd883858a267e8ec8fd596a0fce416aa04197c439b9548efef609a7
languageName: node
linkType: hard
"@open-draft/until@npm:^1.0.3":
version: 1.0.3
resolution: "@open-draft/until@npm:1.0.3"
@ -8076,10 +8203,10 @@ __metadata:
languageName: node
linkType: hard
"@types/node@npm:*, @types/node@npm:>=10.0.0":
version: 18.14.5
resolution: "@types/node@npm:18.14.5"
checksum: 415fb0edc132baa9580f1b7a381a3f10b662f5d7a7d11641917fa0961788ccede3272badc414aadc47306e9fc35c5f6c59159ac470b46d3f3a15fb0446224c8c
"@types/node@npm:*, @types/node@npm:>=10.0.0, @types/node@npm:^18.17.5":
version: 18.17.15
resolution: "@types/node@npm:18.17.15"
checksum: eed11d4398ccdb999a4c65658ee75de621a4ad57aece48ed2fb8803b1e2711fadf58d8aefbdb0a447d69cf3cba602ca32fe0fc92077575950a796e1dc13baa0f
languageName: node
linkType: hard
@ -8090,7 +8217,7 @@ __metadata:
languageName: node
linkType: hard
"@types/node@npm:^16.0.0, @types/node@npm:^16.18.39":
"@types/node@npm:^16.0.0":
version: 16.18.40
resolution: "@types/node@npm:16.18.40"
checksum: a683930491b4fd7cb2dc7684e32bbeedc4a83fb1949a7b15ea724fbfaa9988cec59091f169a3f1090cb91992caba8c1a7d50315b2c67c6e2579a3788bb09eec4
@ -8118,6 +8245,17 @@ __metadata:
languageName: node
linkType: hard
"@types/pg@npm:^8.10.2":
version: 8.10.2
resolution: "@types/pg@npm:8.10.2"
dependencies:
"@types/node": "*"
pg-protocol: "*"
pg-types: ^4.0.1
checksum: 49da89f64cec1bd12a3fbc0c72b17d685c2fee579726a529f62fcab395dbc5696d80455073409947a577164b3c53a90181a331e4a5d9357679f724d4ce37f4b9
languageName: node
linkType: hard
"@types/prettier@npm:^2.1.5":
version: 2.7.0
resolution: "@types/prettier@npm:2.7.0"
@ -9913,6 +10051,7 @@ __metadata:
"@loadable/component": ^5.15.3
"@manaflair/redux-batch": ^1.0.0
"@mantine/hooks": ^5.10.1
"@octokit/rest": ^20.0.1
"@peculiar/webcrypto": ^1.4.3
"@redux-saga/testing-utils": ^1.1.5
"@sentry/react": ^6.2.4
@ -9941,6 +10080,7 @@ __metadata:
"@types/node": ^10.12.18
"@types/node-forge": ^0.10.0
"@types/object-hash": ^2.2.1
"@types/pg": ^8.10.2
"@types/prismjs": ^1.16.1
"@types/react": ^17.0.2
"@types/react-beautiful-dnd": ^11.0.4
@ -10003,7 +10143,7 @@ __metadata:
craco-babel-loader: ^1.0.4
cssnano: ^6.0.1
cy-verify-downloads: ^0.0.5
cypress: ^12.17.4
cypress: 13.2.0
cypress-file-upload: ^4.1.1
cypress-image-snapshot: ^4.0.1
cypress-log-to-output: ^1.1.2
@ -11081,6 +11221,13 @@ __metadata:
languageName: node
linkType: hard
"before-after-hook@npm:^2.2.0":
version: 2.2.3
resolution: "before-after-hook@npm:2.2.3"
checksum: a1a2430976d9bdab4cd89cb50d27fa86b19e2b41812bf1315923b0cba03371ebca99449809226425dd3bcef20e010db61abdaff549278e111d6480034bebae87
languageName: node
linkType: hard
"better-opn@npm:^3.0.2":
version: 3.0.2
resolution: "better-opn@npm:3.0.2"
@ -13583,13 +13730,13 @@ __metadata:
languageName: node
linkType: hard
"cypress@npm:^12.17.4":
version: 12.17.4
resolution: "cypress@npm:12.17.4"
"cypress@npm:13.2.0":
version: 13.2.0
resolution: "cypress@npm:13.2.0"
dependencies:
"@cypress/request": 2.88.12
"@cypress/request": ^3.0.0
"@cypress/xvfb": ^1.2.4
"@types/node": ^16.18.39
"@types/node": ^18.17.5
"@types/sinonjs__fake-timers": 8.1.1
"@types/sizzle": ^2.3.2
arch: ^2.2.0
@ -13632,7 +13779,7 @@ __metadata:
yauzl: ^2.10.0
bin:
cypress: bin/cypress
checksum: c9c79f5493b23e9c8cfb92d45d50ea9d0fae54210dde203bfa794a79436faf60108d826fe9007a7d67fddf7919802ad8f006b7ae56c5c198c75d5bc85bbc851b
checksum: 7647814f07626bd63e7b8dc4d066179fa40bf492c588bbc2626d983a2baab6cb77c29958dc92442f277e0a8e94866decc51c4de306021739c47e32baf5970219
languageName: node
linkType: hard
@ -13945,6 +14092,13 @@ __metadata:
languageName: node
linkType: hard
"deprecation@npm:^2.0.0":
version: 2.3.1
resolution: "deprecation@npm:2.3.1"
checksum: f56a05e182c2c195071385455956b0c4106fe14e36245b00c689ceef8e8ab639235176a96977ba7c74afb173317fac2e0ec6ec7a1c6d1e6eaa401c586c714132
languageName: node
linkType: hard
"deps-sort@npm:^2.0.0, deps-sort@npm:^2.0.1":
version: 2.0.1
resolution: "deps-sort@npm:2.0.1"
@ -22271,7 +22425,7 @@ __metadata:
languageName: node
linkType: hard
"obuf@npm:^1.0.0, obuf@npm:^1.1.2":
"obuf@npm:^1.0.0, obuf@npm:^1.1.2, obuf@npm:~1.1.2":
version: 1.1.2
resolution: "obuf@npm:1.1.2"
checksum: 41a2ba310e7b6f6c3b905af82c275bf8854896e2e4c5752966d64cbcd2f599cfffd5932006bcf3b8b419dfdacebb3a3912d5d94e10f1d0acab59876c8757f27f
@ -22910,6 +23064,13 @@ __metadata:
languageName: node
linkType: hard
"pg-numeric@npm:1.0.2":
version: 1.0.2
resolution: "pg-numeric@npm:1.0.2"
checksum: 8899f8200caa1744439a8778a9eb3ceefb599d893e40a09eef84ee0d4c151319fd416634a6c0fc7b7db4ac268710042da5be700b80ef0de716fe089b8652c84f
languageName: node
linkType: hard
"pg-pool@npm:^3.6.1":
version: 3.6.1
resolution: "pg-pool@npm:3.6.1"
@ -22919,7 +23080,7 @@ __metadata:
languageName: node
linkType: hard
"pg-protocol@npm:^1.6.0":
"pg-protocol@npm:*, pg-protocol@npm:^1.6.0":
version: 1.6.0
resolution: "pg-protocol@npm:1.6.0"
checksum: e12662d2de2011e0c3a03f6a09f435beb1025acdc860f181f18a600a5495dc38a69d753bbde1ace279c8c442536af9c1a7c11e1d0fe3fad3aa1348b28d9d2683
@ -22939,6 +23100,21 @@ __metadata:
languageName: node
linkType: hard
"pg-types@npm:^4.0.1":
version: 4.0.1
resolution: "pg-types@npm:4.0.1"
dependencies:
pg-int8: 1.0.1
pg-numeric: 1.0.2
postgres-array: ~3.0.1
postgres-bytea: ~3.0.0
postgres-date: ~2.0.1
postgres-interval: ^3.0.0
postgres-range: ^1.1.1
checksum: 05258ef2f27a75f1bf4e243f36bb749f85148339d3be818147bcc4aebe019ad7589a6869150713140250d81e5a46ec25dc6e0a031ea77e23db5ca232a0d7a3dc
languageName: node
linkType: hard
"pg@npm:^8.11.3":
version: 8.11.3
resolution: "pg@npm:8.11.3"
@ -24440,6 +24616,13 @@ __metadata:
languageName: node
linkType: hard
"postgres-array@npm:~3.0.1":
version: 3.0.2
resolution: "postgres-array@npm:3.0.2"
checksum: 5955f9dffeb6fa960c1a0b04fd4b2ba16813ddb636934ad26f902e4d76a91c0b743dcc6edc4cffc52deba7d547505e0020adea027c1d50a774f989cf955420d1
languageName: node
linkType: hard
"postgres-bytea@npm:~1.0.0":
version: 1.0.0
resolution: "postgres-bytea@npm:1.0.0"
@ -24447,6 +24630,15 @@ __metadata:
languageName: node
linkType: hard
"postgres-bytea@npm:~3.0.0":
version: 3.0.0
resolution: "postgres-bytea@npm:3.0.0"
dependencies:
obuf: ~1.1.2
checksum: 5f917a003fcaa0df7f285e1c37108ad474ce91193466b9bd4bcaecef2cdea98ca069c00aa6a8dbe6d2e7192336cadc3c9b36ae48d1555a299521918e00e2936b
languageName: node
linkType: hard
"postgres-date@npm:~1.0.4":
version: 1.0.7
resolution: "postgres-date@npm:1.0.7"
@ -24454,6 +24646,13 @@ __metadata:
languageName: node
linkType: hard
"postgres-date@npm:~2.0.1":
version: 2.0.1
resolution: "postgres-date@npm:2.0.1"
checksum: 0304bf8641a01412e4f5c3a374604e2e3dbc9dbee71d30df12fe60b32560c5674f887c2d15bafa2996f3b618b617398e7605f0e3669db43f31e614dfe69f8de7
languageName: node
linkType: hard
"postgres-interval@npm:^1.1.0":
version: 1.2.0
resolution: "postgres-interval@npm:1.2.0"
@ -24463,6 +24662,20 @@ __metadata:
languageName: node
linkType: hard
"postgres-interval@npm:^3.0.0":
version: 3.0.0
resolution: "postgres-interval@npm:3.0.0"
checksum: c7a1cf006de97de663b6b8c4d2b167aa9909a238c4866a94b15d303762f5ac884ff4796cd6e2111b7f0a91302b83c570453aa8506fd005b5a5d5dfa87441bebc
languageName: node
linkType: hard
"postgres-range@npm:^1.1.1":
version: 1.1.3
resolution: "postgres-range@npm:1.1.3"
checksum: bf7e194a18c490d02bda0bd02035a8da454d8fd2b22c55d3d03f185c038b2a6f52d0804417d8090864afefc2b7ed664b2d12c2454a4a0f545dcbbb86488fbdf1
languageName: node
linkType: hard
"postinstall-postinstall@npm:^2.1.0":
version: 2.1.0
resolution: "postinstall-postinstall@npm:2.1.0"
@ -30031,6 +30244,13 @@ __metadata:
languageName: node
linkType: hard
"universal-user-agent@npm:^6.0.0":
version: 6.0.0
resolution: "universal-user-agent@npm:6.0.0"
checksum: 5092bbc80dd0d583cef0b62c17df0043193b74f425112ea6c1f69bc5eda21eeec7a08d8c4f793a277eb2202ffe9b44bec852fa3faff971234cd209874d1b79ef
languageName: node
linkType: hard
"universalify@npm:^0.1.0":
version: 0.1.2
resolution: "universalify@npm:0.1.2"