From d2c4a0da5d5a04b646eefb35b0553698fde084c0 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Tue, 22 Oct 2024 14:55:56 +0200
Subject: [PATCH 1/3] e2e: allow tests to fail

---
 .github/workflows/e2ePerformanceTests.yml  |  30 ++++
 src/libs/E2E/tests/appStartTimeTest.e2e.ts |   7 +-
 src/libs/E2E/tests/chatOpeningTest.e2e.ts  |   5 +-
 src/libs/E2E/tests/linkingTest.e2e.ts      |   3 +-
 .../E2E/tests/openSearchRouterTest.e2e.ts  |  10 +-
 src/libs/E2E/tests/reportTypingTest.e2e.ts |   5 +-
 tests/e2e/compare/compare.ts               |  13 +-
 tests/e2e/compare/output/console.ts        |   6 +-
 tests/e2e/compare/output/markdown.ts       |  10 +-
 tests/e2e/testRunner.ts                    | 168 ++++++++++--------
 10 files changed, 170 insertions(+), 87 deletions(-)

diff --git a/.github/workflows/e2ePerformanceTests.yml b/.github/workflows/e2ePerformanceTests.yml
index b48c7b2175eb..089f7886b527 100644
--- a/.github/workflows/e2ePerformanceTests.yml
+++ b/.github/workflows/e2ePerformanceTests.yml
@@ -221,6 +221,36 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ github.token }}
 
+      - name: Check if any tests were skipped
+        id: checkIfSkippedTestsDetected
+        run: |
+          if grep -q '⚠️' "./Host_Machine_Files/\$WORKING_DIRECTORY/output.md"; then
+            # Expose an output on the GH action indicating that tests were skipped:
+            echo "skippedTestsDetected=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "skippedTestsDetected=false" >> "$GITHUB_OUTPUT"
+            echo '✅ no skipped tests detected'
+          fi
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+
+      - name: 'Announce skipped tests in Slack'
+        if: ${{ steps.checkIfSkippedTestsDetected.outputs.skippedTestsDetected == 'true' }}
+        uses: 8398a7/action-slack@v3
+        with:
+          status: custom
+          custom_payload: |
+            {
+              channel: '#e2e-announce',
+              attachments: [{
+                color: 'danger',
+                text: `⚠️ ${process.env.AS_REPO} Some E2E tests were skipped in the workflow ⚠️`,
+              }]
+            }
+        env:
+          GITHUB_TOKEN: ${{ github.token }}
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }}
+
       - name: 'Announce regression in Slack'
         if: ${{ steps.checkIfRegressionDetected.outputs.performanceRegressionDetected == 'true' }}
         uses: 8398a7/action-slack@v3
diff --git a/src/libs/E2E/tests/appStartTimeTest.e2e.ts b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
index 188dd65c85e9..ccd781e08514 100644
--- a/src/libs/E2E/tests/appStartTimeTest.e2e.ts
+++ b/src/libs/E2E/tests/appStartTimeTest.e2e.ts
@@ -1,11 +1,14 @@
 import Config from 'react-native-config';
+import type {NativeConfig} from 'react-native-config';
 import type {PerformanceEntry} from 'react-native-performance';
 import E2ELogin from '@libs/E2E/actions/e2eLogin';
 import waitForAppLoaded from '@libs/E2E/actions/waitForAppLoaded';
 import E2EClient from '@libs/E2E/client';
+import getConfigValueOrThrow from '@libs/E2E/utils/getConfigValueOrThrow';
 import Performance from '@libs/Performance';
 
-const test = () => {
+const test = (config: NativeConfig) => {
+    const name = getConfigValueOrThrow('name', config);
     // check for login (if already logged in, the action will simply resolve)
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -25,7 +28,7 @@ const test = () => {
             metrics.map((metric) =>
                 E2EClient.submitTestResults({
                     branch: Config.E2E_BRANCH,
-                    name: `App start ${metric.name}`,
+                    name: `${name} ${metric.name}`,
                     metric: metric.duration,
                     unit: 'ms',
                 }),
diff --git a/src/libs/E2E/tests/chatOpeningTest.e2e.ts b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
index 8e2a0a81da7d..cf0c4889aa69 100644
--- a/src/libs/E2E/tests/chatOpeningTest.e2e.ts
+++ b/src/libs/E2E/tests/chatOpeningTest.e2e.ts
@@ -15,6 +15,7 @@ const test = (config: NativeConfig) => {
     console.debug('[E2E] Logging in for chat opening');
 
     const reportID = getConfigValueOrThrow('reportID', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -48,7 +49,7 @@ const test = (config: NativeConfig) => {
                     if (entry.name === CONST.TIMING.CHAT_RENDER) {
                         E2EClient.submitTestResults({
                             branch: Config.E2E_BRANCH,
-                            name: 'Chat opening',
+                            name: `${name} Chat opening`,
                             metric: entry.duration,
                             unit: 'ms',
                         })
@@ -64,7 +65,7 @@ const test = (config: NativeConfig) => {
                     if (entry.name === CONST.TIMING.OPEN_REPORT) {
                         E2EClient.submitTestResults({
                             branch: Config.E2E_BRANCH,
-                            name: 'Chat TTI',
+                            name: `${name} Chat TTI`,
                             metric: entry.duration,
                             unit: 'ms',
                         })
diff --git a/src/libs/E2E/tests/linkingTest.e2e.ts b/src/libs/E2E/tests/linkingTest.e2e.ts
index c4d580e8c57b..18ba438c2ca6 100644
--- a/src/libs/E2E/tests/linkingTest.e2e.ts
+++ b/src/libs/E2E/tests/linkingTest.e2e.ts
@@ -24,6 +24,7 @@ const test = (config: NativeConfig) => {
     const reportID = getConfigValueOrThrow('reportID', config);
     const linkedReportID = getConfigValueOrThrow('linkedReportID', config);
     const linkedReportActionID = getConfigValueOrThrow('linkedReportActionID', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -74,7 +75,7 @@ const test = (config: NativeConfig) => {
 
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Comment linking',
+                        name,
                         metric: entry.duration,
                         unit: 'ms',
                     });
diff --git a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
index 48278aee536a..01dfe57ab715 100644
--- a/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
+++ b/src/libs/E2E/tests/openSearchRouterTest.e2e.ts
@@ -1,16 +1,20 @@
+import type {NativeConfig} from 'react-native-config';
 import Config from 'react-native-config';
 import * as E2EGenericPressableWrapper from '@components/Pressable/GenericPressable/index.e2e';
 import E2ELogin from '@libs/E2E/actions/e2eLogin';
 import waitForAppLoaded from '@libs/E2E/actions/waitForAppLoaded';
 import E2EClient from '@libs/E2E/client';
+import getConfigValueOrThrow from '@libs/E2E/utils/getConfigValueOrThrow';
 import getPromiseWithResolve from '@libs/E2E/utils/getPromiseWithResolve';
 import Performance from '@libs/Performance';
 import CONST from '@src/CONST';
 
-const test = () => {
+const test = (config: NativeConfig) => {
     // check for login (if already logged in, the action will simply resolve)
     console.debug('[E2E] Logging in for new search router');
 
+    const name = getConfigValueOrThrow('name', config);
+
     E2ELogin().then((neededLogin: boolean): Promise<void> | undefined => {
         if (neededLogin) {
             return waitForAppLoaded().then(() =>
@@ -59,7 +63,7 @@ const test = () => {
                     if (entry.name === CONST.TIMING.SEARCH_ROUTER_RENDER) {
                         E2EClient.submitTestResults({
                             branch: Config.E2E_BRANCH,
-                            name: 'Open Search Router TTI',
+                            name: `${name} Open Search Router TTI`,
                             metric: entry.duration,
                             unit: 'ms',
                         })
@@ -75,7 +79,7 @@ const test = () => {
                     if (entry.name === CONST.TIMING.LOAD_SEARCH_OPTIONS) {
                         E2EClient.submitTestResults({
                             branch: Config.E2E_BRANCH,
-                            name: 'Load Search Options',
+                            name: `${name} Load Search Options`,
                             metric: entry.duration,
                             unit: 'ms',
                         })
diff --git a/src/libs/E2E/tests/reportTypingTest.e2e.ts b/src/libs/E2E/tests/reportTypingTest.e2e.ts
index efe1c380dfd0..e042a688c37d 100644
--- a/src/libs/E2E/tests/reportTypingTest.e2e.ts
+++ b/src/libs/E2E/tests/reportTypingTest.e2e.ts
@@ -21,6 +21,7 @@ const test = (config: NativeConfig) => {
     const reportID = getConfigValueOrThrow('reportID', config);
     const message = getConfigValueOrThrow('message', config);
+    const name = getConfigValueOrThrow('name', config);
 
     E2ELogin().then((neededLogin) => {
         if (neededLogin) {
@@ -45,7 +46,7 @@ const test = (config: NativeConfig) => {
                 if (entry.name === CONST.TIMING.MESSAGE_SENT) {
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Message sent',
+                        name: `${name} Message sent`,
                         metric: entry.duration,
                         unit: 'ms',
                     }).then(messageSentResolve);
@@ -77,7 +78,7 @@ const test = (config: NativeConfig) => {
 
                     E2EClient.submitTestResults({
                         branch: Config.E2E_BRANCH,
-                        name: 'Composer typing rerender count',
+                        name: `${name} Composer typing rerender count`,
                        metric: rerenderCount,
                         unit: 'renders',
                     })
diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index 40a728545668..8761043e97e1 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -91,16 +91,23 @@ function compareResults(baselineEntries: Metric | string, compareEntries: Metric
     };
 }
 
-export default (main: Metric | string, delta: Metric | string, outputFile: string, outputFormat = 'all', metricForTest = {}) => {
+type Options = {
+    outputFile: string;
+    outputFormat: 'console' | 'markdown' | 'all';
+    metricForTest: Record<string, Unit>;
+    hasMissingData: boolean;
+};
+
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, hasMissingData}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData);
+        printToConsole(outputData, hasMissingData);
     }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData);
+        return writeToMarkdown(outputFile, outputData, hasMissingData);
     }
 };
 
 export {compareResults};
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index 41ae5a4e0ccf..c91cebbbb610 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data) => {
+export default (data: Data, hasMissingData: boolean) => {
     // No need to log errors or warnings as these were already logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -38,6 +38,10 @@ export default (data: Data) => {
     data.meaningless.forEach(printRegularLine);
 
     console.debug('');
+
+    if (hasMissingData) {
+        console.debug('⚠️ Some tests did not pass successfully, so some results are omitted from final report');
+    }
 };
 
 export type {Data, Entry};
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 32af6c5e22ad..5d049939e139 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data) => {
+const buildMarkdown = (data: Data, hasMissingData: boolean) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,6 +92,10 @@ const buildMarkdown = (data: Data) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
+    if (hasMissingData) {
+        result += '⚠️ Some tests did not pass successfully, so some results are omitted from final report';
+    }
+
     return result;
 };
 
@@ -109,8 +113,8 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data) => {
-    const markdown = buildMarkdown(data);
+const writeToMarkdown = (filePath: string, data: Data, hasMissingData: boolean) => {
+    const markdown = buildMarkdown(data, hasMissingData);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
     });
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index 58fb6b9cdae1..a4e9bfab1e0c 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,6 +123,20 @@ const runTests = async (): Promise<void> => {
         }
     };
 
+    let hasSkippedTests = false;
+    const clearTestResults = (test: TestConfig) => {
+        hasSkippedTests = true;
+
+        Object.keys(results).forEach((branch: string) => {
+            Object.keys(results[branch]).forEach((metric: string) => {
+                if (!metric.startsWith(test.name)) {
+                    return;
+                }
+                delete results[branch][metric];
+            });
+        });
+    };
+
     // Collect results while tests are being executed
     server.addTestResultListener((testResult) => {
         const {isCritical = true} = testResult;
@@ -244,88 +258,102 @@ const runTests = async (): Promise<void> => {
         server.setTestConfig(test as TestConfig);
         server.setReadyToAcceptTestResults(false);
 
-        const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
-
-        // For each warmup we allow the warmup to fail three times before we stop the warmup run:
-        const errorCountWarmupRef = {
-            errorCount: 0,
-            allowedExceptions: 3,
-        };
-
-        // by default we do 2 warmups:
-        // - first warmup to pass a login flow
-        // - second warmup to pass an actual flow and cache network requests
-        const iterations = 2;
-        for (let i = 0; i < iterations; i++) {
-            try {
-                // Warmup the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
-
-                // Warmup the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
-            } catch (e) {
-                // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
-                Logger.error(`Warmup failed with error: ${e}`);
-
-                errorCountWarmupRef.errorCount++;
-                i--; // repeat warmup again
-
-                if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
-                    Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
Stopping the test run."); - throw e; + try { + const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`; + + // For each warmup we allow the warmup to fail three times before we stop the warmup run: + const errorCountWarmupRef = { + errorCount: 0, + allowedExceptions: 3, + }; + + // by default we do 2 warmups: + // - first warmup to pass a login flow + // - second warmup to pass an actual flow and cache network requests + const iterations = 2; + for (let i = 0; i < iterations; i++) { + try { + // Warmup the main app: + await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN); + + // Warmup the delta app: + await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA); + } catch (e) { + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + Logger.error(`Warmup failed with error: ${e}`); + + MeasureUtils.stop('error-warmup'); + server.clearAllTestDoneListeners(); + + errorCountWarmupRef.errorCount++; + i--; // repeat warmup again + + if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) { + Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run."); + throw e; + } } } - } - server.setReadyToAcceptTestResults(true); - - // For each test case we allow the test to fail three times before we stop the test run: - const errorCountRef = { - errorCount: 0, - allowedExceptions: 3, - }; - - // We run each test multiple time to average out the results - for (let testIteration = 0; testIteration < config.RUNS; testIteration++) { - const onError = (e: Error) => { - // eslint-disable-next-line @typescript-eslint/restrict-template-expressions - Logger.error(`Unexpected error during test execution: ${e}. `); - MeasureUtils.stop('error'); - server.clearAllTestDoneListeners(); - errorCountRef.errorCount += 1; - if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) { - Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run."); - // If the error happened on the first test run, the test is broken - // and we should not continue running it. Or if we have reached the - // maximum number of allowed exceptions, we should stop the test run. - throw e; - } - // eslint-disable-next-line @typescript-eslint/restrict-template-expressions - Logger.warn(`There was an error running the test. Continuing the test run. 
-            };
-
-            const launchArgs = {
-                mockNetwork: true,
-            };
-
-            const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
-            const mainIterationText = `[MAIN] ${iterationText}`;
-            const deltaIterationText = `[DELTA] ${iterationText}`;
-            try {
-                // Run the test on the main app:
-                await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
-
-                // Run the test on the delta app:
-                await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
-            } catch (e) {
-                onError(e as Error);
-            }
-        }
+        try {
+            const warmupText = `Warmup for test '${test?.name}' [${testIndex + 1}/${tests.length}]`;
+
+            // For each warmup we allow the warmup to fail three times before we stop the warmup run:
+            const errorCountWarmupRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // by default we do 2 warmups:
+            // - first warmup to complete the login flow
+            // - second warmup to run the actual flow and cache network requests
+            const iterations = 2;
+            for (let i = 0; i < iterations; i++) {
+                try {
+                    // Warmup the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, `[MAIN] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_MAIN);
+
+                    // Warmup the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, `[DELTA] ${warmupText}. Iteration ${i + 1}/${iterations}`, config.BRANCH_DELTA);
+                } catch (e) {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Warmup failed with error: ${e}`);
+
+                    MeasureUtils.stop('error-warmup');
+                    server.clearAllTestDoneListeners();
+
+                    errorCountWarmupRef.errorCount++;
+                    i--; // repeat warmup again
+
+                    if (errorCountWarmupRef.errorCount === errorCountWarmupRef.allowedExceptions) {
+                        Logger.error("There was an error running the warmup and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        throw e;
+                    }
+                }
+            }
+
+            server.setReadyToAcceptTestResults(true);
+
+            // For each test case we allow the test to fail three times before we stop the test run:
+            const errorCountRef = {
+                errorCount: 0,
+                allowedExceptions: 3,
+            };
+
+            // We run each test multiple times to average out the results
+            for (let testIteration = 0; testIteration < config.RUNS; testIteration++) {
+                const onError = (e: Error) => {
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.error(`Unexpected error during test execution: ${e}. `);
+                    MeasureUtils.stop('error');
+                    server.clearAllTestDoneListeners();
+                    errorCountRef.errorCount += 1;
+                    if (testIteration === 0 || errorCountRef.errorCount === errorCountRef.allowedExceptions) {
+                        Logger.error("There was an error running the test and we've reached the maximum number of allowed exceptions. Stopping the test run.");
+                        // If the error happened on the first test run, the test is broken
+                        // and we should not continue running it. Or if we have reached the
+                        // maximum number of allowed exceptions, we should stop the test run.
+                        throw e;
+                    }
+                    // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+                    Logger.warn(`There was an error running the test. Continuing the test run. Error: ${e}`);
+                };
+
+                const launchArgs = {
+                    mockNetwork: true,
+                };
+
+                const iterationText = `Test '${test?.name}' [${testIndex + 1}/${tests.length}], iteration [${testIteration + 1}/${config.RUNS}]`;
+                const mainIterationText = `[MAIN] ${iterationText}`;
+                const deltaIterationText = `[DELTA] ${iterationText}`;
+                try {
+                    // Run the test on the main app:
+                    await runTestIteration(config.MAIN_APP_PACKAGE, mainIterationText, config.BRANCH_MAIN, launchArgs);
+
+                    // Run the test on the delta app:
+                    await runTestIteration(config.DELTA_APP_PACKAGE, deltaIterationText, config.BRANCH_DELTA, launchArgs);
+                } catch (e) {
+                    onError(e as Error);
+                }
+            }
+        } catch (exception) {
+            // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+            Logger.warn(`Test ${test?.name} cannot be finished due to an error: ${exception}`);
+            clearTestResults(test as TestConfig);
+        }
     }
 
     // Calculate statistics and write them to our work file
     Logger.info('Calculating statistics and writing results');
-    compare(results.main, results.delta, `${config.OUTPUT_DIR}/output.md`, 'all', metricForTest);
+    compare(results.main, results.delta, {
+        outputFile: `${config.OUTPUT_DIR}/output.md`,
+        outputFormat: 'all',
+        metricForTest,
+        hasMissingData: hasSkippedTests,
+    });
 
     await server.stop();
 };

From b8a0935bbdd2b7004b8ea2205b95a0c58a69fa3d Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Wed, 23 Oct 2024 12:13:31 +0200
Subject: [PATCH 2/3] e2e: print skipped tests

---
 tests/e2e/compare/compare.ts         |  8 ++++----
 tests/e2e/compare/output/console.ts  |  6 +++---
 tests/e2e/compare/output/markdown.ts | 10 +++++-----
 tests/e2e/testRunner.ts              |  6 +++---
 4 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/tests/e2e/compare/compare.ts b/tests/e2e/compare/compare.ts
index 8761043e97e1..ad38c249bff3 100644
--- a/tests/e2e/compare/compare.ts
+++ b/tests/e2e/compare/compare.ts
@@ -95,19 +95,19 @@ type Options = {
     outputFile: string;
     outputFormat: 'console' | 'markdown' | 'all';
     metricForTest: Record<string, Unit>;
-    hasMissingData: boolean;
+    skippedTests: string[];
 };
 
-export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, hasMissingData}: Options) => {
+export default (main: Metric | string, delta: Metric | string, {outputFile, outputFormat = 'all', metricForTest = {}, skippedTests}: Options) => {
     // IMPORTANT NOTE: make sure you are passing the main/baseline results first, then the delta/compare results:
     const outputData = compareResults(main, delta, metricForTest);
 
     if (outputFormat === 'console' || outputFormat === 'all') {
-        printToConsole(outputData, hasMissingData);
+        printToConsole(outputData, skippedTests);
     }
 
     if (outputFormat === 'markdown' || outputFormat === 'all') {
-        return writeToMarkdown(outputFile, outputData, hasMissingData);
+        return writeToMarkdown(outputFile, outputData, skippedTests);
     }
 };
 
 export {compareResults};
diff --git a/tests/e2e/compare/output/console.ts b/tests/e2e/compare/output/console.ts
index c91cebbbb610..2e303ffa1538 100644
--- a/tests/e2e/compare/output/console.ts
+++ b/tests/e2e/compare/output/console.ts
@@ -26,7 +26,7 @@ const printRegularLine = (entry: Entry) => {
 /**
  * Prints the result simply to console.
  */
-export default (data: Data, hasMissingData: boolean) => {
+export default (data: Data, skippedTests: string[]) => {
     // No need to log errors or warnings as these were already logged on the fly
     console.debug('');
     console.debug('❇️ Performance comparison results:');
@@ -39,8 +39,8 @@ export default (data: Data, hasMissingData: boolean) => {
 
     console.debug('');
 
-    if (hasMissingData) {
-        console.debug('⚠️ Some tests did not pass successfully, so some results are omitted from final report');
+    if (skippedTests.length > 0) {
+        console.debug(`⚠️ Some tests did not pass successfully, so some results are omitted from the final report: ${skippedTests.join(', ')}`);
     }
 };
diff --git a/tests/e2e/compare/output/markdown.ts b/tests/e2e/compare/output/markdown.ts
index 5d049939e139..eaca6a049516 100644
--- a/tests/e2e/compare/output/markdown.ts
+++ b/tests/e2e/compare/output/markdown.ts
@@ -67,7 +67,7 @@ const buildSummaryTable = (entries: Entry[], collapse = false) => {
     return collapse ? collapsibleSection('Show entries', content) : content;
 };
 
-const buildMarkdown = (data: Data, hasMissingData: boolean) => {
+const buildMarkdown = (data: Data, skippedTests: string[]) => {
     let result = '## Performance Comparison Report 📊';
 
     if (data.errors?.length) {
@@ -92,8 +92,8 @@ const buildMarkdown = (data: Data, hasMissingData: boolean) => {
     result += `\n${buildDetailsTable(data.meaningless)}`;
     result += '\n';
 
-    if (hasMissingData) {
-        result += '⚠️ Some tests did not pass successfully, so some results are omitted from final report';
+    if (skippedTests.length > 0) {
+        result += `⚠️ Some tests did not pass successfully, so some results are omitted from the final report: ${skippedTests.join(', ')}`;
     }
 
     return result;
@@ -113,8 +113,8 @@ const writeToFile = (filePath: string, content: string) =>
         throw error;
     });
 
-const writeToMarkdown = (filePath: string, data: Data, hasMissingData: boolean) => {
-    const markdown = buildMarkdown(data, hasMissingData);
+const writeToMarkdown = (filePath: string, data: Data, skippedTests: string[]) => {
+    const markdown = buildMarkdown(data, skippedTests);
     return writeToFile(filePath, markdown).catch((error) => {
         console.error(error);
         throw error;
     });
diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index a4e9bfab1e0c..b4e79220b4ed 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,9 +123,9 @@ const runTests = async (): Promise<void> => {
         }
     };
 
-    let hasSkippedTests = false;
+    let skippedTests: string[] = [];
     const clearTestResults = (test: TestConfig) => {
-        hasSkippedTests = true;
+        skippedTests.push(test.name);
 
         Object.keys(results).forEach((branch: string) => {
             Object.keys(results[branch]).forEach((metric: string) => {
@@ -352,7 +352,7 @@ const runTests = async (): Promise<void> => {
         outputFile: `${config.OUTPUT_DIR}/output.md`,
         outputFormat: 'all',
         metricForTest,
-        hasMissingData: hasSkippedTests,
+        skippedTests,
     });
 
     await server.stop();

From eeb623d4265e053898a5ee1d56c758a0a261bf82 Mon Sep 17 00:00:00 2001
From: kirillzyusko
Date: Wed, 23 Oct 2024 12:19:41 +0200
Subject: [PATCH 3/3] fix: CI

---
 tests/e2e/testRunner.ts       | 2 +-
 tests/unit/E2EMarkdownTest.ts | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/e2e/testRunner.ts b/tests/e2e/testRunner.ts
index b4e79220b4ed..d1e16b6999e1 100644
--- a/tests/e2e/testRunner.ts
+++ b/tests/e2e/testRunner.ts
@@ -123,7 +123,7 @@ const runTests = async (): Promise<void> => {
         }
     };
 
-    let skippedTests: string[] = [];
+    const skippedTests: string[] = [];
     const clearTestResults = (test: TestConfig) => {
skippedTests.push(test.name); diff --git a/tests/unit/E2EMarkdownTest.ts b/tests/unit/E2EMarkdownTest.ts index 74c5659c9487..766ec708f31b 100644 --- a/tests/unit/E2EMarkdownTest.ts +++ b/tests/unit/E2EMarkdownTest.ts @@ -13,6 +13,6 @@ const results = { describe('markdown formatter', () => { it('should format significant changes properly', () => { const data = compareResults(results.main, results.delta, {commentLinking: 'ms'}); - expect(buildMarkdown(data)).toMatchSnapshot(); + expect(buildMarkdown(data, [])).toMatchSnapshot(); }); });
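
For context, a minimal sketch of how the reworked compare() options API introduced by this series fits together end to end. This is illustrative only, not code from the repo: the import path, test names, and sample values are invented, and the Metric shape is assumed to be a record mapping a result name to an array of measured samples.

    import compare from './compare/compare';

    // Sketch data, assuming Metric is roughly Record<string, number[]> (result name -> samples).
    const main = {'Chat opening Chat TTI': [612, 598, 604]};
    const delta = {'Chat opening Chat TTI': [618, 601, 599]};

    compare(main, delta, {
        outputFile: './output.md',
        outputFormat: 'all',
        metricForTest: {},
        // Names collected by clearTestResults() for tests that exhausted their retries.
        // A non-empty list appends the ⚠️ notice that the "Check if any tests were
        // skipped" workflow step greps output.md for.
        skippedTests: ['Open Search Router TTI'],
    });

Because the skipped-test names are threaded through to both outputs, the same notice ends up in the console summary and in output.md, which is what lets a single grep in the workflow decide whether to post the Slack announcement.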