mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 23:21:08 +02:00
Implement rock-solid automated testing infrastructure for mana-core-auth with daily execution, notifications, and comprehensive monitoring. Test Suite Improvements: - Fix all 36 failing BetterAuthService tests (missing service mocks) - Add 21 JwtAuthGuard tests achieving 100% statement coverage - Create silentError helper to suppress intentional error logs - Fix Todo backend TaskService test structure - Add jose mock for JWT testing - Configure jest collectCoverageFrom for mana-core-auth GitHub Actions Workflow: - Daily automated test execution (2 AM UTC + manual trigger) - Matrix parallelization across 6 backend services - PostgreSQL and Redis service containers - Coverage enforcement (80% threshold) - Multi-channel notifications (Discord, Slack, GitHub Issues) - Support for success notifications (opt-in) Test Infrastructure: - Coverage aggregation across multiple services - Flaky test detection with 30-run history tracking - Performance metrics tracking with regression detection - Test data seeding and cleanup scripts - Comprehensive test reporting with formatted metrics Documentation: - TESTING_GUIDE.md (4000+ words) - Complete testing documentation - AUTOMATED_TESTING_SYSTEM.md - System architecture and workflows - DISCORD_NOTIFICATIONS_SETUP.md - Discord webhook setup guide - TESTING_DEPLOYMENT_CHECKLIST.md - Pre-deployment verification - TESTING_QUICK_REFERENCE.md - Quick command reference Final Result: - 180/180 tests passing (100% pass rate) - Zero console errors in test output - Automated daily testing with rich notifications - Production-ready test infrastructure
64 lines
1.7 KiB
JavaScript
64 lines
1.7 KiB
JavaScript
#!/usr/bin/env node
|
|
/* eslint-disable @typescript-eslint/no-require-imports, no-console */
|
|
/**
|
|
* Format Metrics for GitHub Summary
|
|
*
|
|
* Formats test performance metrics for display in GitHub Actions summary.
|
|
*
|
|
* Usage:
|
|
* node format-metrics.js <metrics-file>
|
|
*/
|
|
|
|
const fs = require('fs');
|
|
|
|
/**
 * Render a millisecond count as a human-readable duration string.
 *
 * @param {number} ms - Duration in milliseconds.
 * @returns {string} `"<ms>ms"` below one second, otherwise seconds with
 *   two decimals, e.g. `"250ms"` or `"1.25s"`.
 */
function formatDuration(ms) {
  return ms < 1000 ? `${ms}ms` : `${(ms / 1000).toFixed(2)}s`;
}
|
|
|
|
/**
 * Build a GitHub-flavored Markdown summary of test performance metrics.
 *
 * @param {Object} metrics - Parsed metrics JSON.
 * @param {number} metrics.totalTests - Total number of tests executed.
 * @param {number} metrics.totalDuration - Total run time in ms.
 * @param {number} metrics.averageDuration - Mean per-test duration in ms.
 * @param {{name: string, duration: number}} [metrics.slowestTest] - Optional slowest test entry.
 * @param {Object.<string, {tests: number, duration: number}>} [metrics.suiteMetrics]
 *   - Per-suite totals; may be absent in older/partial metrics files.
 * @returns {string} Markdown block with totals, slowest test, and a per-suite table.
 */
function formatMetrics(metrics) {
  let output = '';

  output += `\n**Total Tests:** ${metrics.totalTests}\n`;
  output += `**Total Duration:** ${formatDuration(metrics.totalDuration)}\n`;
  output += `**Average Duration:** ${formatDuration(metrics.averageDuration)}\n\n`;

  if (metrics.slowestTest) {
    output += `**Slowest Test:** ${metrics.slowestTest.name} (${formatDuration(metrics.slowestTest.duration)})\n\n`;
  }

  // Suite breakdown
  output += '### Suite Performance\n\n';
  output += '| Suite | Tests | Duration | Avg/Test |\n';
  output += '|-------|-------|----------|----------|\n';

  // `?? {}` tolerates metrics files that omit suiteMetrics entirely
  // (Object.entries(undefined) would otherwise throw a TypeError).
  for (const [suite, data] of Object.entries(metrics.suiteMetrics ?? {})) {
    // Guard against division by zero for suites that ran no tests.
    const avgPerTest = data.tests > 0 ? Math.round(data.duration / data.tests) : 0;
    output += `| ${suite} | ${data.tests} | ${formatDuration(data.duration)} | ${formatDuration(avgPerTest)} |\n`;
  }

  return output;
}
|
|
|
|
/**
 * Entry point: read the metrics JSON file given as argv[2] and print a
 * formatted Markdown summary to stdout (intended for $GITHUB_STEP_SUMMARY).
 *
 * Exit behavior:
 * - Missing argument: usage message to stderr, exit 1.
 * - Missing file: informational message to stdout, exit 0 (soft no-op so
 *   CI summaries don't fail when no metrics were produced).
 * - Unparsable JSON: clear error to stderr, exit 1 (instead of an
 *   unhandled-exception stack trace).
 */
function main() {
  const metricsFile = process.argv[2];

  if (!metricsFile) {
    console.error('Usage: node format-metrics.js <metrics-file>');
    process.exit(1);
  }

  if (!fs.existsSync(metricsFile)) {
    console.log('No metrics file found.');
    return;
  }

  let metrics;
  try {
    metrics = JSON.parse(fs.readFileSync(metricsFile, 'utf8'));
  } catch (err) {
    // A corrupt/truncated metrics file should fail with a readable
    // diagnostic rather than crash with a raw JSON.parse stack trace.
    console.error(`Failed to parse metrics file "${metricsFile}": ${err.message}`);
    process.exit(1);
  }

  console.log(formatMetrics(metrics));
}
|
|
|
|
// Run immediately: this file is a CLI script invoked as
// `node format-metrics.js <metrics-file>`, not a library.
main();
|