feat: Implement i18n Static Analysis Pipeline with Pre-Commit Enforcement and CI Support #9884
Workflow file for this run
| ############################################################################## | |
| ############################################################################## | |
| # | |
| # NOTE! | |
| # | |
| # Please read the README.md file in this directory that defines what should | |
| # be placed in this file | |
| # | |
| ############################################################################## | |
| ############################################################################## | |
| name: PR Workflow | |
| on: | |
| pull_request: | |
| branches: | |
| - '**' | |
| env: | |
| CODECOV_UNIQUE_NAME: CODECOV_UNIQUE_NAME-${{ github.run_id }}-${{ github.run_number }} | |
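|   # Unique per-run identifier used when uploading merged coverage to Codecov | |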
| jobs: | |
| Code-Quality-Checks: | |
| name: Performs linting, formatting, type-checking, unused file detection, and source/target branch checks | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Count number of lines | |
| run: | | |
| chmod +x ./.github/workflows/scripts/countline.py | |
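| # Enforce a 600-line cap per file; files listed after --exclude_files are exempt | |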
| ./.github/workflows/scripts/countline.py --lines 600 --exclude_files src/screens/LoginPage/LoginPage.tsx src/GraphQl/Queries/Queries.ts src/screens/OrgList/OrgList.tsx src/GraphQl/Mutations/mutations.ts src/components/EventListCard/EventListCardModals.tsx src/components/TagActions/TagActionsMocks.ts src/utils/interfaces.ts src/screens/MemberDetail/MemberDetail.tsx src/components/OrgPostCard/OrgPostCard.tsx src/components/UsersTableItem/UsersTableItem.tsx src/components/UserPortal/ChatRoom/ChatRoom.tsx | |
| - name: Get changed TypeScript files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Get all changed files | |
| ALL_CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | tr '\n' ' ') | |
| echo "all_changed_files=${ALL_CHANGED_FILES}" >> $GITHUB_OUTPUT | |
| # Count all changed files | |
| ALL_CHANGED_FILES_COUNT=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | wc -l | tr -d ' ') | |
| echo "all_changed_files_count=$ALL_CHANGED_FILES_COUNT" >> $GITHUB_OUTPUT | |
| # Check if any files changed | |
| if [ "$ALL_CHANGED_FILES_COUNT" -gt 0 ]; then | |
| echo "any_changed=true" >> $GITHUB_OUTPUT | |
| else | |
| echo "any_changed=false" >> $GITHUB_OUTPUT | |
| fi | |
| # Set only_changed to false by default (adjust logic as needed) | |
| echo "only_changed=false" >> $GITHUB_OUTPUT | |
| - name: Check formatting | |
| if: steps.changed-files.outputs.only_changed != 'true' | |
| run: pnpm format:check | |
| - name: Run formatting if check fails | |
| if: failure() | |
| run: pnpm format:fix | |
| - name: Check for type errors | |
| if: steps.changed-files.outputs.only_changed != 'true' | |
| run: pnpm typecheck | |
| - name: Check for linting errors in modified files | |
| if: steps.changed-files.outputs.only_changed != 'true' | |
| env: | |
| CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} | |
| run: pnpm exec eslint ${CHANGED_FILES} | |
| - name: Check for TSDoc comments | |
| run: pnpm check-tsdoc | |
| - name: Check for localStorage Usage | |
| run: pnpm exec tsx scripts/githooks/check-localstorage-usage.ts --scan-entire-repo | |
| - name: Compare translation files | |
| run: | | |
| chmod +x .github/workflows/scripts/compare_translations.py | |
| python .github/workflows/scripts/compare_translations.py --directory public/locales | |
| - name: Get changed source files | |
| id: changed-src | |
| run: | | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| CHANGED=$(git diff --name-only --diff-filter=ACMRT "$BASE_SHA" ${{ github.event.pull_request.head.sha }} \ | |
| | grep -E '^src/.*\.(ts|tsx|js|jsx)$' | tr '\n' ' ' || true) | |
| echo "files=$CHANGED" >> $GITHUB_OUTPUT | |
| if [ -z "$CHANGED" ]; then | |
| echo "none=true" >> $GITHUB_OUTPUT | |
| else | |
| echo "none=false" >> $GITHUB_OUTPUT | |
| fi | |
| # TODO: Re-enable after fixing existing i18n violations in all necessary files | |
| # See: scripts/check-i18n.js for the detection script | |
| # - name: Check for non-internationalized text | |
| # if: steps.changed-src.outputs.none != 'true' | |
| # run: pnpm run check-i18n -- ${{ steps.changed-src.outputs.files }} | |
| - name: Check if the source and target branches are different | |
| if: ${{ github.event.pull_request.base.ref == github.event.pull_request.head.ref }} | |
| run: | | |
| echo "Source Branch ${{ github.event.pull_request.head.ref }}" | |
| echo "Target Branch ${{ github.event.pull_request.base.ref }}" | |
| echo "Error: Source and Target Branches are the same. Please ensure they are different." | |
| echo "Error: Close this PR and try again." | |
| exit 1 | |
| - name: Check for unused files and exports in src/ and docs/src | |
| run: pnpm knip --include files,exports | |
| Check-Mock-Isolation: | |
| name: Check for proper mock cleanup in test files | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| - name: Check for proper mock cleanup | |
| run: | | |
| chmod +x scripts/githooks/check-mock-cleanup.sh | |
| ./scripts/githooks/check-mock-cleanup.sh | |
| Check-AutoDocs: | |
| name: Generate and Validate Documentation | |
| runs-on: ubuntu-latest | |
| needs: [Code-Quality-Checks] | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies (frozen) | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Run documentation generation | |
| run: pnpm run generate-docs | |
| - name: Check for uncommitted doc changes | |
| run: | | |
| if [ -n "$(git status --porcelain)" ]; then | |
| echo "::error::Documentation files are outdated or missing." | |
| echo "Please run 'pnpm run generate-docs' locally and commit the updated files." | |
| echo "" | |
| echo "Changed files:" | |
| git status --porcelain | |
| exit 1 | |
| else | |
| echo "Documentation is up to date." | |
| fi | |
| - name: Check for unused dependencies | |
| run: pnpm knip --config knip.deps.json --include dependencies | |
| Check-Sensitive-Files: | |
| if: ${{ github.actor != 'dependabot[bot]' }} | |
| name: Checks if sensitive files have been changed without authorization | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Get PR labels | |
| id: check-labels | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| if [ -z "${{ github.event.pull_request.number }}" ]; then | |
| echo "skip=false" >> $GITHUB_OUTPUT | |
| exit 0 | |
| fi | |
| LABELS="$(gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels --jq '.[].name' | tr '\n' ' ')" | |
| if echo "$LABELS" | grep -qw "ignore-sensitive-files-pr"; then | |
| echo "::notice::Skipping sensitive files check due to 'ignore-sensitive-files-pr' label." | |
| echo "skip=true" >> $GITHUB_OUTPUT | |
| else | |
| echo "skip=false" >> $GITHUB_OUTPUT | |
| fi | |
| - name: Get Changed Unauthorized files | |
| if: steps.check-labels.outputs.skip != 'true' | |
| id: changed-unauth-files | |
| run: | | |
| # Skip if not in PR context | |
| if [ -z "${{ github.event.pull_request.base.sha }}" ]; then | |
| echo "any_changed=false" >> $GITHUB_OUTPUT | |
| exit 0 | |
| fi | |
| # Determine base and head commits for comparison | |
| HEAD_SHA="${{ github.event.pull_request.head.sha || github.sha }}" | |
| BASE_SHA=$(git merge-base "${{ github.event.pull_request.base.sha }}" "$HEAD_SHA") | |
| # Define sensitive files patterns as a bash array | |
| SENSITIVE_PATTERNS=( | |
| '.flake8$' | |
| '.pydocstyle$' | |
| 'pyproject.toml$' | |
| '.env..*$' | |
| 'vitest.config.js$' | |
| 'src/App.tsx$' | |
| '^.github/.*' | |
| '^.husky/.*' | |
| '^scripts/.*' | |
| '^src/style/.*' | |
| 'schema.graphql$' | |
| 'package.json$' | |
| 'package-lock.json$' | |
| 'tsconfig.json$' | |
| '^.gitignore$' | |
| '^env.example$' | |
| '.node-version$' | |
| '.eslintrc.json$' | |
| '.eslintignore$' | |
| '.prettierrc$' | |
| '.prettierignore$' | |
| 'vite.config.ts$' | |
| '^docker/docker-compose.prod.yaml$' | |
| '^docker/docker-compose.dev.yaml$' | |
| '^docker/Dockerfile.dev$' | |
| '^docker/Dockerfile.prod$' | |
| '^config/docker/setup/nginx.conf$' | |
| '^config/docker/setup/nginx.prod.conf$' | |
| 'CODEOWNERS$' | |
| 'LICENSE$' | |
| 'setup.ts$' | |
| '.coderabbit.yaml$' | |
| 'CODE_OF_CONDUCT.md$' | |
| 'CODE_STYLE.md$' | |
| 'CONTRIBUTING.md$' | |
| 'DOCUMENTATION.md$' | |
| 'INSTALLATION.md$' | |
| 'ISSUE_GUIDELINES.md$' | |
| 'PR_GUIDELINES.md$' | |
| 'README.md$' | |
| '.*.pem$' | |
| '.*.key$' | |
| '.*.cert$' | |
| '.*.password$' | |
| '.*.secret$' | |
| '.*.credentials$' | |
| '.nojekyll$' | |
| 'yarn.lock$' | |
| 'knip.json$' | |
| 'knip.deps.json$' | |
| '^docs/docusaurus.config.ts$' | |
| '^docs/sidebar..*' | |
| 'CNAME$' | |
| ) | |
| # Check for changes in sensitive files | |
| CHANGED_UNAUTH_FILES="" | |
| for pattern in "${SENSITIVE_PATTERNS[@]}"; do | |
| FILES=$(git diff --name-only --diff-filter=ACMRD "$BASE_SHA" "$HEAD_SHA" | grep -E "$pattern" || true) | |
| if [ ! -z "$FILES" ]; then | |
| CHANGED_UNAUTH_FILES="$CHANGED_UNAUTH_FILES $FILES" | |
| fi | |
| done | |
| # Trim and format output | |
| CHANGED_UNAUTH_FILES=$(echo "$CHANGED_UNAUTH_FILES" | xargs) | |
| echo "all_changed_files=$CHANGED_UNAUTH_FILES" >> $GITHUB_OUTPUT | |
| # Check if any unauthorized files changed | |
| if [ ! -z "$CHANGED_UNAUTH_FILES" ]; then | |
| echo "any_changed=true" >> $GITHUB_OUTPUT | |
| else | |
| echo "any_changed=false" >> $GITHUB_OUTPUT | |
| fi | |
| - name: List all changed unauthorized files | |
| if: steps.changed-unauth-files.outputs.any_changed == 'true' | |
| env: | |
| CHANGED_UNAUTH_FILES: ${{ steps.changed-unauth-files.outputs.all_changed_files }} | |
| run: | | |
| echo "::error::Unauthorized changes detected in sensitive files:" | |
| echo "" | |
| for file in $CHANGED_UNAUTH_FILES; do | |
| echo "- $file" | |
| done | |
| echo "" | |
| echo "To override:" | |
| echo "Add the 'ignore-sensitive-files-pr' label to this PR." | |
| exit 1 | |
| Count-Changed-Files: | |
| if: ${{ github.actor != 'dependabot[bot]' }} | |
| name: Checks if the number of files changed is acceptable | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Get changed files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Count all changed files excluding .md files | |
| ALL_CHANGED_FILES_COUNT=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | grep -v -i "\.md$" | wc -l | tr -d ' ') | |
| echo "all_changed_files_count=$ALL_CHANGED_FILES_COUNT" >> $GITHUB_OUTPUT | |
| - name: Echo number of changed files | |
| env: | |
| CHANGED_FILES_COUNT: ${{ steps.changed-files.outputs.all_changed_files_count }} | |
| run: | | |
| echo "Number of files changed: $CHANGED_FILES_COUNT" | |
| - name: Check if the number of changed files is less than 100 | |
| if: steps.changed-files.outputs.all_changed_files_count > 100 | |
| env: | |
| CHANGED_FILES_COUNT: ${{ steps.changed-files.outputs.all_changed_files_count }} | |
| run: | | |
| echo "Error: Too many files (greater than 100) changed in the pull request." | |
| echo "Possible issues:" | |
| echo "- Contributor may be merging into an incorrect branch." | |
| echo "- Source branch may be incorrect please use develop as source branch." | |
| exit 1 | |
| Check-ESlint-Disable: | |
| name: Check for eslint-disable | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Get changed files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Get all changed files | |
| ALL_CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | tr '\n' ' ') | |
| echo "all_changed_files=${ALL_CHANGED_FILES}" >> $GITHUB_OUTPUT | |
| - name: Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: 3.9 | |
| - name: Run Python script | |
| run: | | |
| python .github/workflows/scripts/eslint_disable_check.py --files ${{ steps.changed-files.outputs.all_changed_files }} | |
| Check-Code-Coverage-Disable: | |
| name: Check for code coverage disable | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Get changed files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Get all changed files | |
| ALL_CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | tr '\n' ' ') | |
| echo "all_changed_files=${ALL_CHANGED_FILES}" >> $GITHUB_OUTPUT | |
| - name: Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: 3.9 | |
| - name: Run Python script | |
| run: | | |
| python .github/workflows/scripts/code_coverage_disable_check.py --files ${{ steps.changed-files.outputs.all_changed_files }} | |
| Check-ItSkip-Disable: | |
| name: Check for it.skip commands | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for all branches and tags | |
| - name: Get changed files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Get all changed files | |
| ALL_CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | tr '\n' ' ') | |
| echo "all_changed_files=${ALL_CHANGED_FILES}" >> $GITHUB_OUTPUT | |
| - name: Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: 3.9 | |
| - name: Run Python script | |
| run: | | |
| python .github/workflows/scripts/itskip_disable_check.py --files ${{ steps.changed-files.outputs.all_changed_files }} | |
| Test-Application: | |
| name: Test Application (Shard ${{ matrix.shard }}) | |
| runs-on: ubuntu-latest | |
| needs: | |
| [ | |
| Code-Quality-Checks, | |
| Check-AutoDocs, | |
| Check-ESlint-Disable, | |
| Check-Code-Coverage-Disable, | |
| Check-ItSkip-Disable, | |
| Check-Mock-Isolation, | |
| ] | |
| env: | |
| TOTAL_SHARDS: 12 | |
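| # TOTAL_SHARDS should match the number of entries in the matrix.shard list below | |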
| strategy: | |
| fail-fast: false | |
| matrix: | |
| shard: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Get changed TypeScript files | |
| id: changed-files | |
| run: | | |
| # Compute the merge base of the PR base and head commits | |
| BASE_SHA=$(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) | |
| # Check if any files changed | |
| ANY_CHANGED=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | wc -l) | |
| if [ "$ANY_CHANGED" -gt 0 ]; then | |
| echo "any_changed=true" >> $GITHUB_OUTPUT | |
| else | |
| echo "any_changed=false" >> $GITHUB_OUTPUT | |
| fi | |
| # Get all changed files | |
| ALL_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | tr '\n' ' ') | |
| echo "all_files=$ALL_FILES" >> $GITHUB_OUTPUT | |
| # Get TypeScript files specifically | |
| TS_FILES=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA ${{ github.event.pull_request.head.sha }} | grep -E '\.tsx?$' | tr '\n' ' ') | |
| echo "ts_files=$TS_FILES" >> $GITHUB_OUTPUT | |
| - name: TypeScript compilation | |
| run: pnpm exec tsc --noEmit | |
| - name: Run Vitest Tests (Shard ${{ matrix.shard }}/${{ env.TOTAL_SHARDS }}) | |
| if: steps.changed-files.outputs.any_changed == 'true' | |
| env: | |
| NODE_V8_COVERAGE: './coverage/vitest' | |
| NODE_OPTIONS: '--max-old-space-size=4096 --disable-warning=ExperimentalWarning' | |
| SHARD_INDEX: ${{ matrix.shard }} | |
| SHARD_COUNT: ${{ env.TOTAL_SHARDS }} | |
| CI: true | |
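| # SHARD_INDEX and SHARD_COUNT are presumably consumed by the test:shard:coverage script to select this shard's slice of the Vitest suite | |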
| run: pnpm test:shard:coverage | |
| - name: Upload coverage artifact | |
| if: always() && steps.changed-files.outputs.any_changed == 'true' | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: coverage-shard-${{ matrix.shard }} | |
| path: ./coverage/vitest/ | |
| retention-days: 1 | |
| Merge-Coverage: | |
| name: Merge Coverage Reports | |
| runs-on: ubuntu-latest | |
| needs: [Test-Application] | |
| if: success() | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 # Fetch all history for Codecov to calculate patch coverage | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Download all coverage artifacts | |
| id: download-artifacts | |
| continue-on-error: true | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: coverage-shard-* | |
| path: ./coverage-shards/ | |
| merge-multiple: false | |
| - name: Check if artifacts were downloaded | |
| id: check-artifacts | |
| run: | | |
| # Check if any coverage files exist | |
| if find coverage-shards -name "lcov.info" -type f | grep -q .; then | |
| echo "artifacts_found=true" >> $GITHUB_OUTPUT | |
| echo "Coverage artifacts found" | |
| else | |
| echo "artifacts_found=false" >> $GITHUB_OUTPUT | |
| echo "No coverage artifacts found - tests may have been skipped" | |
| fi | |
| - name: Merge coverage reports | |
| if: steps.check-artifacts.outputs.artifacts_found == 'true' | |
| run: | | |
| mkdir -p ./coverage/vitest | |
| mkdir -p ./coverage/tmp | |
| # Find all coverage directories from shards | |
| echo "Finding coverage data from shards..." | |
| SHARD_DIRS=$(find coverage-shards -type d -name "coverage-shard-*" 2>/dev/null || true) | |
| if [ -z "$SHARD_DIRS" ]; then | |
| echo "ERROR: No shard directories found!" | |
| ls -la coverage-shards/ || true | |
| exit 1 | |
| fi | |
| echo "Found shard directories:" | |
| echo "$SHARD_DIRS" | |
| # Check if we have JSON coverage files (better for merging) | |
| JSON_FILES=$(find coverage-shards -name "coverage-final.json" -type f 2>/dev/null || true) | |
| if [ -n "$JSON_FILES" ]; then | |
| echo "Using JSON coverage files for accurate merging..." | |
| # Copy all JSON files to a temp directory for nyc merge | |
| for shard_dir in coverage-shards/coverage-shard-*/; do | |
| if [ -f "${shard_dir}coverage-final.json" ]; then | |
| echo "Found JSON coverage in: $shard_dir" | |
| cp "${shard_dir}coverage-final.json" "./coverage/tmp/coverage-shard-$(basename $shard_dir).json" | |
| fi | |
| done | |
| # Validate JSON files before merging | |
| echo "Validating JSON coverage files..." | |
| JSON_COUNT=$(find ./coverage/tmp -name "*.json" -type f | wc -l) | |
| echo "Found $JSON_COUNT JSON files to merge" | |
| if [ "$JSON_COUNT" -eq 0 ]; then | |
| echo "ERROR: No JSON coverage files found!" | |
| exit 1 | |
| fi | |
| # Show sample of file count in each JSON | |
| for json_file in ./coverage/tmp/*.json; do | |
| FILE_COUNT=$(jq 'keys | length' "$json_file" 2>/dev/null || echo "0") | |
| echo " $(basename $json_file): $FILE_COUNT files" | |
| done | |
| # Merge using nyc (more accurate than lcov merge) | |
| echo "Merging coverage with nyc..." | |
| pnpm exec nyc merge ./coverage/tmp ./.nyc_output/coverage-final.json | |
| # Validate merged JSON | |
| MERGED_FILE_COUNT=$(jq 'keys | length' ./.nyc_output/coverage-final.json 2>/dev/null || echo "0") | |
| echo "Merged coverage contains $MERGED_FILE_COUNT files" | |
| # Generate lcov from merged JSON | |
| echo "Generating lcov report from merged coverage..." | |
| pnpm exec nyc report --reporter=lcov --report-dir=./coverage/vitest | |
| else | |
| echo "Using LCOV files for merging..." | |
| # Find all lcov.info files from shards | |
| find coverage-shards -name "lcov.info" -type f > lcov-files.txt | |
| if [ ! -s lcov-files.txt ]; then | |
| echo "ERROR: No lcov.info files found!" | |
| exit 1 | |
| fi | |
| echo "Found coverage files:" | |
| cat lcov-files.txt | |
| # Validate each file exists and is not empty (fail-fast) | |
| while IFS= read -r file; do | |
| if [ ! -f "$file" ]; then | |
| echo "ERROR: Coverage file does not exist: $file" | |
| exit 1 | |
| fi | |
| if [ ! -s "$file" ]; then | |
| echo "ERROR: Coverage file is empty: $file" | |
| exit 1 | |
| fi | |
| done < lcov-files.txt | |
| # Use lcov-result-merger as fallback | |
| pnpm exec lcov-result-merger "coverage-shards/*/lcov.info" ./coverage/vitest/lcov.info | |
| fi | |
| # Validate merged file exists and is not empty | |
| if [ ! -s ./coverage/vitest/lcov.info ]; then | |
| echo "ERROR: Merged coverage file is empty or missing!" | |
| exit 1 | |
| fi | |
| echo "Coverage merge successful" | |
| echo "Merged file size: $(wc -l < ./coverage/vitest/lcov.info) lines" | |
| # Count number of source files in merged coverage | |
| SF_COUNT=$(grep -c "^SF:" ./coverage/vitest/lcov.info || echo "0") | |
| echo "Number of files in merged coverage: $SF_COUNT" | |
| if [ "$SF_COUNT" -lt 250 ]; then | |
| echo "WARNING: Only $SF_COUNT files in coverage (expected 300+)" | |
| echo "This might indicate incomplete coverage merge" | |
| fi | |
| echo "First 30 lines of merged coverage:" | |
| head -30 ./coverage/vitest/lcov.info | |
| - name: Clean up individual shard coverage files | |
| if: steps.check-artifacts.outputs.artifacts_found == 'true' | |
| run: | | |
| echo "Cleaning up individual shard coverage files..." | |
| # Remove all individual coverage JSON files to prevent Codecov from finding them | |
| # This ensures only the merged lcov.info is uploaded | |
| find ./coverage -name "coverage-*.json" -type f -delete | |
| find ./coverage -name "coverage-final.json" -type f -delete | |
| rm -rf ./coverage/tmp ./.nyc_output 2>/dev/null || true | |
| echo "Cleanup complete. Remaining coverage files:" | |
| find ./coverage -type f \( -name "*.info" -o -name "*.json" \) | |
| echo "" | |
| echo "Final coverage file to upload:" | |
| ls -lh ./coverage/vitest/lcov.info | |
| - name: Present and upload merged coverage to Codecov | |
| if: steps.check-artifacts.outputs.artifacts_found == 'true' | |
| uses: codecov/codecov-action@v5 | |
| with: | |
| name: '${{env.CODECOV_UNIQUE_NAME}}-merged' | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| # Using fail_ci_if_error: true to match develop branch behavior | |
| # This is safe now because we validate the merged file is non-empty above | |
| fail_ci_if_error: true | |
| verbose: true | |
| exclude: 'docs/' | |
| gcov_ignore: 'docs/' | |
| files: ./coverage/vitest/lcov.info | |
| flags: vitest | |
| - name: Test acceptable level of code coverage | |
| if: steps.check-artifacts.outputs.artifacts_found == 'true' | |
| uses: VeryGoodOpenSource/very_good_coverage@v3 | |
| with: | |
| path: './coverage/vitest/lcov.info' | |
| min_coverage: 95.0 | |
| # Graphql-Inspector: | |
| # if: ${{ github.actor != 'dependabot[bot]' }} | |
| # name: Runs Introspection on the GitHub talawa-api repo on the schema.graphql file | |
| # runs-on: ubuntu-latest | |
| # steps: | |
| # - name: Checkout the Repository | |
| # uses: actions/checkout@v4 | |
| # - name: Set up Node.js | |
| # uses: actions/setup-node@v4 | |
| # with: | |
| # node-version: '24.x' | |
| # - name: resolve dependency | |
| # run: npm install -g @graphql-inspector/cli | |
| # - name: Clone API Repository | |
| # run: | | |
| # # Retrieve the complete branch name directly from the GitHub context | |
| # FULL_BRANCH_NAME=${{ github.base_ref }} | |
| # echo "FULL_Branch_NAME: $FULL_BRANCH_NAME" | |
| # # Clone the specified repository using the extracted branch name | |
| # git clone --branch $FULL_BRANCH_NAME https://github.com/PalisadoesFoundation/talawa-api && ls -a | |
| # - name: Validate Documents | |
| # run: graphql-inspector validate './src/GraphQl/**/*.ts' './talawa-api/schema.graphql' | |
| Start-App-Without-Docker: | |
| name: Check if Talawa Admin app starts (No Docker) | |
| runs-on: ubuntu-latest | |
| needs: [Code-Quality-Checks, Merge-Coverage] | |
| if: github.actor != 'dependabot[bot]' | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Build Production App | |
| run: pnpm run build | |
| - name: Start Production App | |
| run: | | |
| pnpm run preview & | |
| echo $! > .pidfile_prod | |
| - name: Check if Production App is running | |
| run: | | |
| chmod +x .github/workflows/scripts/app_health_check.sh | |
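| # Arguments are assumed to be: <port> <timeout in seconds> | |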
| .github/workflows/scripts/app_health_check.sh 4173 120 | |
| - name: Stop Production App | |
| run: | | |
| if [ -f .pidfile_prod ]; then | |
| kill "$(cat .pidfile_prod)" | |
| fi | |
| - name: Start Development App | |
| run: | | |
| pnpm run serve & | |
| echo $! > .pidfile_dev | |
| - name: Check if Development App is running | |
| run: | | |
| chmod +x .github/workflows/scripts/app_health_check.sh | |
| .github/workflows/scripts/app_health_check.sh 4321 120 | |
| - name: Stop Development App | |
| if: always() | |
| run: | | |
| if [ -f .pidfile_dev ]; then | |
| kill "$(cat .pidfile_dev)" | |
| fi | |
| Start-App-Using-Docker: | |
| name: Check if Talawa Admin app starts in Docker | |
| runs-on: ubuntu-latest | |
| needs: [Code-Quality-Checks, Merge-Coverage] | |
| if: github.actor != 'dependabot[bot]' | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| - name: Generate `.env` File with Hardcoded Values | |
| run: | | |
| cat <<EOF > .env | |
| PORT=4321 | |
| REACT_APP_TALAWA_URL=http://localhost:4000/graphql | |
| REACT_APP_USE_RECAPTCHA= | |
| REACT_APP_RECAPTCHA_SITE_KEY= | |
| REACT_APP_BACKEND_WEBSOCKET_URL=ws://localhost:4000/graphql | |
| ALLOW_LOGS=NO | |
| USE_DOCKER=YES | |
| DOCKER_PORT=4321 | |
| EOF | |
| - name: Set up Docker | |
| uses: docker/setup-buildx-action@v3 | |
| with: | |
| driver-opts: | | |
| image=moby/buildkit:latest | |
| - name: Build Docker images | |
| run: | | |
| set -e | |
| echo "Building Docker images..." | |
| docker compose -f docker/docker-compose.prod.yaml build | |
| docker compose -f docker/docker-compose.dev.yaml build | |
| echo "Docker images built successfully" | |
| - name: Run Docker Containers (Production) | |
| run: | | |
| set -e | |
| echo "Starting Docker container for production..." | |
| docker compose -f docker/docker-compose.prod.yaml up -d | |
| echo "Production Docker container started successfully" | |
| - name: Check if Talawa Admin App is running (Production) | |
| run: | | |
| chmod +x .github/workflows/scripts/app_health_check.sh | |
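| # Third argument differs from the non-Docker checks and presumably enables a Docker-specific health check mode | |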
| .github/workflows/scripts/app_health_check.sh 4321 120 true | |
| - name: Stop prod Docker Containers | |
| if: always() | |
| run: | | |
| docker compose -f docker/docker-compose.prod.yaml down | |
| echo "Prod Docker container stopped and removed" | |
| - name: Run Docker Containers (Development) | |
| run: | | |
| set -e | |
| echo "Starting Docker container for development..." | |
| docker compose -f docker/docker-compose.dev.yaml up -d | |
| echo "Development Docker container started successfully" | |
| - name: Check if Talawa Admin App is running (Development) | |
| run: | | |
| chmod +x .github/workflows/scripts/app_health_check.sh | |
| .github/workflows/scripts/app_health_check.sh 4321 120 true | |
| - name: Stop dev Docker Containers | |
| if: always() | |
| run: | | |
| docker compose -f docker/docker-compose.dev.yaml down | |
| echo "Dev Docker containers stopped and removed" | |
| Test-Docusaurus-Deployment: | |
| name: Test Deployment to https://docs-admin.talawa.io | |
| runs-on: ubuntu-latest | |
| needs: [Start-App-Using-Docker, Start-App-Without-Docker] | |
| # Run only if the target branch is develop and the actor is not dependabot | |
| if: ${{ github.actor != 'dependabot[bot]' && github.event.pull_request.base.ref == 'develop' }} | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| working-directory: ./docs | |
| run: | | |
| if [ -f pnpm-lock.yaml ]; then | |
| echo "pnpm-lock.yaml found — running pnpm fetch" | |
| pnpm fetch | |
| else | |
| echo "No pnpm-lock.yaml found — running pnpm install to generate it" | |
| pnpm install --frozen-lockfile=false | |
| fi | |
| - name: Install dependencies (allow lockfile creation) | |
| working-directory: ./docs | |
| run: | | |
| if [ -f pnpm-lock.yaml ]; then | |
| pnpm install --frozen-lockfile --prefer-offline | |
| else | |
| echo "pnpm-lock.yaml not found — installing without --frozen-lockfile" | |
| pnpm install --prefer-offline | |
| fi | |
| - name: Test building the website | |
| working-directory: ./docs | |
| run: pnpm run build | |
| Check-Target-Branch: | |
| if: ${{ github.actor != 'dependabot[bot]' }} | |
| name: Check Target Branch | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Check if the target branch is develop | |
| if: github.event.pull_request.base.ref != 'develop' | |
| run: | | |
| echo "Error: Pull request target branch must be 'develop'. Please refer PR_GUIDELINES.md" | |
| echo "Error: Close this PR and try again." | |
| exit 1 | |
| Python-Compliance: | |
| name: Check Python Code Style | |
| runs-on: ubuntu-latest | |
| needs: [Code-Quality-Checks] | |
| steps: | |
| - name: Checkout | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| - name: Set up Python 3.11 | |
| uses: actions/setup-python@v4 | |
| with: | |
| python-version: 3.11 | |
| - name: Cache pip packages | |
| uses: actions/cache@v4 | |
| with: | |
| path: ~/.cache/pip | |
| key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | |
| restore-keys: | | |
| ${{ runner.os }}-pip- | |
| - name: Install dependencies | |
| run: | | |
| python3 -m venv venv | |
| source venv/bin/activate | |
| python -m pip install --upgrade pip | |
| pip install -r .github/workflows/requirements.txt | |
| - name: Run Black Formatter Check | |
| run: | | |
| source venv/bin/activate | |
| black --check . | |
| - name: Run Flake8 Linter | |
| run: | | |
| source venv/bin/activate | |
| flake8 --docstring-convention google --ignore E402,E722,E203,F401,W503 .github | |
| - name: Run pydocstyle | |
| run: | | |
| source venv/bin/activate | |
| pydocstyle --convention=google --add-ignore=D415,D205 .github | |
| - name: Run docstring compliance check | |
| run: | | |
| source venv/bin/activate | |
| python .github/workflows/scripts/check_docstrings.py --directories .github | |
| Test-Application-E2E: | |
| timeout-minutes: 35 | |
| runs-on: ubuntu-latest | |
| needs: | |
| [ | |
| Code-Quality-Checks, | |
| Check-AutoDocs, | |
| Check-ESlint-Disable, | |
| Check-Code-Coverage-Disable, | |
| Check-ItSkip-Disable, | |
| ] | |
| env: | |
| REACT_APP_TALAWA_URL: http://127.0.0.1:4000/graphql | |
| steps: | |
| - name: Checkout Backend | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: PalisadoesFoundation/talawa-api | |
| ref: develop | |
| - name: Setup Devcontainer | |
| run: | | |
| npm install -g @devcontainers/cli | |
| cp envFiles/.env.devcontainer .env | |
| devcontainer up --workspace-folder . | |
| echo "Waiting for devcontainer services to be ready..." | |
| sleep 10 | |
| - name: Install Backend Dependencies | |
| run: docker exec talawa-api-1 /bin/bash -c 'pnpm install' | |
| - name: Apply Database Migrations | |
| run: | | |
| docker exec talawa-api-1 /bin/bash -c 'pnpm apply_drizzle_migrations' | |
| - name: Start Backend Server | |
| run: | | |
| docker exec -d talawa-api-1 /bin/bash -c 'pnpm run start_development_server' | |
| sleep 15 | |
| - name: Wait for GraphQL endpoint to become available | |
| if: success() | |
| run: | | |
| echo "Waiting for the GraphQL endpoint to become available..." | |
| for i in {1..40}; do | |
| if ! docker ps | grep -q talawa-api-1; then | |
| echo "Container talawa-api-1 not found. Waiting..." | |
| sleep 2 | |
| continue | |
| fi | |
| docker exec talawa-api-1 which curl >/dev/null 2>&1 || { | |
| docker exec talawa-api-1 apt-get update && \ | |
| docker exec talawa-api-1 apt-get install -y curl | |
| } | |
| RESPONSE=$(docker exec talawa-api-1 curl -s -X POST \ | |
| http://127.0.0.1:4000/graphql -H "Content-Type: application/json" \ | |
| -d '{"query":"{__typename}"}' 2>/dev/null || echo "Connection failed") | |
| if echo "$RESPONSE" | grep -q '__typename'; then | |
| echo "GraphQL endpoint is available, Backend is ready!" | |
| exit 0 | |
| fi | |
| echo "GraphQL endpoint not ready. Retrying in 2 seconds..." | |
| sleep 2 | |
| done | |
| echo "GraphQL endpoint did not become available within the expected time." | |
| exit 1 | |
| - name: Seed Sample Data | |
| run: | | |
| echo "=== Seeding Sample Data ===" | |
| if docker exec talawa-api-1 /bin/bash -c 'set -a; source ./.env; set +a; pnpm run add:sample_data'; then | |
| echo "Seeding completed successfully" | |
| else | |
| echo "Seeding failed - Debug Information:" | |
| echo "Container status:" | |
| docker ps | grep talawa | |
| echo "Recent container logs:" | |
| docker logs talawa-api-1 --tail 50 | |
| echo "=== Users table contents ===" | |
| docker exec talawa-postgres-1 psql -U talawa -d talawa \ | |
| -c "SELECT id, email_address, name, role FROM users;" 2>/dev/null || echo "Could not query users" | |
| exit 1 | |
| fi | |
| - name: Checkout Frontend | |
| uses: actions/checkout@v4 | |
| with: | |
| path: frontend | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| working-directory: frontend | |
| run: pnpm fetch | |
| - name: Install Frontend Dependencies (frozen) | |
| working-directory: frontend | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Ensure Cypress binary is installed | |
| working-directory: frontend | |
| run: pnpm exec cypress install | |
| - name: Setup .env | |
| working-directory: frontend | |
| run: | | |
| pwd && cp .env.example .env | |
| echo $REACT_APP_TALAWA_URL | |
| curl -s -X POST http://127.0.0.1:4000/graphql \ | |
| -H "Content-Type: application/json" \ | |
| -d '{"query":"{__typename}"}' 2>/dev/null | |
| - name: Run Cypress Tests with Dev Server | |
| uses: cypress-io/github-action@v6 | |
| with: | |
| working-directory: frontend | |
| start: pnpm run serve | |
| wait-on: 'http://localhost:4321' | |
| wait-on-timeout: 120 | |
| config-file: cypress.config.ts | |
| install: false | |
| env: | |
| CYPRESS_BASE_URL: http://localhost:4321 | |
| CYPRESS_API_URL: http://127.0.0.1:4000/graphql | |
| REACT_APP_BACKEND_WEBSOCKET_URL: ws://127.0.0.1:4000/graphql | |
| - name: Upload cypress screenshots on failure | |
| uses: actions/upload-artifact@v4 | |
| if: failure() | |
| with: | |
| name: cypress-screenshots | |
| path: frontend/cypress/screenshots | |
| compression-level: 9 | |
| ZAP-Security-Scan: | |
| name: ZAP Security Scan | |
| runs-on: ubuntu-latest | |
| needs: | |
| [ | |
| Test-Application, | |
| Test-Application-E2E, | |
| Start-App-Without-Docker, | |
| Start-App-Using-Docker, | |
| ] | |
| permissions: | |
| contents: read | |
| steps: | |
| - name: Checkout the Repository | |
| uses: actions/checkout@v4 | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| run_install: false | |
| - name: Set up Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '24.x' | |
| cache: 'pnpm' | |
| - name: Prepare dependency store | |
| run: pnpm fetch | |
| - name: Install Dependencies (frozen) | |
| run: pnpm install --frozen-lockfile --prefer-offline | |
| - name: Start Application | |
| run: | | |
| pnpm run serve & | |
| echo $! > .pidfile_dev | |
| - name: Check if Development App is running | |
| run: | | |
| chmod +x .github/workflows/scripts/app_health_check.sh | |
| .github/workflows/scripts/app_health_check.sh 4321 120 | |
| - name: ZAP Scan | |
| uses: zaproxy/[email protected] | |
| with: | |
| target: 'http://localhost:4321' | |
| allow_issue_writing: false | |
| - name: Upload ZAP Report | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: zap-scan-report | |
| path: report_html.html | |
| - name: Stop Development App | |
| if: always() | |
| run: | | |
| if [ -f .pidfile_dev ]; then | |
| kill "$(cat .pidfile_dev)" | |
| fi |