diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..bf4c16a
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    commit-message:
+      prefix: "chore(deps)"
+    labels:
+      - "dependencies"
diff --git a/.github/workflows/aws-cdk.yml b/.github/workflows/aws-cdk.yml
index 3453802..09c33d4 100644
--- a/.github/workflows/aws-cdk.yml
+++ b/.github/workflows/aws-cdk.yml
@@ -110,11 +110,11 @@ jobs:
         id: node-version
         run: |
           if [ -f ".nvmrc" ]; then
-            NODE_VERSION=$(cat .nvmrc | tr -d '\n' | tr -d 'v')
-            echo "version=$NODE_VERSION" >> $GITHUB_OUTPUT
-            echo "✅ Using Node.js version from .nvmrc: $NODE_VERSION"
+            NODE_VERSION="$(tr -d '\n' < .nvmrc | tr -d 'v')"
+            echo "version=$NODE_VERSION" >> "$GITHUB_OUTPUT"
+            echo "Using Node.js version from .nvmrc: $NODE_VERSION"
           else
-            echo "❌ Error: No .nvmrc file found. Please create an .nvmrc file with the required Node.js version."
+            echo "Error: No .nvmrc file found. Please create an .nvmrc file with the required Node.js version."
             exit 1
           fi
@@ -123,17 +123,17 @@ jobs:
         run: |
           if [ -f "yarn.lock" ]; then
             if [ -f ".yarnrc.yml" ] || [ -f ".yarnrc" ]; then
-              echo "manager=yarn-berry" >> $GITHUB_OUTPUT
+              echo "manager=yarn-berry" >> "$GITHUB_OUTPUT"
               echo "✅ Detected Yarn Berry (v2+)"
             else
-              echo "manager=yarn-classic" >> $GITHUB_OUTPUT
+              echo "manager=yarn-classic" >> "$GITHUB_OUTPUT"
               echo "✅ Detected Yarn Classic (v1)"
             fi
           elif [ -f "pnpm-lock.yaml" ]; then
-            echo "manager=pnpm" >> $GITHUB_OUTPUT
+            echo "manager=pnpm" >> "$GITHUB_OUTPUT"
             echo "✅ Detected pnpm"
           else
-            echo "manager=npm" >> $GITHUB_OUTPUT
+            echo "manager=npm" >> "$GITHUB_OUTPUT"
             echo "✅ Detected npm"
           fi
@@ -156,7 +156,7 @@ jobs:
           # Initialize yarn and ensure cache directory exists for post-job cache step
           yarn --version
-          CACHE_DIR=$(yarn config get cacheFolder)
+          CACHE_DIR="$(yarn config get cacheFolder)"
           mkdir -p "$CACHE_DIR"
           echo "✅ Corepack enabled and cache directory created: $CACHE_DIR"
@@ -170,7 +170,7 @@ jobs:
         id: cache-config
         run: |
           CACHE_KEY="node-${{ steps.node-version.outputs.version }}-${{ steps.detect-package-manager.outputs.manager }}-${{ hashFiles('**/package-lock.json', '**/yarn.lock', '**/pnpm-lock.yaml') }}"
-          echo "key=$CACHE_KEY" >> $GITHUB_OUTPUT
+          echo "key=$CACHE_KEY" >> "$GITHUB_OUTPUT"

   # Validate inputs and prepare deployment configuration
   prepare:
@@ -192,16 +192,18 @@ jobs:
       - name: Set CDK commands
         id: parse-cdk-config
         run: |
-          echo "✅ CDK commands:"
+          echo "CDK commands:"
           echo " bootstrap: ${{ inputs.bootstrap-command }}"
           echo " synth: ${{ inputs.synth-command }}"
           echo " diff: ${{ inputs.diff-command }}"
           echo " deploy: ${{ inputs.deploy-command }}"
-          echo "bootstrap-cmd=${{ inputs.bootstrap-command }}" >> $GITHUB_OUTPUT
-          echo "synth-cmd=${{ inputs.synth-command }}" >> $GITHUB_OUTPUT
-          echo "diff-cmd=${{ inputs.diff-command }}" >> $GITHUB_OUTPUT
-          echo "deploy-cmd=${{ inputs.deploy-command }}" >> $GITHUB_OUTPUT
+          {
+            echo "bootstrap-cmd=${{ inputs.bootstrap-command }}"
+            echo "synth-cmd=${{ inputs.synth-command }}"
+            echo "diff-cmd=${{ inputs.diff-command }}"
+            echo "deploy-cmd=${{ inputs.deploy-command }}"
+          } >> "$GITHUB_OUTPUT"

       - name: Resolve stack name
         id: resolve-stack-name
@@ -212,7 +214,7 @@ jobs:
           else
             STACK_NAME="${{ vars.STACK_NAME }}"
           fi
-          echo "stack-name=$STACK_NAME" >> $GITHUB_OUTPUT
+          echo "stack-name=$STACK_NAME" >> "$GITHUB_OUTPUT"

       - name: Validate required inputs
         run: |
@@ -266,8 +268,7 @@ jobs:
           # Validate context JSON if provided
           if [ "${{ inputs.context-values }}" != "{}" ]; then
-            echo '${{ inputs.context-values }}' | jq . > /dev/null
-            if [ $? -ne 0 ]; then
+            if ! echo '${{ inputs.context-values }}' | jq . > /dev/null; then
               echo "❌ Error: context-values must be valid JSON"
               exit 1
             fi
@@ -298,14 +299,14 @@ jobs:
             done < <(echo '${{ inputs.context-values }}' | jq -r 'to_entries[] | "--context \(.key)=\(.value)"')
           fi

-          echo "args=$context_args" >> $GITHUB_OUTPUT
+          echo "args=$context_args" >> "$GITHUB_OUTPUT"
           echo "✅ Context arguments configured"

       - name: Sanitise stack name
         id: sanitise
         run: |
-          sanitised_cdk_stack_name=$(echo "${{ steps.resolve-stack-name.outputs.stack-name }}" | tr -cd '[:alnum:]-_')
-          echo "sanitised-cdk-stack-name=$sanitised_cdk_stack_name" >> $GITHUB_OUTPUT
+          sanitised_cdk_stack_name="$(echo "${{ steps.resolve-stack-name.outputs.stack-name }}" | tr -cd '[:alnum:]-_')"
+          echo "sanitised-cdk-stack-name=$sanitised_cdk_stack_name" >> "$GITHUB_OUTPUT"

   # Bootstrap CDK environment if required
   bootstrap:
@@ -379,7 +380,7 @@ jobs:
       - name: Bootstrap CDK environment
         run: |
-          echo "🥾 Bootstrapping CDK environment..."
+          echo "Bootstrapping CDK environment..."

           verbose=""
           if [ "${{ inputs.debug }}" = "true" ]; then
@@ -396,15 +397,18 @@ jobs:
              role_args="--cloudformation-execution-policies ${{ secrets.CFN_EXECUTION_ROLE }}"
            fi

+            account_id="$(aws sts get-caller-identity --query Account --output text)"
+
+            # shellcheck disable=SC2086
             $BOOTSTRAP_CMD \
-              aws://$(aws sts get-caller-identity --query Account --output text)/${{ inputs.aws-region }} \
+              "aws://${account_id}/${{ inputs.aws-region }}" \
               $role_args \
               $verbose
           else
+            # shellcheck disable=SC2086
             $BOOTSTRAP_CMD ${{ inputs.extra-arguments }} $verbose
           fi

-          echo "✅ CDK environment bootstrapped successfully"
+          echo "CDK environment bootstrapped successfully"

   # Synthesize CDK application
   synth:
@@ -603,11 +607,13 @@ jobs:
           # Analyze diff for changes
           if echo "$diff_output" | grep -q "There were no differences"; then
-            echo "has-changes=false" >> $GITHUB_OUTPUT
-            echo "summary=No infrastructure changes detected" >> $GITHUB_OUTPUT
-            echo "ℹ️ No changes detected in infrastructure"
+            {
+              echo "has-changes=false"
+              echo "summary=No infrastructure changes detected"
+            } >> "$GITHUB_OUTPUT"
+            echo "No changes detected in infrastructure"
           else
-            echo "has-changes=true" >> $GITHUB_OUTPUT
+            echo "has-changes=true" >> "$GITHUB_OUTPUT"

             # Create summary
             summary="Infrastructure changes detected"
@@ -615,8 +621,8 @@ jobs:
               summary="$summary - Resource modifications found"
             fi

-            echo "summary=$summary" >> $GITHUB_OUTPUT
-            echo "⚠️ Infrastructure changes detected!"
+            echo "summary=$summary" >> "$GITHUB_OUTPUT"
+            echo "Infrastructure changes detected!"
             echo "$diff_output"
           fi
@@ -730,14 +736,19 @@ jobs:
           # Extract stack outputs
           if [ -f "stack-outputs.json" ]; then
             # Compact JSON to single line to avoid multiline output issues
-            outputs=$(jq -c '.' stack-outputs.json)
-            echo "stack-outputs=$outputs" >> $GITHUB_OUTPUT
+            outputs="$(jq -c '.' stack-outputs.json)"
stack-outputs.json)" + { + echo "stack-outputs=$outputs" + echo "status=success" + } >> "$GITHUB_OUTPUT" else - echo "stack-outputs={}" >> $GITHUB_OUTPUT + { + echo "stack-outputs={}" + echo "status=success" + } >> "$GITHUB_OUTPUT" fi - echo "status=success" >> $GITHUB_OUTPUT - echo "✅ Stack deployed successfully" + echo "Stack deployed successfully" - name: Upload deployment artifacts if: steps.deployment.outputs.status == 'success' @@ -814,20 +825,20 @@ jobs: - name: Validate stack deployment run: | - echo "🔍 Validating deployed stack..." + echo "Validating deployed stack..." # Check stack status - stack_status=$(aws cloudformation describe-stacks \ - --stack-name ${{ needs.prepare.outputs.stack-name }} \ + stack_status="$(aws cloudformation describe-stacks \ + --stack-name "${{ needs.prepare.outputs.stack-name }}" \ --query 'Stacks[0].StackStatus' \ - --output text) + --output text)" echo "Stack status: $stack_status" if [[ "$stack_status" =~ ^(CREATE_COMPLETE|UPDATE_COMPLETE)$ ]]; then - echo "✅ Stack deployment validated successfully" + echo "Stack deployment validated successfully" else - echo "❌ Stack is in unexpected state: $stack_status" + echo "Stack is in unexpected state: $stack_status" exit 1 fi @@ -836,20 +847,20 @@ jobs: echo "🔍 Checking for infrastructure drift..." # Initiate drift detection - drift_id=$(aws cloudformation detect-stack-drift \ - --stack-name ${{ needs.prepare.outputs.stack-name }} \ + drift_id="$(aws cloudformation detect-stack-drift \ + --stack-name "${{ needs.prepare.outputs.stack-name }}" \ --query 'StackDriftDetectionId' \ - --output text) + --output text)" echo "Drift detection initiated: $drift_id" # Wait for drift detection to complete aws cloudformation wait stack-drift-detection-complete \ - --stack-drift-detection-id $drift_id + --stack-drift-detection-id "$drift_id" # Get drift detection results drift_status=$(aws cloudformation describe-stack-drift-detection-status \ - --stack-drift-detection-id $drift_id \ + --stack-drift-detection-id "$drift_id" \ --query 'StackDriftStatus' \ --output text) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..09a560e --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,22 @@ +name: 🔍 Lint Workflows + +on: + pull_request: + paths: + - ".github/workflows/**" + - ".github/actions/**" + +jobs: + actionlint: + name: Lint GitHub Actions + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install actionlint + run: | + bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) + + - name: Run actionlint + run: ./actionlint -color diff --git a/.github/workflows/magento-cloud-deploy.yml b/.github/workflows/magento-cloud-deploy.yml index 7fcd48c..d738d8c 100644 --- a/.github/workflows/magento-cloud-deploy.yml +++ b/.github/workflows/magento-cloud-deploy.yml @@ -101,9 +101,9 @@ jobs: run: | echo "📦 Installing Magento Cloud CLI..." 
           curl -fsS https://accounts.magento.cloud/cli/installer | php
-          export PATH=$HOME/.magento-cloud/bin:$PATH
-          echo "$HOME/.magento-cloud/bin" >> $GITHUB_PATH
-
+          export PATH="$HOME/.magento-cloud/bin:$PATH"
+          echo "$HOME/.magento-cloud/bin" >> "$GITHUB_PATH"
+
           # Verify installation
           magento-cloud --version
           echo "✅ Magento Cloud CLI installed successfully"
@@ -166,33 +166,41 @@ jobs:
       - name: Generate deployment summary
         run: |
-          echo "## 🛍️ Magento Cloud Deployment Summary" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
-          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
-          echo "| **Project ID** | ${{ inputs.magento-cloud-project-id }} |" >> $GITHUB_STEP_SUMMARY
-          echo "| **Environment** | ${{ inputs.environment }} |" >> $GITHUB_STEP_SUMMARY
-          echo "| **Deployment ID** | ${{ steps.deploy-info.outputs.id }} |" >> $GITHUB_STEP_SUMMARY
-          echo "| **Site URL** | [${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }}) |" >> $GITHUB_STEP_SUMMARY
-          echo "| **Git Commit** | ${{ github.sha }} |" >> $GITHUB_STEP_SUMMARY
-          echo "| **Deployed By** | ${{ github.actor }} |" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-
+          {
+            echo "## 🛍️ Magento Cloud Deployment Summary"
+            echo ""
+            echo "| Property | Value |"
+            echo "|----------|-------|"
+            echo "| **Project ID** | ${{ inputs.magento-cloud-project-id }} |"
+            echo "| **Environment** | ${{ inputs.environment }} |"
+            echo "| **Deployment ID** | ${{ steps.deploy-info.outputs.id }} |"
+            echo "| **Site URL** | [${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }}) |"
+            echo "| **Git Commit** | ${{ github.sha }} |"
+            echo "| **Deployed By** | ${{ github.actor }} |"
+            echo ""
+          } >> "$GITHUB_STEP_SUMMARY"
+
           case "${{ inputs.environment }}" in
             "production")
-              echo "### 🌍 Production Deployment" >> $GITHUB_STEP_SUMMARY
-              echo "Your Magento store is now live at:" >> $GITHUB_STEP_SUMMARY
-              echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**" >> $GITHUB_STEP_SUMMARY
+              {
+                echo "### 🌍 Production Deployment"
+                echo "Your Magento store is now live at:"
+                echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**"
+              } >> "$GITHUB_STEP_SUMMARY"
              ;;
            "staging")
-              echo "### 🚀 Staging Environment" >> $GITHUB_STEP_SUMMARY
-              echo "Staging environment updated successfully:" >> $GITHUB_STEP_SUMMARY
-              echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**" >> $GITHUB_STEP_SUMMARY
+              {
+                echo "### 🚀 Staging Environment"
+                echo "Staging environment updated successfully:"
+                echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**"
+              } >> "$GITHUB_STEP_SUMMARY"
              ;;
            *)
-              echo "### 🔧 Integration Environment" >> $GITHUB_STEP_SUMMARY
-              echo "Integration environment deployed for testing:" >> $GITHUB_STEP_SUMMARY
-              echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**" >> $GITHUB_STEP_SUMMARY
+              {
+                echo "### 🔧 Integration Environment"
+                echo "Integration environment deployed for testing:"
+                echo "**[${{ steps.deploy-info.outputs.url }}](${{ steps.deploy-info.outputs.url }})**"
+              } >> "$GITHUB_STEP_SUMMARY"
              ;;
          esac
@@ -299,11 +307,8 @@ jobs:
             CST_PROJECT_KEY="${{ vars.CST_PROJECT_KEY }}"
           fi

-          # Determine CST reporting key - input overrides workspace secret
+          # Determine CST reporting key from workflow secret
           CST_KEY="${{ secrets.cst-reporting-token }}"
-          if [ -z "$CST_KEY" ]; then
-            CST_KEY="${{ secrets.CST_REPORTING_TOKEN }}"
}}" - fi # Check if we have all required CST configuration if [ -z "$CST_ENDPOINT" ] || [ -z "$CST_PROJECT_KEY" ] || [ -z "$CST_KEY" ]; then diff --git a/.github/workflows/node-pr.yml b/.github/workflows/node-pr.yml index 8a66f3f..3d302c2 100644 --- a/.github/workflows/node-pr.yml +++ b/.github/workflows/node-pr.yml @@ -137,7 +137,7 @@ jobs: # Mask the value in logs echo "::add-mask::${value}" echo "Setting environment variable: ${key}" - echo "${key}=${value}" >> $GITHUB_ENV + echo "${key}=${value}" >> "$GITHUB_ENV" fi done <<< "${{ secrets.ENV_VARS }}" - name: Install dependencies @@ -207,7 +207,7 @@ jobs: # Mask the value in logs echo "::add-mask::${value}" echo "Setting environment variable: ${key}" - echo "${key}=${value}" >> $GITHUB_ENV + echo "${key}=${value}" >> "$GITHUB_ENV" fi done <<< "${{ secrets.ENV_VARS }}" @@ -270,7 +270,7 @@ jobs: # Mask the value in logs echo "::add-mask::${value}" echo "Setting environment variable: ${key}" - echo "${key}=${value}" >> $GITHUB_ENV + echo "${key}=${value}" >> "$GITHUB_ENV" fi done <<< "${{ secrets.ENV_VARS }}" @@ -330,7 +330,7 @@ jobs: fi # Convert to JSON array json=$(printf '%s\n' "${commands[@]}" | jq -R . | jq -s -c .) - echo "commands=$json" >> $GITHUB_OUTPUT + echo "commands=$json" >> "$GITHUB_OUTPUT" - name: Run checks concurrently uses: aligent/workflows/.github/actions/run-checks@main diff --git a/.github/workflows/nx-serverless-deployment.yml b/.github/workflows/nx-serverless-deployment.yml index 3151a34..01bad04 100644 --- a/.github/workflows/nx-serverless-deployment.yml +++ b/.github/workflows/nx-serverless-deployment.yml @@ -116,10 +116,10 @@ jobs: run: | echo "Checking repository type..." if [ -f "nx.json" ]; then - echo "is_monorepo=true" >> $GITHUB_OUTPUT + echo "is_monorepo=true" >> "$GITHUB_OUTPUT" echo "Repository is a monorepo (nx.json found)" else - echo "is_monorepo=false" >> $GITHUB_OUTPUT + echo "is_monorepo=false" >> "$GITHUB_OUTPUT" echo "Repository is not a monorepo (nx.json not found)" fi diff --git a/.github/workflows/php-quality-checks.yml b/.github/workflows/php-quality-checks.yml index be7826c..cdf2f7d 100644 --- a/.github/workflows/php-quality-checks.yml +++ b/.github/workflows/php-quality-checks.yml @@ -137,10 +137,10 @@ jobs: run: | if [ -f "composer.json" ]; then echo "✅ composer.json found" - echo "has-composer=true" >> $GITHUB_OUTPUT + echo "has-composer=true" >> "$GITHUB_OUTPUT" else echo "❌ composer.json not found" - echo "has-composer=false" >> $GITHUB_OUTPUT + echo "has-composer=false" >> "$GITHUB_OUTPUT" fi - name: Check for analysis tool configurations @@ -149,19 +149,19 @@ jobs: # Check for PHPStan if [ -f "phpstan.neon" ] || [ -f "phpstan.neon.dist" ] || [ -f "phpstan.dist.neon" ]; then echo "✅ PHPStan configuration found" - echo "has-phpstan-config=true" >> $GITHUB_OUTPUT + echo "has-phpstan-config=true" >> "$GITHUB_OUTPUT" else echo "ℹ️ No PHPStan configuration found, will use defaults" - echo "has-phpstan-config=false" >> $GITHUB_OUTPUT + echo "has-phpstan-config=false" >> "$GITHUB_OUTPUT" fi # Check for PHPCS if [ -f "phpcs.xml" ] || [ -f "phpcs.xml.dist" ] || [ -f "phpcs.dist.xml" ] || [ -f ".phpcs.xml" ]; then echo "✅ PHPCS configuration found" - echo "has-phpcs-config=true" >> $GITHUB_OUTPUT + echo "has-phpcs-config=true" >> "$GITHUB_OUTPUT" else echo "ℹ️ No PHPCS configuration found, will use standard" - echo "has-phpcs-config=false" >> $GITHUB_OUTPUT + echo "has-phpcs-config=false" >> "$GITHUB_OUTPUT" fi - name: Check for test configurations @@ -170,37 +170,37 @@ jobs: # Check for Unit 
           if [ -f "dev/tests/unit/phpunit.xml" ]; then
             echo "✅ PHPUnit configuration found"
-            echo "has-unit-tests=true" >> $GITHUB_OUTPUT
+            echo "has-unit-tests=true" >> "$GITHUB_OUTPUT"
           else
             echo "ℹ️ No PHPUnit configuration found"
-            echo "has-unit-tests=false" >> $GITHUB_OUTPUT
+            echo "has-unit-tests=false" >> "$GITHUB_OUTPUT"
           fi

           # Check for Integration tests
           if [ -f "dev/tests/integration/phpunit.xml" ]; then
             echo "✅ Integration test configuration found"
-            echo "has-integration-tests=true" >> $GITHUB_OUTPUT
+            echo "has-integration-tests=true" >> "$GITHUB_OUTPUT"
           else
             echo "ℹ️ No integration test configuration found"
-            echo "has-integration-tests=false" >> $GITHUB_OUTPUT
+            echo "has-integration-tests=false" >> "$GITHUB_OUTPUT"
           fi

           # Check for REST API tests
           if [ -f "dev/tests/api-functional/phpunit_rest.xml" ]; then
             echo "✅ REST API test configuration found"
-            echo "has-rest-api-tests=true" >> $GITHUB_OUTPUT
+            echo "has-rest-api-tests=true" >> "$GITHUB_OUTPUT"
           else
             echo "ℹ️ No REST API test configuration found"
-            echo "has-rest-api-tests=false" >> $GITHUB_OUTPUT
+            echo "has-rest-api-tests=false" >> "$GITHUB_OUTPUT"
           fi

           # Check for GraphQL tests
           if [ -f "dev/tests/api-functional/phpunit_graphql.xml" ]; then
             echo "✅ GraphQL test configuration found"
-            echo "has-graphql-tests=true" >> $GITHUB_OUTPUT
+            echo "has-graphql-tests=true" >> "$GITHUB_OUTPUT"
           else
             echo "ℹ️ No GraphQL test configuration found"
-            echo "has-graphql-tests=false" >> $GITHUB_OUTPUT
+            echo "has-graphql-tests=false" >> "$GITHUB_OUTPUT"
           fi
@@ -225,10 +225,10 @@ jobs:
       - name: Get composer cache directory
         id: composer-cache
-        run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
+        run: echo "dir=$(composer config cache-files-dir)" >> "$GITHUB_OUTPUT"

       - name: Cache Composer dependencies
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ${{ steps.composer-cache.outputs.dir }}
           key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
@@ -281,8 +281,9 @@ jobs:
           done

           if [ -n "$dirs" ]; then
-            echo "Creating archive with: $dirs"
+            echo "Creating archive with:$dirs"
             # Use --warning=no-file-changed to suppress warnings about files changing during archive
+            # shellcheck disable=SC2086
             tar --warning=no-file-changed -czf dependencies.tar.gz $dirs

             # Show archive size for debugging
@@ -392,7 +393,7 @@ jobs:
            fi

            # Execute PHPStan
-            eval $cmd
+            eval "$cmd"
           else
             echo "Installing PHPStan globally..."
             composer global require phpstan/phpstan:^1.0
@@ -408,7 +409,7 @@ jobs:
             fi

             # Execute PHPStan
-            eval $cmd
+            eval "$cmd"
           fi

   # PHP CodeSniffer style checks
@@ -481,7 +482,7 @@ jobs:
             if [ "${{ inputs.debug }}" = "true" ]; then
               cmd="$cmd --verbose"
             fi
-            eval $cmd
+            eval "$cmd"
           elif [ -f "vendor/bin/phpcs" ]; then
             echo "✅ Using vendor PHPCS with ${{ inputs.coding-standard }} standard"
             phpcs_cmd="vendor/bin/phpcs"
@@ -495,7 +496,7 @@ jobs:
               cmd="$cmd --verbose"
             fi

-            eval $cmd
+            eval "$cmd"
           else
             echo "Installing PHP CodeSniffer globally..."
             composer global require squizlabs/php_codesniffer:^3.0
@@ -515,7 +516,7 @@ jobs:
               cmd="$cmd --verbose"
             fi

-            eval $cmd
+            eval "$cmd"
           fi

   # PHPUnit testing with coverage
@@ -579,12 +580,12 @@ jobs:
             exit 1
           fi

-          # Unit tests need to be excuted from inside this directory
+          # Unit tests need to be executed from inside this directory
           # or we have pathing issues
           pushd dev/tests/unit

           # Build PHPUnit command with coverage
-          cmd='php -d memory_limit=${{ inputs.memory-limit }} -d xdebug.mode=coverage $phpunit_cmd -c ./phpunit.xml --testsuite="Unit Tests" --log-junit ./test-results/report.xml'
+          cmd="php -d memory_limit=${{ inputs.memory-limit }} -d xdebug.mode=coverage $phpunit_cmd -c ./phpunit.xml --testsuite=\"Unit Tests\" --log-junit ./test-results/report.xml"
           cmd="$cmd --coverage-text --coverage-clover=coverage.xml"

           # Add debug flags if enabled
@@ -593,7 +594,7 @@ jobs:
           fi

           # Execute PHPUnit
-          eval $cmd
+          eval "$cmd"

       - name: Check coverage threshold
         run: |
@@ -733,7 +734,7 @@ jobs:
           # Run integration tests
           if [ -f "vendor/bin/phpunit" ]; then
             php -d memory_limit=${{ inputs.memory-limit }} vendor/bin/phpunit \
-              -c $(pwd)/dev/tests/integration/phpunit.xml \
+              -c "$(pwd)/dev/tests/integration/phpunit.xml" \
               --log-junit ./test-results/integration-report.xml \
               --testsuite="Integration Tests" \
               ${{ inputs.debug && '--verbose' || '' }}
@@ -859,7 +860,7 @@ jobs:
           # Run REST API tests
           if [ -f "vendor/bin/phpunit" ]; then
             php -d memory_limit=${{ inputs.memory-limit }} vendor/bin/phpunit \
-              -c $(pwd)/dev/tests/api-functional/phpunit_rest.xml \
+              -c "$(pwd)/dev/tests/api-functional/phpunit_rest.xml" \
               --log-junit ./test-results/rest-api-report.xml \
               --testsuite="REST API Tests" \
               ${{ inputs.debug && '--verbose' || '' }}
@@ -869,8 +870,8 @@ jobs:
           fi

           # Kill the PHP server if it was started
-          if [ ! -z "$SERVER_PID" ]; then
-            kill $SERVER_PID || true
+          if [ -n "$SERVER_PID" ]; then
+            kill "$SERVER_PID" || true
           fi

       - name: Upload test results
@@ -990,7 +991,7 @@ jobs:
           # Run GraphQL tests
           if [ -f "vendor/bin/phpunit" ]; then
             php -d memory_limit=${{ inputs.memory-limit }} vendor/bin/phpunit \
-              -c $(pwd)/dev/tests/api-functional/phpunit_graphql.xml \
+              -c "$(pwd)/dev/tests/api-functional/phpunit_graphql.xml" \
               --log-junit ./test-results/graphql-report.xml \
               --testsuite="GraphQL API Tests" \
               ${{ inputs.debug && '--verbose' || '' }}
@@ -1000,8 +1001,8 @@ jobs:
           fi

           # Kill the PHP server if it was started
-          if [ ! -z "$SERVER_PID" ]; then
-            kill $SERVER_PID || true
+          if [ -n "$SERVER_PID" ]; then
+            kill "$SERVER_PID" || true
           fi

       - name: Upload test results
diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
new file mode 100644
index 0000000..b0e6ef5
--- /dev/null
+++ b/.github/workflows/pull-request.yml
@@ -0,0 +1,183 @@
+name: Pull Request
+
+on: [pull_request]
+
+jobs:
+  check-readme:
+    name: 📖 Check readme for updates
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+
+      - name: Compare package list against readme
+        run: |
+          # Count packages in directory
+          PACKAGES="$(find packages -maxdepth 1 -mindepth 1 -type d -printf '%f\n' | wc -l)"
+
+          # Count packages mentioned in README
+          find packages -maxdepth 1 -mindepth 1 -type d -printf '%f\n' > /tmp/package-list.txt
+          README_PACKAGES="$(grep -cFf /tmp/package-list.txt README.md || echo 0)"
+
+          if [ "$PACKAGES" -eq "$README_PACKAGES" ]; then
+            echo "README correctly lists all packages."
+            exit 0
+          else
+            echo "Mismatch: Found $PACKAGES packages, but README lists $README_PACKAGES."
+            exit 1
+          fi
+
+  install:
+    name: 📦 Install dependencies
+    runs-on: ubuntu-latest
+    outputs:
+      cache-hit: ${{ steps.cache-node-modules.outputs.cache-hit }}
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+
+      - name: Fetch target
+        run: git fetch origin ${{ env.PR_BASE_REF }}
+
+      - name: Enable Corepack
+        run: corepack enable
+
+      - uses: actions/setup-node@v6
+        with:
+          node-version-file: ".nvmrc"
+
+      - name: Install safe-chain
+        run: curl -fsSL https://github.com/AikidoSec/safe-chain/releases/latest/download/install-safe-chain.sh | sh -s -- --ci
+
+      - name: Cache node_modules
+        id: cache-node-modules
+        uses: actions/cache@v4
+        with:
+          path: |
+            node_modules
+            .yarn/cache
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('yarn.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-node-modules-
+
+      - name: Install dependencies
+        if: steps.cache-node-modules.outputs.cache-hit != 'true'
+        run: yarn install --frozen-lockfile
+
+    env:
+      PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
+
+  lint:
+    name: 🔍 Lint affected packages
+    runs-on: ubuntu-latest
+    needs: install
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+
+      - name: Fetch target
+        run: git fetch origin ${{ env.PR_BASE_REF }}
+
+      - name: Enable Corepack
+        run: corepack enable
+
+      - uses: actions/setup-node@v6
+        with:
+          node-version-file: ".nvmrc"
+
+      - name: Restore node_modules cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            node_modules
+            .yarn/cache
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('yarn.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-node-modules-
+
+      - name: Lint affected packages
+        run: yarn nx affected:lint --base=origin/${{ env.PR_BASE_REF }} --parallel --max-parallel=3
+
+    env:
+      PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
+
+  build:
+    name: 🏭 Build affected packages
+    runs-on: ubuntu-latest
+    needs: install
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+
+      - name: Fetch target
+        run: git fetch origin ${{ env.PR_BASE_REF }}
+
+      - name: Enable Corepack
+        run: corepack enable
+
+      - uses: actions/setup-node@v6
+        with:
+          node-version-file: ".nvmrc"
+
+      - name: Restore node_modules cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            node_modules
+            .yarn/cache
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('yarn.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-node-modules-
+
+      - name: Build affected packages
+        run: yarn nx affected:build --base=origin/${{ env.PR_BASE_REF }} --parallel --max-parallel=3
+
+    env:
+      PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
+
+  test:
+    name: 🧪 Run tests
+    runs-on: ubuntu-latest
+    needs: install
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+
+      - name: Fetch target
+        run: git fetch origin ${{ env.PR_BASE_REF }}
+
+      - name: Enable Corepack
+        run: corepack enable
+
+      - uses: actions/setup-node@v6
+        with:
+          node-version-file: ".nvmrc"
+
+      - name: Restore node_modules cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            node_modules
+            .yarn/cache
+          key: ${{ runner.os }}-node-modules-${{ hashFiles('yarn.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-node-modules-
+
+      - name: Build affected packages
+        run: yarn nx affected:build --base=origin/${{ env.PR_BASE_REF }} --parallel --max-parallel=3
+
+      - name: Test affected packages
+        run: yarn nx affected:test --base=origin/${{ env.PR_BASE_REF }} --parallel --max-parallel=3
+
+    env:
+      PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
diff --git a/.github/workflows/s3-deploy.yml b/.github/workflows/s3-deploy.yml
index 9e14450..0655b68 100644
--- a/.github/workflows/s3-deploy.yml
+++ b/.github/workflows/s3-deploy.yml
@@ -21,7 +21,6 @@ on:
       description: "Path to deploy"
       type: string
       required: true
-      default: ""
     delete-flag:
       description: "Enable --delete flag"
       type: boolean
@@ -53,22 +52,21 @@ jobs:
       - name: Deploy to S3
         run: |
-          s3_path=""
-          cache_control=""
-          extra_args=""
+          # Build command arguments as an array to handle quoting correctly
+          cmd=(aws s3 sync "${{ inputs.local-path }}" "s3://${{ inputs.s3-bucket }}${{ inputs.s3-path }}")

-          if [ -n "${{inputs.cache-control}}" ]; then
-            cache_control="--cache-control \"${{inputs.cache-control}}\""
+          if [ -n "${{ inputs.cache-control }}" ]; then
+            cmd+=(--cache-control "${{ inputs.cache-control }}")
           fi

-          command="aws s3 sync ${{inputs.local-path}} s3://${{inputs.s3-bucket}}${{inputs.s3-path}} ${cache_control} ${{inputs.extra-args}}"
-
-          if [ "${{inputs.delete-flag}}" = "true" ]; then
-            command="$command --delete"
+          if [ "${{ inputs.delete-flag }}" = "true" ]; then
+            cmd+=(--delete)
           fi

-          $command
-
-
-
+          # Add extra args if provided (word-split intentionally)
+          if [ -n "${{ inputs.extra-args }}" ]; then
+            # shellcheck disable=SC2086
+            cmd+=(${{ inputs.extra-args }})
+          fi
+          "${cmd[@]}"
diff --git a/.github/workflows/static-hosting-deployment.yml b/.github/workflows/static-hosting-deployment.yml
index 61f16b2..6176cb3 100644
--- a/.github/workflows/static-hosting-deployment.yml
+++ b/.github/workflows/static-hosting-deployment.yml
@@ -36,7 +36,6 @@ on:
       description: "Path to deploy"
       type: string
      required: true
-      default: ""
     DELETE_FLAG:
       description: "Enable __delete flag"
       type: string
@@ -81,7 +80,7 @@ jobs:
           if [ "$DELETE_FLAG_INPUT" = "true" ]; then
             DELETE_FLAG="--delete"
           fi
-          echo "DELETE_FLAG=$DELETE_FLAG" >> $GITHUB_ENV
+          echo "DELETE_FLAG=$DELETE_FLAG" >> "$GITHUB_ENV"
         env:
           DELETE_FLAG_INPUT: ${{ inputs.DELETE_FLAG }}