name: Sync Winget Manifests

on:
  # Automatically run after Build Curated App List completes
  workflow_run:
    workflows: ["Build Curated App List"]
    types: [completed]
    branches: [main, master]
  schedule:
    # Run daily at 02:00 UTC
    - cron: "0 2 * * *"
  workflow_dispatch:
    inputs:
      app_ids:
        description: 'Specific app IDs to sync (comma-separated, or "all")'
        required: false
        default: "all"
      limit:
        description: "Total maximum apps to process (0 = all)"
        required: false
        default: "0"
        type: string
      mode:
        description: "Sync mode"
        required: false
        default: "all"
        type: choice
        options:
          - all # All apps (full refresh)
          - incremental # Only apps missing version data (latest_version is null)
          - new_only # Never-synced apps; currently uses the same latest_version filter as incremental
      force_refresh:
        description: "Force refresh even if version unchanged"
        required: false
        default: false
        type: boolean

# Prevent concurrent workflow runs (matrix jobs within a run are fine)
concurrency:
  group: curated-apps-pipeline
  cancel-in-progress: false

env:
  NODE_VERSION: "20"
  GITHUB_RAW_BASE: "https://raw.githubusercontent.com/microsoft/winget-pkgs/master/manifests"
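  # Layout of microsoft/winget-pkgs: manifests/<first-letter>/<Publisher>/<Name>/<Version>/<Id>.<type>.yaml,
  # e.g. .../manifests/m/Microsoft/PowerToys/0.75.1/Microsoft.PowerToys.installer.yaml
  # (illustrative version; buildManifestUrl in the sync job follows this convention).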
jobs:
  # ── Job 1: Plan ────────────────────────────────────────────────────────
  # Fast job that counts apps and generates a matrix of 1000-app shards
  # (CHUNK_SIZE below). Uses native fetch (Node 20) so no npm ci is needed.
  plan:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.generate.outputs.matrix }}
    if: >
      github.event_name == 'workflow_dispatch' ||
      github.event_name == 'schedule' ||
      (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
    steps:
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
      - name: Generate sync matrix
        id: generate
        env:
          SUPABASE_URL: ${{ secrets.NEXT_PUBLIC_SUPABASE_URL }}
          SUPABASE_SERVICE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
          APP_IDS: ${{ github.event.inputs.app_ids || 'all' }}
          APP_LIMIT: ${{ github.event.inputs.limit || '0' }}
          SYNC_MODE: ${{ github.event.inputs.mode || 'all' }}
        run: |
          node << 'PLANEOF'
          const fs = require('fs');

          const SUPABASE_URL = process.env.SUPABASE_URL;
          const SUPABASE_KEY = process.env.SUPABASE_SERVICE_KEY;
          const appIds = process.env.APP_IDS;
          const appLimit = parseInt(process.env.APP_LIMIT || '0', 10);
          const syncMode = process.env.SYNC_MODE || 'all';
          const CHUNK_SIZE = 1000;

          async function run() {
            let total = 0;
            if (appIds !== 'all') {
              total = appIds.split(',').length;
            } else {
              // Query count via Supabase REST API (no npm needed)
              let url = `${SUPABASE_URL}/rest/v1/curated_apps?select=winget_id`;
              if (syncMode === 'incremental' || syncMode === 'new_only') {
                url += '&latest_version=is.null';
              }
              const res = await fetch(url, {
                headers: {
                  'apikey': SUPABASE_KEY,
                  'Authorization': `Bearer ${SUPABASE_KEY}`,
                  'Prefer': 'count=exact',
                  'Range': '0-0'
                }
              });
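              // With Prefer: count=exact, PostgREST reports the total row count
              // after the slash, e.g. "Content-Range: 0-0/4321" -> total = 4321.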
              const contentRange = res.headers.get('content-range');
              if (contentRange && contentRange.includes('/')) {
                total = parseInt(contentRange.split('/')[1], 10);
              }
            }

            // Apply total limit
            if (appLimit > 0 && total > appLimit) {
              total = appLimit;
            }

            // Generate matrix chunks
            const chunks = [];
            if (appIds !== 'all') {
              // Single shard for specific IDs
              chunks.push({ shard: 1, offset: 0, chunk_size: total, total_shards: 1 });
            } else {
              for (let offset = 0; offset < total; offset += CHUNK_SIZE) {
                const size = Math.min(CHUNK_SIZE, total - offset);
                chunks.push({
                  shard: chunks.length + 1,
                  offset,
                  chunk_size: size,
                  total_shards: Math.ceil(total / CHUNK_SIZE)
                });
              }
            }

            // GitHub Actions requires at least one matrix entry
            if (chunks.length === 0) {
              chunks.push({ shard: 1, offset: 0, chunk_size: 0, total_shards: 1 });
            }
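            // Example for total=2500: {"include":[{"shard":1,"offset":0,"chunk_size":1000,"total_shards":3},
            // {"shard":2,"offset":1000,...},{"shard":3,"offset":2000,"chunk_size":500,...}]}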
            const matrix = JSON.stringify({ include: chunks });
            console.log(`Total apps: ${total}`);
            console.log(`Chunk size: ${CHUNK_SIZE}`);
            console.log(`Shards: ${chunks.length}`);
            console.log(`Matrix: ${matrix}`);
            fs.appendFileSync(process.env.GITHUB_OUTPUT, `matrix=${matrix}\n`);

            // Mark sync as running
            await fetch(`${SUPABASE_URL}/rest/v1/curated_sync_status`, {
              method: 'POST',
              headers: {
                'apikey': SUPABASE_KEY,
                'Authorization': `Bearer ${SUPABASE_KEY}`,
                'Content-Type': 'application/json',
                'Prefer': 'resolution=merge-duplicates'
              },
              body: JSON.stringify({
                id: 'sync-manifests',
                last_run_started_at: new Date().toISOString(),
                last_run_status: 'running',
                metadata: { mode: syncMode, total_apps: total, shards: chunks.length },
                updated_at: new Date().toISOString()
              })
            });
          }

          run().catch(err => { console.error('Plan failed:', err); process.exit(1); });
          PLANEOF

  # ── Job 2: Sync (matrix) ───────────────────────────────────────────────
  # Each shard processes a 1000-app slice in parallel.
  # Shards work on non-overlapping slices so there are no DB conflicts.
  sync:
    needs: plan
    runs-on: ubuntu-latest
    timeout-minutes: 30
    strategy:
      matrix: ${{ fromJson(needs.plan.outputs.matrix) }}
      fail-fast: false
      max-parallel: 3
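      # max-parallel caps how many shards run at once; together with the
      # per-shard batch sizing below, this is intended to keep total GitHub
      # API concurrency low.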
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.workflow_run.head_sha || github.ref }}
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "npm"
      - name: Install dependencies
        run: npm ci
      - name: "Sync shard ${{ matrix.shard }}/${{ matrix.total_shards }}"
        env:
          SUPABASE_URL: ${{ secrets.NEXT_PUBLIC_SUPABASE_URL }}
          SUPABASE_SERVICE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
          APP_IDS: ${{ github.event.inputs.app_ids || 'all' }}
          APP_OFFSET: ${{ matrix.offset }}
          APP_LIMIT: ${{ matrix.chunk_size }}
          SYNC_MODE: ${{ github.event.inputs.mode || 'all' }}
          FORCE_REFRESH: ${{ github.event.inputs.force_refresh || 'false' }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SHARD: ${{ matrix.shard }}
          TOTAL_SHARDS: ${{ matrix.total_shards }}
        run: |
          node << 'EOF'
          const fs = require('fs');
          const https = require('https');
          const { createClient } = require('@supabase/supabase-js');
          const yaml = require('yaml');

          const supabaseUrl = process.env.SUPABASE_URL;
          const supabaseKey = process.env.SUPABASE_SERVICE_KEY;
          if (!supabaseUrl || !supabaseKey) {
            console.error('Supabase credentials not configured');
            process.exit(1);
          }
          const supabase = createClient(supabaseUrl, supabaseKey);

          const GITHUB_RAW_BASE = process.env.GITHUB_RAW_BASE;
          const GITHUB_TOKEN = process.env.GITHUB_TOKEN;

          function fetchUrl(url, retries = 5) {
            return new Promise((resolve, reject) => {
              const attempt = (retriesLeft) => {
                const headers = { 'User-Agent': 'IntuneGet-Manifest-Sync' };
                if (GITHUB_TOKEN && url.includes('api.github.com')) {
                  headers['Authorization'] = `token ${GITHUB_TOKEN}`;
                }
                https.get(url, { headers }, (res) => {
                  if (res.statusCode === 404) {
                    // Drain the body so the socket is released
                    res.resume();
                    resolve(null);
                    return;
                  }
                  if (res.statusCode === 403 || res.statusCode === 429) {
                    // Use Retry-After header, cap at 30s to avoid extremely long waits
                    const retryAfter = Math.min(parseInt(res.headers['retry-after'] || '3', 10), 30);
                    if (retriesLeft > 0) {
                      res.resume();
                      setTimeout(() => attempt(retriesLeft - 1), retryAfter * 1000);
                      return;
                    }
                    // Drain response body before rejecting
                    res.resume();
                    reject(new Error(`HTTP ${res.statusCode} (rate limited, retries exhausted)`));
                    return;
                  }
                  if (res.statusCode !== 200) {
                    if (retriesLeft > 0 && res.statusCode >= 500) {
                      res.resume();
                      setTimeout(() => attempt(retriesLeft - 1), 1000);
                      return;
                    }
                    res.resume();
                    reject(new Error(`HTTP ${res.statusCode}`));
                    return;
                  }
                  let data = '';
                  res.on('data', chunk => data += chunk);
                  res.on('end', () => resolve(data));
                }).on('error', (err) => {
                  if (retriesLeft > 0) {
                    setTimeout(() => attempt(retriesLeft - 1), 1000);
                  } else {
                    reject(err);
                  }
                });
              };
              attempt(retries);
            });
          }

          function buildManifestUrl(wingetId, version, type = 'installer') {
            const parts = wingetId.split('.');
            if (parts.length < 2) return null;
            const publisher = parts[0];
            const namePath = parts.slice(1).join('/');
            const firstLetter = publisher.charAt(0).toLowerCase();
            return `${GITHUB_RAW_BASE}/${firstLetter}/${publisher}/${namePath}/${version}/${wingetId}.${type}.yaml`;
          }
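          // Illustrative: buildManifestUrl('Microsoft.PowerToys', '0.75.1') ->
          //   .../manifests/m/Microsoft/PowerToys/0.75.1/Microsoft.PowerToys.installer.yaml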
          function buildVersionListUrl(wingetId) {
            const parts = wingetId.split('.');
            if (parts.length < 2) return null;
            const publisher = parts[0];
            const namePath = parts.slice(1).map(p => encodeURIComponent(p)).join('/');
            const firstLetter = publisher.charAt(0).toLowerCase();
            return `https://api.github.com/repos/microsoft/winget-pkgs/contents/manifests/${firstLetter}/${encodeURIComponent(publisher)}/${namePath}`;
          }
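          // Illustrative: buildVersionListUrl('Microsoft.PowerToys') ->
          //   https://api.github.com/repos/microsoft/winget-pkgs/contents/manifests/m/Microsoft/PowerToys
          // (the contents API lists one directory per published version)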
          let versionFetchErrors = 0;

          async function fetchVersions(wingetId) {
            const url = buildVersionListUrl(wingetId);
            if (!url) return [];
            try {
              const response = await fetchUrl(url);
              if (!response) return [];
              const dirs = JSON.parse(response);
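              // Keep only version-shaped directory names, newest first; the
              // numeric collation orders e.g. "1.2.10" ahead of "1.2.9".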
              return dirs
                .filter(d => d.type === 'dir')
                .map(d => d.name)
                .filter(name => /^\d/.test(name))
                .filter(name => /^\d+[\d._-]*\d*$/.test(name) || name.includes('.'))
                .sort((a, b) => b.localeCompare(a, undefined, { numeric: true }));
            } catch (e) {
              versionFetchErrors++;
              // Log first few errors to help diagnose rate limiting
              if (versionFetchErrors <= 5) {
                console.error(`  [DEBUG] fetchVersions failed for ${wingetId}: ${e.message}`);
              }
              return [];
            }
          }

          async function fetchInstallerManifest(wingetId, version) {
            const url = buildManifestUrl(wingetId, version, 'installer');
            if (!url) return null;
            try {
              const content = await fetchUrl(url);
              if (!content) return null;
              return yaml.parse(content);
            } catch (e) {
              return null;
            }
          }

          async function fetchLocaleManifest(wingetId, version) {
            const locales = ['locale.en-US', 'locale'];
            for (const locale of locales) {
              const url = buildManifestUrl(wingetId, version, locale);
              if (!url) continue;
              try {
                const content = await fetchUrl(url);
                if (content) return yaml.parse(content);
              } catch (e) {
                // Continue to next locale
              }
            }
            return null;
          }

          async function syncManifests() {
            const appIds = process.env.APP_IDS;
            const appOffset = parseInt(process.env.APP_OFFSET || '0', 10);
            const chunkSize = parseInt(process.env.APP_LIMIT || '1000', 10); // fallback matches plan's CHUNK_SIZE
            const syncMode = process.env.SYNC_MODE || 'all';
            const forceRefresh = process.env.FORCE_REFRESH === 'true';
            const shard = process.env.SHARD || '1';
            const totalShards = process.env.TOTAL_SHARDS || '1';

            let apps = [];
            if (appIds === 'all') {
              if (chunkSize === 0) {
                console.log('No apps to sync in this shard.');
                fs.writeFileSync('./sync-results.json', JSON.stringify({
                  synced: 0, failed: 0, skipped: 0, newVersions: []
                }));
                return;
              }
              // Fetch this shard's slice using .range()
              let query = supabase
                .from('curated_apps')
                .select('winget_id, latest_version')
                .order('popularity_rank', { ascending: true, nullsFirst: false })
                .range(appOffset, appOffset + chunkSize - 1);
              if (syncMode === 'incremental' || syncMode === 'new_only') {
                query = query.is('latest_version', null);
              }
              const { data, error } = await query;
              if (error) throw error;
              apps = data || [];
            } else {
              apps = appIds.split(',').map(id => ({ winget_id: id.trim() }));
            }

            const shardCount = parseInt(totalShards, 10) || 1;
            // Scale batch size down and delay up when running multiple shards
            // to stay within GitHub's secondary rate limits (~100 concurrent requests)
            const batchSize = GITHUB_TOKEN ? Math.max(5, Math.floor(20 / shardCount)) : 5;
            const batchDelay = GITHUB_TOKEN ? Math.max(500, 500 * shardCount) : 3000;
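            // Example: 3 shards with a token -> batchSize = max(5, floor(20/3)) = 6,
            // batchDelay = max(500, 500*3) = 1500ms; unauthenticated runs go slower.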
            const delay = ms => new Promise(r => setTimeout(r, ms));

            console.log(`\n========================================`);
            console.log(`  Sync Winget Manifests - Shard ${shard}/${totalShards}`);
            console.log(`========================================`);
            console.log(`  Mode: ${syncMode}`);
            console.log(`  Force refresh: ${forceRefresh}`);
            console.log(`  Offset: ${appOffset}`);
            console.log(`  Apps in shard: ${apps.length}`);
            console.log(`  Batch size: ${batchSize}`);
            console.log(`  Batch delay: ${batchDelay}ms`);
            console.log(`  Authenticated: ${!!GITHUB_TOKEN}`);
            console.log(`========================================\n`);

            let synced = 0;
            let failed = 0;
            let skipped = 0;
            let failedApps = [];
            let skippedNoVersions = 0;
            let skippedExisting = 0;
            let skippedNoManifest = 0;
            let newVersions = [];
            const startTime = Date.now();

            for (let i = 0; i < apps.length; i += batchSize) {
              const batch = apps.slice(i, i + batchSize);
              await Promise.all(batch.map(async (app) => {
                try {
                  const versions = await fetchVersions(app.winget_id);
                  if (versions.length === 0) {
                    skipped++;
                    skippedNoVersions++;
                    return;
                  }
                  const latestVersion = versions[0];
                  if (!forceRefresh) {
                    // maybeSingle() resolves with null data (not an error) when no row exists
                    const { data: existing } = await supabase
                      .from('version_history')
                      .select('version')
                      .eq('winget_id', app.winget_id)
                      .eq('version', latestVersion)
                      .maybeSingle();
                    if (existing) {
                      skipped++;
                      skippedExisting++;
                      return;
                    }
                  }
                  const installerManifest = await fetchInstallerManifest(app.winget_id, latestVersion);
                  if (!installerManifest) {
                    failed++;
                    skippedNoManifest++;
                    failedApps.push({ id: app.winget_id, reason: 'no installer manifest' });
                    return;
                  }
                  const localeManifest = await fetchLocaleManifest(app.winget_id, latestVersion);

                  const rawInstallers = installerManifest.Installers || [];
                  const defaultInstaller = rawInstallers[0] || {};
                  const rootType = installerManifest.InstallerType || null;
                  const rootScope = installerManifest.Scope || null;
                  const rootSwitches = installerManifest.InstallerSwitches || null;
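                  // In winget manifests, root-level InstallerType/Scope/InstallerSwitches
                  // act as defaults that per-installer entries may override; merge them so
                  // each stored installer entry is self-contained.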
                  const installers = rawInstallers.map(inst => ({
                    ...inst,
                    InstallerType: inst.InstallerType || rootType,
                    Scope: inst.Scope || rootScope,
                    InstallerSwitches: inst.InstallerSwitches || rootSwitches,
                  }));

                  const versionRecord = {
                    winget_id: app.winget_id,
                    version: latestVersion,
                    installer_url: defaultInstaller.InstallerUrl || null,
                    installer_sha256: defaultInstaller.InstallerSha256 || null,
                    installer_type: defaultInstaller.InstallerType ||
                      installerManifest.InstallerType || null,
                    installer_scope: defaultInstaller.Scope ||
                      installerManifest.Scope || null,
                    silent_args: defaultInstaller.InstallerSwitches?.Silent ||
                      installerManifest.InstallerSwitches?.Silent ||
                      defaultInstaller.InstallerSwitches?.SilentWithProgress ||
                      installerManifest.InstallerSwitches?.SilentWithProgress || null,
                    installers,
                    minimum_os_version: installerManifest.MinimumOSVersion || null,
                    platform: installerManifest.Platform || null,
                    upgrade_behavior: installerManifest.UpgradeBehavior || null,
                    release_notes: localeManifest?.ReleaseNotes || null,
                    manifest_yaml: yaml.stringify(installerManifest),
                    manifest_fetched_at: new Date().toISOString(),
                    updated_at: new Date().toISOString()
                  };
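                  // Assumes version_history has a unique constraint on (winget_id, version);
                  // upsert with onConflict makes shard re-runs idempotent.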
                  const { error: vhError } = await supabase
                    .from('version_history')
                    .upsert(versionRecord, { onConflict: 'winget_id,version' });
                  if (vhError) {
                    console.error(`  [FAIL] ${app.winget_id}: DB upsert error - ${vhError.message}`);
                    failed++;
                    failedApps.push({ id: app.winget_id, reason: `DB: ${vhError.message}` });
                    return;
                  }

                  const { error: caError } = await supabase
                    .from('curated_apps')
                    .update({
                      latest_version: latestVersion,
                      description: localeManifest?.ShortDescription ||
                        localeManifest?.Description || null,
                      homepage: localeManifest?.PackageUrl ||
                        localeManifest?.PublisherUrl || null,
                      license: localeManifest?.License || null,
                      updated_at: new Date().toISOString()
                    })
                    .eq('winget_id', app.winget_id);
                  if (caError) {
                    console.error(`  [WARN] ${app.winget_id}: curated_apps update failed - ${caError.message}`);
                  }

                  if (app.latest_version && app.latest_version !== latestVersion) {
                    newVersions.push({
                      winget_id: app.winget_id,
                      old_version: app.latest_version,
                      new_version: latestVersion
                    });
                  }
                  synced++;
                } catch (e) {
                  failed++;
                  failedApps.push({ id: app.winget_id, reason: e.message || String(e) });
                }
              }));

              if (i + batchSize < apps.length) {
                await delay(batchDelay);
              }

              // Log progress every 5 batches and on completion
              const processed = Math.min(i + batchSize, apps.length);
              if ((i / batchSize) % 5 === 0 || processed >= apps.length) {
                const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
                const rate = (processed / (Date.now() - startTime) * 1000).toFixed(1);
                console.log(`  [${elapsed}s] ${processed}/${apps.length} | synced: ${synced} | failed: ${failed} | skipped: ${skipped} (no-versions: ${skippedNoVersions}, existing: ${skippedExisting}, no-manifest: ${skippedNoManifest}) | ${rate} apps/s`);
              }
            }

            const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);
            console.log(`\n========================================`);
            console.log(`  Shard ${shard}/${totalShards} Complete`);
            console.log(`========================================`);
            console.log(`  Duration: ${totalTime}s`);
            console.log(`  Synced: ${synced}`);
            console.log(`  Failed: ${failed}`);
            console.log(`  Skipped: ${skipped}`);
            console.log(`    - No versions found: ${skippedNoVersions}`);
            console.log(`    - Already up to date: ${skippedExisting}`);
            console.log(`    - No manifest found: ${skippedNoManifest}`);
            console.log(`========================================`);

            if (failedApps.length > 0) {
              console.log(`\nFailed apps (showing first 30):`);
              for (const f of failedApps.slice(0, 30)) {
                console.log(`  - ${f.id}: ${f.reason}`);
              }
              if (failedApps.length > 30) {
                console.log(`  ... and ${failedApps.length - 30} more`);
              }
            }

            if (newVersions.length > 0) {
              console.log(`\nNew versions detected:`);
              for (const v of newVersions.slice(0, 20)) {
                console.log(`  ${v.winget_id}: ${v.old_version} -> ${v.new_version}`);
              }
              if (newVersions.length > 20) {
                console.log(`  ... and ${newVersions.length - 20} more`);
              }
            }

            fs.writeFileSync('./sync-results.json', JSON.stringify({
              synced, failed, skipped, newVersions
            }));
          }

          syncManifests().then(() => {
            process.exit(0);
          }).catch(err => {
            console.error('Sync failed:', err);
            // Write partial results so the summary job can still aggregate
            if (!fs.existsSync('./sync-results.json')) {
              fs.writeFileSync('./sync-results.json', JSON.stringify({
                synced: 0, failed: 1, skipped: 0, newVersions: []
              }));
            }
            process.exit(1);
          });
          EOF
      - name: Upload shard results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: sync-results-shard-${{ matrix.shard }}
          path: sync-results.json
          retention-days: 1

  # ── Job 3: Summary ─────────────────────────────────────────────────────
  # Aggregates results from all shards and updates sync status.
  summary:
    needs: [plan, sync]
    if: always()
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
      - name: Download all shard results
        uses: actions/download-artifact@v4
        with:
          pattern: sync-results-shard-*
          path: results
      - name: Aggregate results
        id: aggregate
        env:
          SUPABASE_URL: ${{ secrets.NEXT_PUBLIC_SUPABASE_URL }}
          SUPABASE_SERVICE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
          SYNC_MODE: ${{ github.event.inputs.mode || 'all' }}
        run: |
          node << 'SUMMARYEOF'
          const fs = require('fs');
          const path = require('path');

          const resultsDir = './results';
          let totalSynced = 0, totalFailed = 0, totalSkipped = 0;
          let allNewVersions = [];
          let shardsFound = 0;
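          // download-artifact@v4 with a pattern extracts each artifact into its
          // own subdirectory, e.g. results/sync-results-shard-1/sync-results.json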
          if (fs.existsSync(resultsDir)) {
            const dirs = fs.readdirSync(resultsDir);
            for (const dir of dirs) {
              const filePath = path.join(resultsDir, dir, 'sync-results.json');
              if (fs.existsSync(filePath)) {
                const data = JSON.parse(fs.readFileSync(filePath, 'utf8'));
                totalSynced += data.synced || 0;
                totalFailed += data.failed || 0;
                totalSkipped += data.skipped || 0;
                allNewVersions = allNewVersions.concat(data.newVersions || []);
                shardsFound++;
              }
            }
          }

          console.log(`\n========================================`);
          console.log(`  Sync Complete (${shardsFound} shards)`);
          console.log(`========================================`);
          console.log(`  Synced: ${totalSynced}`);
          console.log(`  Failed: ${totalFailed}`);
          console.log(`  Skipped: ${totalSkipped}`);
          console.log(`  New versions: ${allNewVersions.length}`);
          console.log(`========================================`);

          fs.writeFileSync('./aggregate-results.json', JSON.stringify({
            synced: totalSynced,
            failed: totalFailed,
            skipped: totalSkipped,
            newVersions: allNewVersions
          }));

          // Update sync status in Supabase
          const SUPABASE_URL = process.env.SUPABASE_URL;
          const SUPABASE_KEY = process.env.SUPABASE_SERVICE_KEY;
          const syncMode = process.env.SYNC_MODE || 'all';
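          // The run is marked 'failed' only when nothing synced at all; partial
          // failures still record 'success', with counts in error_message/metadata.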
          async function updateStatus() {
            await fetch(`${SUPABASE_URL}/rest/v1/curated_sync_status`, {
              method: 'POST',
              headers: {
                'apikey': SUPABASE_KEY,
                'Authorization': `Bearer ${SUPABASE_KEY}`,
                'Content-Type': 'application/json',
                'Prefer': 'resolution=merge-duplicates'
              },
              body: JSON.stringify({
                id: 'sync-manifests',
                last_run_completed_at: new Date().toISOString(),
                last_run_status: totalFailed > 0 && totalSynced === 0 ? 'failed' : 'success',
                items_processed: totalSynced,
                error_message: totalFailed > 0 ? `${totalFailed} apps failed to sync` : null,
                metadata: {
                  mode: syncMode,
                  synced: totalSynced,
                  failed: totalFailed,
                  skipped: totalSkipped,
                  shards: shardsFound,
                  new_versions: allNewVersions.length
                },
                updated_at: new Date().toISOString()
              })
            });
          }

          updateStatus().catch(err => console.error('Failed to update sync status:', err));
          SUMMARYEOF
      - name: Create summary
        run: |
          if [ -f aggregate-results.json ]; then
            SYNCED=$(jq -r '.synced' aggregate-results.json)
            FAILED=$(jq -r '.failed' aggregate-results.json)
            SKIPPED=$(jq -r '.skipped' aggregate-results.json)
            NEW_VERSIONS=$(jq -r '.newVersions | length' aggregate-results.json)
            echo "## Manifest Sync Summary" >> "$GITHUB_STEP_SUMMARY"
            echo "" >> "$GITHUB_STEP_SUMMARY"
            echo "- **Mode:** ${{ github.event.inputs.mode || 'all' }}" >> "$GITHUB_STEP_SUMMARY"
            echo "- **Synced:** $SYNCED" >> "$GITHUB_STEP_SUMMARY"
            echo "- **Failed:** $FAILED" >> "$GITHUB_STEP_SUMMARY"
            echo "- **Skipped:** $SKIPPED" >> "$GITHUB_STEP_SUMMARY"
            echo "- **New versions detected:** $NEW_VERSIONS" >> "$GITHUB_STEP_SUMMARY"
            if [ "$NEW_VERSIONS" -gt 0 ]; then
              echo "" >> "$GITHUB_STEP_SUMMARY"
              echo "### New Versions (first 20)" >> "$GITHUB_STEP_SUMMARY"
              echo "" >> "$GITHUB_STEP_SUMMARY"
              jq -r '.newVersions[:20][] | "- **\(.winget_id):** \(.old_version) -> \(.new_version)"' aggregate-results.json >> "$GITHUB_STEP_SUMMARY"
            fi
          fi
      - name: Trigger scan workflow for new versions
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          if [ -f aggregate-results.json ]; then
            NEW_VERSIONS=$(jq -r '.newVersions | length' aggregate-results.json)
            if [ "$NEW_VERSIONS" -gt 0 ]; then
              # Forward only the first 10 IDs to keep the dispatch payload small
              APP_IDS=$(jq -r '[.newVersions[:10][].winget_id] | join(",")' aggregate-results.json)
              echo "Triggering scan workflow for: $APP_IDS"
              gh workflow run scan-apps.yml \
                -f app_ids="$APP_IDS" \
                -f trigger_source="sync-manifests" || echo "Scan workflow not available or failed to trigger"
            fi
          fi