
Commit aeb79d7
add status
1 parent 84d1eb3

File tree

1 file changed: +35 -11 lines changed


.github/workflows/upload_complete_workflow_metrics.py

Lines changed: 35 additions & 11 deletions
@@ -268,16 +268,36 @@ def post_all_metrics(self) -> None:
         """Upload complete workflow metrics including workflow, jobs, and steps in one operation"""
         print(f"Uploading complete metrics for workflow '{self.workflow_name}' (run {self.run_id})")
 
-        # Get workflow and jobs data from GitHub API
-        workflow_data = self.get_github_api_data(f"/repos/{self.repo}/actions/runs/{self.run_id}")
-        if not workflow_data:
-            print("Could not fetch workflow data from GitHub API")
-            return
+        # Wait for workflow to complete before uploading metrics
+        import time
+        max_retries = 5
+        retry_delay = 10  # seconds
+
+        for attempt in range(max_retries):
+            # Get workflow and jobs data from GitHub API
+            workflow_data = self.get_github_api_data(f"/repos/{self.repo}/actions/runs/{self.run_id}")
+            if not workflow_data:
+                print("Could not fetch workflow data from GitHub API")
+                return
+
+            jobs_data = self.get_github_api_data(f"/repos/{self.repo}/actions/runs/{self.run_id}/jobs")
+            if not jobs_data or 'jobs' not in jobs_data:
+                print("Could not fetch jobs data from GitHub API")
+                return
 
-        jobs_data = self.get_github_api_data(f"/repos/{self.repo}/actions/runs/{self.run_id}/jobs")
-        if not jobs_data or 'jobs' not in jobs_data:
-            print("Could not fetch jobs data from GitHub API")
-            return
+            # Check if workflow is completed
+            workflow_status = workflow_data.get('status', '')
+            workflow_conclusion = workflow_data.get('conclusion')
+
+            if workflow_status == 'completed' or workflow_conclusion:
+                print(f"Workflow completed with status: {workflow_status}, conclusion: {workflow_conclusion}")
+                break
+            elif attempt < max_retries - 1:
+                print(f"Workflow still {workflow_status}, waiting {retry_delay}s before retry {attempt + 1}/{max_retries}")
+                time.sleep(retry_delay)
+            else:
+                print(f"Workflow still {workflow_status} after {max_retries} attempts, uploading current state")
+                break
 
         # Upload workflow metrics
         try:
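
The retry loop introduced in this hunk polls the run until GitHub reports it complete, then falls through to the existing upload code. A minimal standalone sketch of that polling pattern, assuming a hypothetical fetch_run callable standing in for the script's get_github_api_data helper:

import time

def wait_for_run_completion(fetch_run, max_retries=5, retry_delay=10):
    """Poll a workflow run until GitHub reports it complete, then return its payload.

    fetch_run is a hypothetical zero-argument callable that returns the run
    dict, or None on an API error; the real script uses get_github_api_data.
    """
    run = None
    for attempt in range(max_retries):
        run = fetch_run()
        if run is None:
            return None  # API error: give up, mirroring the early return in the hunk
        if run.get('status') == 'completed' or run.get('conclusion'):
            return run  # the API sets conclusion only once a run has finished
        if attempt < max_retries - 1:
            print(f"Run still {run.get('status')}, retrying in {retry_delay}s")
            time.sleep(retry_delay)
    return run  # still unfinished after max_retries; caller uploads the current state

# Example usage with a stub that completes on the second poll:
payloads = iter([{'status': 'in_progress', 'conclusion': None},
                 {'status': 'completed', 'conclusion': 'success'}])
print(wait_for_run_completion(lambda: next(payloads), retry_delay=0)['conclusion'])  # success

Checking conclusion as well as status is a belt-and-braces exit condition: conclusion stays null while a run is in progress, so either signal is enough to stop polling.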
@@ -450,7 +470,12 @@ def _upload_single_job_metrics(self, job_data: Dict[str, Any]) -> None:
         # Schema fields
         db_data[FIELD_JOB_ID] = str(job_id)
         db_data[FIELD_WORKFLOW_ID] = str(self.run_id)
-        db_data[FIELD_STATUS] = job_data.get('conclusion', job_data.get('status', 'unknown'))
+        # Handle job status - prefer conclusion for completed jobs, fallback to status
+        job_status = job_data.get('conclusion') or job_data.get('status', 'unknown')
+        # Don't upload jobs with null/None status as they cause Grafana filtering issues
+        if job_status is None:
+            job_status = 'in_progress'
+        db_data[FIELD_STATUS] = job_status
         db_data[FIELD_BRANCH] = self.ref_name
         db_data[FIELD_RUNNER_INFO] = job_data.get('runner_name', 'unknown')
 
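The substantive part of this hunk is swapping the .get() default for or-chaining: for a job that has not finished, the GitHub API returns conclusion as null, so the key is present and .get('conclusion', fallback) yields None rather than the fallback, which appears to be the source of the null statuses the Grafana comment guards against. A small self-contained illustration using a hypothetical in-progress job payload:

# Hypothetical payload for a job that has not finished yet: the API reports
# conclusion as null until the job completes.
in_progress_job = {'status': 'in_progress', 'conclusion': None}

# Old behaviour: the key exists, so the .get() default is never consulted.
old_value = in_progress_job.get('conclusion', in_progress_job.get('status', 'unknown'))

# New behaviour: None is falsy, so the or-chain falls through to status.
new_value = in_progress_job.get('conclusion') or in_progress_job.get('status', 'unknown')

print(old_value)  # None
print(new_value)  # in_progress

The remaining None guard only fires if the payload carries status as null as well, in which case the job is recorded as in_progress rather than uploaded with an empty status.
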
@@ -514,7 +539,6 @@ def _upload_job_step_metrics(self, job_data: Dict[str, Any]) -> int:
     def _upload_single_step_metrics(self, step_data: Dict[str, Any], job_data: Dict[str, Any], step_index: int) -> None:
         """Extract and post metrics for a single step"""
         # Extract step metrics using standardized functions
-        return
         db_data = {}
         job_id = job_data['id']
         job_name = job_data['name']
