 
 # Common fields across all metric types
 FIELD_ID = "_id"
-FIELD_USER_ALIAS = "s_user_alias"
+FIELD_USER_ALIAS = "s_user_alias"  # extra field; candidate for removal after testing
 FIELD_REPO = "s_repo"
 FIELD_WORKFLOW_NAME = "s_workflow_name"
 FIELD_GITHUB_EVENT = "s_github_event"
-FIELD_BRANCH = "s_branch"
-FIELD_STATUS = "s_status"
+FIELD_BRANCH = "s_branch"  # extra field; candidate for removal after testing
+FIELD_STATUS = "s_status"  # duplicated per metric type; consider consolidating into the common fields
+FIELD_WORKFLOW_ID = "s_workflow_id"
 
 # Timing fields
 FIELD_CREATION_TIME = "ts_creation_time"
 FIELD_DURATION_SEC = "l_duration_sec"
 
 # Workflow-specific fields
-FIELD_WORKFLOW_ID = "s_workflow_id"
 FIELD_COMMIT_SHA = "s_commit_sha"
-FIELD_JOBS = "s_jobs"  # Comma-separated job IDs
+# FIELD_JOBS = "s_jobs"  # Comma-separated job IDs
 
 # Job-specific fields
 FIELD_JOB_ID = "s_job_id"
 FIELD_JOB_NAME = "s_job_name"
-FIELD_RUNNER_INFO = "s_runner_info"
+# FIELD_RUNNER_INFO = "s_runner_info"
 FIELD_RUNNER_ID = "s_runner_id"
 FIELD_RUNNER_NAME = "s_runner_name"
-FIELD_WORKFLOW_SOURCE = "s_workflow_source"
-FIELD_LABELS = "s_labels"  # Comma-separated labels
-FIELD_STEPS = "s_steps"  # Comma-separated step IDs
+# FIELD_LABELS = "s_labels"  # Comma-separated labels
+# FIELD_STEPS = "s_steps"  # Comma-separated step IDs
 
 # Step-specific fields
 FIELD_STEP_ID = "s_step_id"
 FIELD_NAME = "s_step_name"
 FIELD_STEP_NUMBER = "l_step_number"
 FIELD_COMMAND = "s_command"
-FIELD_JOB_LABELS = "s_job_labels"  # Comma-separated labels
+# FIELD_JOB_LABELS = "s_job_labels"  # Comma-separated labels
 
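
The field constants encode the intended OpenSearch type in a name prefix: s_ for strings, l_ for longs, and ts_ for ISO-8601 timestamps. A minimal sketch of a pre-upload check built on that convention (PREFIX_TYPES and validate_doc are illustrative names, not part of this module):

    from typing import Any, Dict

    # Assumption: ts_ fields hold ISO-8601 strings, so they are validated as str here.
    PREFIX_TYPES = {"s_": str, "l_": int, "ts_": str}

    def validate_doc(doc: Dict[str, Any]) -> None:
        """Raise if a value does not match the type its field-name prefix promises."""
        for key, value in doc.items():
            for prefix, expected in PREFIX_TYPES.items():
                if key.startswith(prefix) and not isinstance(value, expected):
                    raise TypeError(f"{key}: expected {expected.__name__}, got {type(value).__name__}")
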
 class TimingProcessor:
     """Centralized processor for all datetime and duration conversions using Python built-ins"""
@@ -209,6 +208,7 @@ def add_common_context_fields(self, db_data: Dict[str, Any]) -> None:
         db_data[FIELD_WORKFLOW_NAME] = self.workflow_name
         db_data[FIELD_GITHUB_EVENT] = self.event_name
         db_data[FIELD_BRANCH] = self.ref_name
+        db_data[FIELD_WORKFLOW_ID] = str(self.run_id)
 
     def add_standardized_timing_fields(self, db_data: Dict[str, Any], creation_time: str, start_time: str, end_time: str,
                                        metric_type: str = "workflow") -> None:
@@ -232,8 +232,7 @@ def add_standardized_timing_fields(self, db_data: Dict[str, Any], creation_time:
 
         # Queue time in integer seconds (using l_ prefix for long type)
         if metric_type != "step":
-            queue_seconds = TimingProcessor.calculate_time_diff(creation_time, start_time)
-            db_data[FIELD_QUEUE_TIME] = queue_seconds
+            db_data[FIELD_QUEUE_TIME] = TimingProcessor.calculate_time_diff(creation_time, start_time)
 
         # Add @timestamp field for Grafana/OpenSearch indexing (CRITICAL FIX!)
         # Use the end_time if available, otherwise use current time
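
TimingProcessor.calculate_time_diff is referenced above but not shown in this diff. A minimal sketch of what it plausibly does, assuming GitHub's ISO-8601 timestamps (e.g. 2024-01-01T00:02:30Z) and only Python built-ins, as the class docstring promises:

    from datetime import datetime

    class TimingProcessor:  # sketch of the static method referenced above
        @staticmethod
        def calculate_time_diff(start: str, end: str) -> int:
            """Whole-second difference between two ISO-8601 timestamps, 0 if either is missing."""
            if not start or not end:
                return 0
            # GitHub API timestamps end in 'Z', which fromisoformat() only accepts from Python 3.11
            start_dt = datetime.fromisoformat(start.replace('Z', '+00:00'))
            end_dt = datetime.fromisoformat(end.replace('Z', '+00:00'))
            return int((end_dt - start_dt).total_seconds())

    # e.g. calculate_time_diff('2024-01-01T00:00:00Z', '2024-01-01T00:02:30Z') -> 150
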
@@ -300,14 +299,14 @@ def post_all_metrics(self) -> None:
     def _upload_workflow_metrics(self, workflow_data: Dict[str, Any], jobs_data: Dict[str, Any]) -> None:
         """Internal method to upload workflow metrics"""
         db_data = {}
-        db_data[FIELD_ID] = f"real-workflow-{self.run_id}"
+        db_data[FIELD_ID] = f"github-workflow-{self.run_id}"
 
         # Schema fields
-        db_data[FIELD_WORKFLOW_ID] = str(self.run_id)
         # Use conclusion for completed workflows, fall back to status
         db_data[FIELD_STATUS] = str(workflow_data.get('conclusion') or workflow_data.get('status', 'unknown'))
-        db_data[FIELD_BRANCH] = str(workflow_data.get('head_branch', self.ref_name))
+        # db_data[FIELD_BRANCH] = str(workflow_data.get('head_branch', self.ref_name))
+        print(f"Checking branch: {workflow_data.get('head_branch')}")
         db_data[FIELD_COMMIT_SHA] = str(workflow_data.get('head_sha', self.sha))
         # Timing fields - fix parameter order for correct duration/queue time calculation
         created_at = workflow_data.get('created_at')
@@ -320,16 +319,22 @@ def _upload_workflow_metrics(self, workflow_data: Dict[str, Any], jobs_data: Dic
         self.add_common_context_fields(db_data)
 
         # Override userAlias with actor from API if available
+        """
         actor = workflow_data.get('actor', {})
         if actor and actor.get('login'):
             db_data[FIELD_USER_ALIAS] = actor.get('login')
+        """
+        actor = workflow_data.get('actor', {})
+        print(f"Checking actor: {actor.get('login')}")
 
         # Add jobs list as a comma-separated string (using s_ prefix)
+        """
         if jobs_data and 'jobs' in jobs_data:
             job_ids = [str(job['id']) for job in jobs_data['jobs']]
             db_data[FIELD_JOBS] = ','.join(job_ids)
         else:
             db_data[FIELD_JOBS] = ''
+        """
 
         self.post_to_db(self.workflow_index, db_data)
 
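
post_to_db itself is outside this diff. A minimal sketch of the upload path, assuming an OpenSearch-compatible HTTP endpoint and the requests library; self.base_url and self.auth are illustrative attribute names:

    import requests

    def post_to_db(self, index: str, db_data: dict) -> None:
        """Index one document, using FIELD_ID as the document ID rather than a body field."""
        doc_id = db_data.pop(FIELD_ID)
        resp = requests.put(
            f"{self.base_url}/{index}/_doc/{doc_id}",  # PUT with an explicit ID creates or replaces
            json=db_data,
            auth=self.auth,
            timeout=30,
        )
        resp.raise_for_status()
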
@@ -393,21 +398,18 @@ def _upload_single_job_metrics(self, job_data: Dict[str, Any]) -> None:
         job_id = job_data['id']
         job_name = job_data['name']
 
-        db_data[FIELD_ID] = f"real-job-{job_id}"
+        db_data[FIELD_ID] = f"github-job-{job_id}"
 
         # Schema fields
         db_data[FIELD_JOB_ID] = str(job_id)
-        db_data[FIELD_WORKFLOW_ID] = str(self.run_id)
         # Handle job status - prefer conclusion for completed jobs, fall back to status
-        job_status = job_data.get('conclusion') or job_data.get('status', 'unknown')
+        job_status = str(job_data.get('conclusion') or job_data.get('status', 'unknown'))
         # Don't upload jobs with a null status as it causes Grafana filtering issues;
         # note str() turns a null status into the string 'None'
-        if job_status is None:
+        if job_status == 'None':
             job_status = 'in_progress'
         db_data[FIELD_STATUS] = str(job_status)
-        db_data[FIELD_BRANCH] = str(self.ref_name)
-        db_data[FIELD_RUNNER_INFO] = str(job_data.get('runner_name', 'unknown'))
+        # db_data[FIELD_RUNNER_INFO] = str(job_data.get('runner_name', 'unknown'))
 
-        db_data[FIELD_WORKFLOW_SOURCE] = str(self.event_name)
         db_data[FIELD_JOB_NAME] = str(job_name)
@@ -418,19 +420,23 @@ def _upload_single_job_metrics(self, job_data: Dict[str, Any]) -> None:
         self.add_standardized_timing_fields(db_data, created_at, started_at, completed_at, "job")
 
         # Labels - convert array to comma-separated string to avoid indexing issues
+        """
         runner_labels = job_data.get('labels', [])
         if runner_labels:
             db_data[FIELD_LABELS] = ','.join(runner_labels)
         else:
             db_data[FIELD_LABELS] = 'unknown'
+        """
 
         # Add steps list (step IDs) - convert to a string to avoid array issues
+        """
         steps = job_data.get('steps', [])
         if steps:
             step_ids = [f"{job_id}_{step.get('number', i+1)}" for i, step in enumerate(steps)]
             db_data[FIELD_STEPS] = ','.join(step_ids)  # Convert array to comma-separated string
         else:
             db_data[FIELD_STEPS] = ''
+        """
 
         # Runner info
         runner_id = job_data.get('runner_id')
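
The disabled blocks above flatten label and step arrays into comma-separated strings because array-valued fields complicate keyword indexing and Grafana filtering. The round-trip is lossless as long as individual labels contain no commas, which GitHub runner labels do not; a quick illustration:

    labels = ["self-hosted", "linux", "x64"]
    flat = ','.join(labels)             # "self-hosted,linux,x64" - a single keyword value
    assert flat.split(',') == labels    # recoverable on the query side if ever needed
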
@@ -477,12 +483,11 @@ def _upload_single_step_metrics(self, step_data: Dict[str, Any], job_data: Dict[
 
         # Create a unique step ID using the standardized ID scheme
         step_id = f"{job_id}_{step_number}"
-        db_data[FIELD_ID] = f"real-step-{step_id}"
+        db_data[FIELD_ID] = f"github-step-{step_id}"
 
         # Schema-compliant fields
         db_data[FIELD_STEP_ID] = str(step_id)
         db_data[FIELD_JOB_ID] = str(job_id)
-        db_data[FIELD_WORKFLOW_ID] = str(self.run_id)
         db_data[FIELD_NAME] = str(step_name)
         db_data[FIELD_STEP_NUMBER] = int(step_number)  # Using l_ prefix, so this should be an integer
         db_data[FIELD_STATUS] = str(step_data.get('conclusion', step_data.get('status', 'unknown')))
@@ -507,15 +512,17 @@ def _upload_single_step_metrics(self, step_data: Dict[str, Any], job_data: Dict[
         self.add_common_context_fields(db_data)
 
         # Job context - ensure all fields are strings
-        db_data[FIELD_RUNNER_NAME] = str(job_data.get('runner_name', ''))
-        db_data[FIELD_RUNNER_ID] = str(job_data.get('runner_id')) if job_data.get('runner_id') is not None else ''
+        # db_data[FIELD_RUNNER_NAME] = str(job_data.get('runner_name', ''))
+        # db_data[FIELD_RUNNER_ID] = str(job_data.get('runner_id')) if job_data.get('runner_id') is not None else ''
 
         # Job labels (separate from step labels) - convert array to string
+        """
         runner_labels = job_data.get('labels', [])
         if runner_labels:
             db_data[FIELD_JOB_LABELS] = ','.join(runner_labels)
         else:
             db_data[FIELD_JOB_LABELS] = 'unknown'
+        """
 
         self.post_to_db(self.steps_index, db_data)
         print(f"Uploaded metrics for step: {step_name} (step {step_number})")
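
The deterministic document IDs (github-workflow-{run_id}, github-job-{job_id}, github-step-{job_id}_{step_number}) make re-runs idempotent: in OpenSearch, a PUT to /{index}/_doc/{id} replaces any existing document with that ID instead of creating a duplicate. A sketch against a hypothetical local endpoint:

    import requests

    # The second PUT to the same ID overwrites the first document (bumping _version),
    # so retried uploads never double-count a step.
    url = "http://localhost:9200/steps_index/_doc/github-step-123456_3"
    requests.put(url, json={"s_status": "failure"}, timeout=30)   # creates the doc
    requests.put(url, json={"s_status": "success"}, timeout=30)   # replaces it
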