

 @log_excess_runtime(logger, debug_cutoff=0.01, msg='Inventory {inventory_id} host facts prepared for {written_ct} hosts, took {delta:.3f} s', add_log_data=True)
-def start_fact_cache(hosts, destination, log_data, timeout=None, inventory_id=None):
+def start_fact_cache(hosts, artifacts_dir, timeout=None, inventory_id=None, log_data=None):
+    log_data = log_data or {}
     log_data['inventory_id'] = inventory_id
     log_data['written_ct'] = 0
-    hosts_cached = list()
-    try:
-        os.makedirs(destination, mode=0o700)
-    except FileExistsError:
-        pass
+    hosts_cached = []
+
+    # Create the fact_cache directory inside artifacts_dir
+    fact_cache_dir = os.path.join(artifacts_dir, 'fact_cache')
+    os.makedirs(fact_cache_dir, mode=0o700, exist_ok=True)

     if timeout is None:
         timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT

-    last_filepath_written = None
+    last_write_time = None
+
     for host in hosts:
-        hosts_cached.append(host)
+        hosts_cached.append(host.name)
         if not host.ansible_facts_modified or (timeout and host.ansible_facts_modified < now() - datetime.timedelta(seconds=timeout)):
             continue  # facts are expired - do not write them

-        filepath = os.sep.join(map(str, [destination, host.name]))
-        if not os.path.realpath(filepath).startswith(destination):
-            system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
+        filepath = os.path.join(fact_cache_dir, host.name)
+        if not os.path.realpath(filepath).startswith(fact_cache_dir):
+            logger.error(f'facts for host {smart_str(host.name)} could not be cached')
             continue

         try:
             with codecs.open(filepath, 'w', encoding='utf-8') as f:
                 os.chmod(f.name, 0o600)
                 json.dump(host.ansible_facts, f)
             log_data['written_ct'] += 1
-            last_filepath_written = filepath
+            last_write_time = os.path.getmtime(filepath)
         except IOError:
-            system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
+            logger.error(f'facts for host {smart_str(host.name)} could not be cached')
             continue

-    if last_filepath_written:
-        return os.path.getmtime(last_filepath_written), hosts_cached
-
-    return None, hosts_cached
+    # Write summary file directly to the artifacts_dir
+    if inventory_id is not None:
+        summary_file = os.path.join(artifacts_dir, 'host_cache_summary.json')
+        summary_data = {
+            'last_write_time': last_write_time,
+            'hosts_cached': hosts_cached,
+            'written_ct': log_data['written_ct'],
+        }
+        with open(summary_file, 'w', encoding='utf-8') as f:
+            json.dump(summary_data, f, indent=2)


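For reference, the summary file written above is just the summary_data dict serialized via json.dump(..., indent=2). With two hosts it would look roughly like this on disk (hypothetical host names and timestamp):

    {
      "last_write_time": 1718000000.123456,
      "hosts_cached": ["web1.example.com", "db1.example.com"],
      "written_ct": 2
    }

Note that hosts_cached records every host name iterated, while written_ct counts only the fact files actually written; hosts whose facts were expired appear in the list but are not counted.
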
 @log_excess_runtime(
@@ -67,34 +75,54 @@ def start_fact_cache(hosts, destination, log_data, timeout=None, inventory_id=No
     msg='Inventory {inventory_id} host facts: updated {updated_ct}, cleared {cleared_ct}, unchanged {unmodified_ct}, took {delta:.3f} s',
     add_log_data=True,
 )
-def finish_fact_cache(hosts_cached, destination, facts_write_time, log_data, job_id=None, inventory_id=None):
+def finish_fact_cache(artifacts_dir, job_id=None, inventory_id=None, log_data=None):
+    log_data = log_data or {}
     log_data['inventory_id'] = inventory_id
     log_data['updated_ct'] = 0
     log_data['unmodified_ct'] = 0
     log_data['cleared_ct'] = 0
+    # The summary file is directly inside the artifacts dir
+    summary_path = os.path.join(artifacts_dir, 'host_cache_summary.json')
+    if not os.path.exists(summary_path):
+        logger.error(f'Missing summary file at {summary_path}')
+        return

-    hosts_cached = sorted((h for h in hosts_cached if h.id is not None), key=lambda h: h.id)
-
+    try:
+        with open(summary_path, 'r', encoding='utf-8') as f:
+            summary = json.load(f)
+        facts_write_time = os.path.getmtime(summary_path)  # After successful read
+    except (json.JSONDecodeError, OSError) as e:
+        logger.error(f'Error reading summary file at {summary_path}: {e}')
+        return
+
+    host_names = summary.get('hosts_cached', [])
+    hosts_cached = Host.objects.filter(name__in=host_names).order_by('id').iterator()
+    # Path where individual fact files were written
+    fact_cache_dir = os.path.join(artifacts_dir, 'fact_cache')
     hosts_to_update = []
+
     for host in hosts_cached:
-        filepath = os.sep.join(map(str, [destination, host.name]))
-        if not os.path.realpath(filepath).startswith(destination):
-            system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
+        filepath = os.path.join(fact_cache_dir, host.name)
+        if not os.path.realpath(filepath).startswith(fact_cache_dir):
+            logger.error(f'Invalid path for facts file: {filepath}')
             continue
+
         if os.path.exists(filepath):
             # If the file changed since we wrote the last facts file, pre-playbook run...
             modified = os.path.getmtime(filepath)
-            if (not facts_write_time) or modified > facts_write_time:
-                with codecs.open(filepath, 'r', encoding='utf-8') as f:
-                    try:
+            if not facts_write_time or modified >= facts_write_time:
+                try:
+                    with codecs.open(filepath, 'r', encoding='utf-8') as f:
                         ansible_facts = json.load(f)
-                    except ValueError:
-                        continue
+                except ValueError:
+                    continue
+
+                if ansible_facts != host.ansible_facts:
                     host.ansible_facts = ansible_facts
                     host.ansible_facts_modified = now()
                     hosts_to_update.append(host)
-                    system_tracking_logger.info(
-                        'New fact for inventory {} host {}'.format(smart_str(host.inventory.name), smart_str(host.name)),
+                    logger.info(
+                        f'New fact for inventory {smart_str(host.inventory.name)} host {smart_str(host.name)}',
                         extra=dict(
                             inventory_id=host.inventory.id,
                             host_name=host.name,
@@ -104,6 +132,8 @@ def finish_fact_cache(hosts_cached, destination, facts_write_time, log_data, job
                         ),
                     )
                     log_data['updated_ct'] += 1
+                else:
+                    log_data['unmodified_ct'] += 1
             else:
                 log_data['unmodified_ct'] += 1
         else:
@@ -112,9 +142,11 @@ def finish_fact_cache(hosts_cached, destination, facts_write_time, log_data, job
             host.ansible_facts = {}
             host.ansible_facts_modified = now()
             hosts_to_update.append(host)
-            system_tracking_logger.info('Facts cleared for inventory {} host {}'.format(smart_str(host.inventory.name), smart_str(host.name)))
+            logger.info(f'Facts cleared for inventory {smart_str(host.inventory.name)} host {smart_str(host.name)}')
             log_data['cleared_ct'] += 1
-        if len(hosts_to_update) > 100:
+
+        if len(hosts_to_update) >= 100:
             bulk_update_sorted_by_id(Host, hosts_to_update, fields=['ansible_facts', 'ansible_facts_modified'])
             hosts_to_update = []
+
     bulk_update_sorted_by_id(Host, hosts_to_update, fields=['ansible_facts', 'ansible_facts_modified'])
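
Taken together, the two functions now hand off state only through the artifacts directory instead of the old return-value/parameter threading. A minimal driver sketch, assuming an AWX-style context where an inventory and job are in scope (the names inventory and job and the tempfile location are illustrative, not part of this change):

    import tempfile

    # Hypothetical call sequence around a playbook run.
    artifacts_dir = tempfile.mkdtemp(prefix='awx_artifacts_')

    # Before the run: write unexpired host facts into artifacts_dir/fact_cache
    # and drop host_cache_summary.json next to it as the comparison baseline.
    start_fact_cache(inventory.hosts.all(), artifacts_dir, inventory_id=inventory.id)

    # ... playbook executes and may rewrite files under artifacts_dir/fact_cache ...

    # After the run: re-read any fact file whose mtime is at or after the
    # summary file's, and bulk-save only hosts whose facts actually changed.
    finish_fact_cache(artifacts_dir, job_id=job.id, inventory_id=inventory.id)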