11import ipaddress
22import logging
33import os
4- import sys
54from collections import defaultdict
65from concurrent .futures import as_completed
76from datetime import datetime
@@ -84,6 +83,10 @@ def __init__(self,
8483
8584 # URLs to the API
8685 url = url .rstrip ('/' )
86+ if url .endswith ('/api/v1' ):
87+ self .reference ['reference_url_info' ] = url [:- len ('/api/v1' )]
88+ else :
89+ logging .warning (f'Data URL does not end with "/api/v1", will not set info URL: { url } ' )
8790 self .urls = {
8891 'routeservers' : f'{ url } /routeservers' ,
8992 'neighbors' : url + '/routeservers/{rs}/neighbors' ,
@@ -97,6 +100,8 @@ def __init__(self,
97100 # List of neighbor dicts. Each dict contains information about the route server,
98101 # so we do not keep track of that separately.
99102 self .neighbors = list ()
103+ # Dict mapping routeserver_id to the cache time of that server.
104+ self .routeserver_cached_at = dict ()
100105 # Dict mapping (routeserver_id, neighbor_id) tuple to a list of route dicts.
101106 self .routes = dict ()
102107 # If routes should be fetched or not.
@@ -123,8 +128,6 @@ def decode_json(resp: Response, *args, **kwargs) -> None:
123128 try :
124129 resp .data = resp .json ()
125130 except JSONDecodeError as e :
126- print (f'Failed to retrieve data for { resp .url } ' , file = sys .stderr )
127- print (f'Error while reading json data: { e } ' , file = sys .stderr )
128131 logging .error (f'Error while reading json data: { e } ' )
129132 logging .error (resp .status_code )
130133 logging .error (resp .headers )
@@ -160,8 +163,6 @@ def fetch_urls(self, urls: list, additional_data=list()) -> Iterable:
160163 except Exception as e :
161164 logging .error (f'Failed to retrieve data for { future } ' )
162165 logging .error (e )
163- print (f'Failed to retrieve data for { future } ' , file = sys .stderr )
164- print (e , file = sys .stderr )
165166 return False , dict (), None
166167
167168 def fetch_url (self , url : str ) -> Tuple [bool , dict ]:
@@ -177,7 +178,6 @@ def __fetch_routeservers(self) -> None:
177178 logging .info ('Using cached route server information.' )
178179 self .routeservers = self .cache_handler .load_cached_object (routeserver_object_name )
179180 else :
180- print (f'Fetching route servers from { self .urls ["routeservers" ]} ' )
181181 logging .info (f'Fetching route servers from { self .urls ["routeservers" ]} ' )
182182 is_ok , routeservers_root = self .fetch_url (self .urls ['routeservers' ])
183183 if not is_ok :
@@ -190,28 +190,61 @@ def __fetch_neighbors(self) -> None:
190190 neighbor_object_name = 'neighbors'
191191 if self .cache_handler .cached_object_exists (neighbor_object_name ):
192192 logging .info ('Using cached neighbor information.' )
193- self .neighbors = self .cache_handler .load_cached_object (neighbor_object_name )
193+ neighbor_object = self .cache_handler .load_cached_object (neighbor_object_name )
194+ self .routeserver_cached_at = neighbor_object ['routeserver_cached_at' ]
195+ self .neighbors = neighbor_object ['neighbors' ]
194196 else :
195- print (f'Fetching neighbor information from { len (self .routeservers )} route servers.' )
196197 logging .info (f'Fetching neighbor information from { len (self .routeservers )} route servers.' )
197198 neighbor_urls = [self .urls ['neighbors' ].format (rs = rs ['id' ]) for rs in self .routeservers ]
198199 failed_routeservers = list ()
199- for is_ok , neighbor_list_root , routeserver_id in self .fetch_urls (neighbor_urls ,
200- additional_data = self .routeservers ):
200+ for is_ok , neighbor_list_root , routeserver in self .fetch_urls (neighbor_urls ,
201+ additional_data = self .routeservers ):
202+ routeserver_id = routeserver ['id' ]
201203 if not is_ok :
202204 failed_routeservers .append (routeserver_id )
203205 continue
206+ try :
207+ cached_at_str = neighbor_list_root ['api' ]['cache_status' ]['cached_at' ]
208+ except KeyError :
209+ cached_at_str = str ()
210+ if cached_at_str :
211+ cached_at = None
212+ # Alice-LG uses nanosecond-granularity timestamps, which are not
213+ # valid ISO format...
214+ try :
215+ pre , suf = cached_at_str .rsplit ('.' , maxsplit = 1 )
216+ if suf .endswith ('Z' ):
217+ # UTC
218+ frac_seconds = suf [:- 1 ]
219+ tz_suffix = '+00:00'
220+ elif '+' in suf :
221+ # Hopefully a timezone identifier of form +HH:MM
222+ frac_seconds , tz_suffix = suf .split ('+' )
223+ tz_suffix = '+' + tz_suffix
224+ else :
225+ raise ValueError (f'Failed to get timezone from timestamp: { cached_at_str } ' )
226+ if not frac_seconds .isdigit ():
227+ raise ValueError (f'Fractional seconds are not digits: { cached_at_str } ' )
228+ # Reduce to six digits (ms).
229+ frac_seconds = frac_seconds [:6 ]
230+ cached_at_str = f'{ pre } .{ frac_seconds } { tz_suffix } '
231+ cached_at = datetime .fromisoformat (cached_at_str )
232+ except ValueError as e :
233+ logging .warning (f'Failed to get cached_at timestamp for routeserver "{ routeserver_id } ": { e } ' )
234+ if cached_at :
235+ self .routeserver_cached_at [routeserver_id ] = cached_at
204236 # Spelling of neighbors/neighbours field is not consistent...
205237 if 'neighbors' in neighbor_list_root :
206238 neighbor_list = neighbor_list_root ['neighbors' ]
207239 elif 'neighbours' in neighbor_list_root :
208240 neighbor_list = neighbor_list_root ['neighbours' ]
209241 else :
210242 logging .error (f'Missing "neighbors"/"neighbours" field in reply: { neighbor_list_root } ' )
211- print (f'Missing "neighbors"/"neighbours" field in reply: { neighbor_list_root } ' , file = sys .stderr )
212243 continue
213244 self .neighbors += neighbor_list
214- self .cache_handler .save_cached_object (neighbor_object_name , self .neighbors )
245+ neighbor_object = {'routeserver_cached_at' : self .routeserver_cached_at ,
246+ 'neighbors' : self .neighbors }
247+ self .cache_handler .save_cached_object (neighbor_object_name , neighbor_object )
215248 if failed_routeservers :
216249 logging .warning (f'Failed to get neighbor information for { len (failed_routeservers )} routeservers: '
217250 f'{ failed_routeservers } ' )
@@ -343,7 +376,15 @@ def run(self) -> None:
343376 if ('details:route_changes' in flattened_neighbor
344377 and isinstance (flattened_neighbor ['details:route_changes' ], flatdict .FlatDict )):
345378 flattened_neighbor .pop ('details:route_changes' )
346- self .reference ['reference_url' ] = self .urls ['neighbors' ].format (rs = neighbor ['routeserver_id' ])
379+ routeserver_id = neighbor ['routeserver_id' ]
380+ self .reference ['reference_url_data' ] = self .urls ['neighbors' ].format (rs = routeserver_id )
381+ if routeserver_id in self .routeserver_cached_at :
382+ self .reference ['reference_time_modification' ] = self .routeserver_cached_at [routeserver_id ]
383+ else :
384+ logging .info (f'No modification time for routeserver: { routeserver_id } ' )
385+ # Set to None to not reuse value of previous loop iteration.
386+ self .reference ['reference_time_modification' ] = None
387+
347388 member_of_rels .append ({'src_id' : member_asn , # Translate to QID later.
348389 'dst_id' : n .data ['ixp_qid' ],
349390 'props' : [flattened_neighbor , self .reference .copy ()]})
@@ -354,7 +395,8 @@ def run(self) -> None:
354395 if self .fetch_routes :
355396 logging .info ('Iterating routes.' )
356397 for (routeserver_id , neighbor_id ), routes in self .routes .items ():
357- self .reference ['reference_url' ] = self .urls ['routes' ].format (rs = routeserver_id , neighbor = neighbor_id )
398+ self .reference ['reference_url_data' ] = self .urls ['routes' ].format (rs = routeserver_id ,
399+ neighbor = neighbor_id )
358400 for route in routes :
359401 prefix = ipaddress .ip_network (route ['network' ]).compressed
360402 origin_asn = route ['bgp' ]['as_path' ][- 1 ]
0 commit comments