1616
1717from laika .ephemeris import EphemerisType
1818from .constants import SECS_IN_HR , SECS_IN_DAY , SECS_IN_WEEK
19- from .gps_time import GPSTime , tow_to_datetime
19+ from .gps_time import GPSTime
2020from .helpers import ConstellationId
2121
dir_path = os.path.dirname(os.path.realpath(__file__))

# mirror of sftp://gdc.cddis.eosdis.nasa.gov/gnss/data/hourly
CDDIS_HOURLY_BASE_URL = os.getenv("CDDIS_HOURLY_BASE_URL", "https://raw.githubusercontent.com/commaai/gnss-data-hourly/master")

class DownloadFailed(Exception):
  """Raised when a file could not be fetched from any of the configured sources."""
@@ -322,121 +316,39 @@ def download_nav(time: GPSTime, cache_dir, constellation: ConstellationId):
322316 folder_and_filenames , cache_dir + 'hourly_nav/' , compression , overwrite = True )
323317
324318
def download_orbits_gps(time, cache_dir, ephem_types):
  """Download a CODE SP3 orbit product from CDDIS for the given GPS time.

  Candidate filenames are tried in order of quality
  (Final -> Rapid -> Ultra-Rapid, newest Ultra-Rapid hour first) and the
  path of the first successful download is returned (None if all fail).

  Args:
    time: GPSTime of the epoch the orbits must cover.
    cache_dir: local directory root used for cached downloads.
    ephem_types: iterable of EphemerisType values that are acceptable.
  """
  url_bases = (
    mirror_url(CDDIS_BASE_URL, '/gnss/products/'),
    mirror_url(CDDIS_BASE_URL, '/glonass/products/'),
  )

  folder_path = "%i/" % time.week
  filenames = []
  compression = '.gz'

  if time.week < 2238:
    # Before GPS week 2238 only the reprocessed MGEX final product is available here.
    assert EphemerisType.FINAL_ORBIT in ephem_types, f"Only final orbits are available before 2238, {ephem_types}"
    filenames.append('COD0MGXFIN_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy))
  else:
    # TODO deal with version number
    ephem_strs = {
      EphemerisType.FINAL_ORBIT: ['COD0OPSFIN_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
      EphemerisType.RAPID_ORBIT: ['COD0OPSRAP_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
      EphemerisType.ULTRA_RAPID_ORBIT: ['COD0OPSULT_{yyyy}{doy:03d}{hh}00_02D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy, hh=hour)
                                        for hour in ['18', '12', '06', '00']],
    }

    # Download filenames in order of quality. Final -> Rapid -> Ultra-Rapid(newest first)
    # Final/rapid products are only requested once enough time has passed for
    # them to plausibly be published (~3 weeks / ~3 days — TODO confirm lag).
    now = GPSTime.from_datetime(datetime.utcnow())
    if EphemerisType.FINAL_ORBIT in ephem_types and now - time > 3 * SECS_IN_WEEK:
      filenames.extend(ephem_strs[EphemerisType.FINAL_ORBIT])
    if EphemerisType.RAPID_ORBIT in ephem_types and now - time > 3 * SECS_IN_DAY:
      filenames.extend(ephem_strs[EphemerisType.RAPID_ORBIT])
    if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
      filenames.extend(ephem_strs[EphemerisType.ULTRA_RAPID_ORBIT])

  folder_file_names = [(folder_path, filename) for filename in filenames]
  return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir + 'cddis_products/', compression=compression)
440352
441353
442354def download_ionex (time , cache_dir ):
@@ -447,17 +359,16 @@ def download_ionex(time, cache_dir):
447359 folder_path = t .strftime ('%Y/%j/' )
448360 # Format date change
449361 if time >= GPSTime (2238 , 0.0 ):
450- filenames = [t .strftime ('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX' ),
451- t .strftime ('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX' )]
452- compression = '.gz'
362+ filenames = [t .strftime ('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX.gz ' ),
363+ t .strftime ('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX.gz' ),
364+ t . strftime ( "c2pg%j0.%yi.Z" )]
453365 else :
454- filenames = [t .strftime ("codg%j0.%yi" ),
455- t .strftime ("c1pg%j0.%yi" ),
456- t .strftime ("c2pg%j0.%yi" )]
457- compression = '.Z'
366+ filenames = [t .strftime ("codg%j0.%yi.Z" ),
367+ t .strftime ("c1pg%j0.%yi.Z" ),
368+ t .strftime ("c2pg%j0.%yi.Z" )]
458369
459370 folder_file_names = [(folder_path , f ) for f in filenames ]
460- return download_and_cache_file_return_first_success (url_bases , folder_file_names , cache_dir + 'ionex/' , compression = compression , raise_error = True )
371+ return download_and_cache_file_return_first_success (url_bases , folder_file_names , cache_dir + 'ionex/' , raise_error = True )
461372
462373
463374def download_dcb (time , cache_dir ):
@@ -467,7 +378,6 @@ def download_dcb(time, cache_dir):
467378 folder_paths = []
468379 url_bases = (
469380 mirror_url (CDDIS_BASE_URL , '/gnss/products/bias/' ),
470- mirror_url (IGN_BASE_URL , '/igs/products/mgex/dcb/' ),
471381 )
472382 # seem to be a lot of data missing, so try many days
473383 for time_step in [time - i * SECS_IN_DAY for i in range (14 )]:
0 commit comments