Remove russian crap, and simplify downloading #168

Merged (12 commits) on Feb 7, 2025
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -18,6 +18,7 @@ repos:
- --warn-return-any
- --warn-unreachable
- --warn-unused-ignores
- --explicit-package-bases
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.2
hooks:
64 changes: 20 additions & 44 deletions laika/astro_dog.py
@@ -1,14 +1,14 @@
import os
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from typing import DefaultDict
from typing import DefaultDict, Sequence, Mapping
from collections.abc import Iterable

from .constants import SECS_IN_DAY, SECS_IN_HR
from .constants import SECS_IN_DAY
from .helpers import ConstellationId, get_constellation, get_closest, get_el_az, TimeRangeHolder
from .ephemeris import Ephemeris, EphemerisType, GLONASSEphemeris, GPSEphemeris, PolyEphemeris, parse_sp3_orbits, parse_rinex_nav_msg_gps, \
parse_rinex_nav_msg_glonass
from .downloader import download_orbits_gps, download_orbits_russia_src, download_nav, download_ionex, download_dcb, download_prediction_orbits_russia_src
from .downloader import download_orbits_gps, download_nav, download_ionex, download_dcb
from .downloader import download_cors_station
from .trop import saast
from .iono import IonexMap, parse_ionex, get_slant_delay
@@ -63,11 +63,11 @@ def __init__(self, auto_update=True,
self.dcbs_fetched_times = TimeRangeHolder()

self.dgps_delays = []
self.ionex_maps: list[IonexMap] = []
self.orbits: DefaultDict[str, list[PolyEphemeris]] = defaultdict(list)
self.qcom_polys: DefaultDict[str, list[PolyEphemeris]] = defaultdict(list)
self.navs: DefaultDict[str, list[GPSEphemeris | GLONASSEphemeris]] = defaultdict(list)
self.dcbs: DefaultDict[str, list[DCB]] = defaultdict(list)
self.ionex_maps: Sequence[IonexMap] = []
self.orbits: DefaultDict[str, Sequence[PolyEphemeris]] = defaultdict(list)
self.qcom_polys: DefaultDict[str, Sequence[PolyEphemeris]] = defaultdict(list)
self.navs: DefaultDict[str, Sequence[GPSEphemeris | GLONASSEphemeris]] = defaultdict(list)
self.dcbs: DefaultDict[str, Sequence[DCB]] = defaultdict(list)

self.cached_ionex: IonexMap | None = None
self.cached_dgps = None
@@ -160,16 +160,16 @@ def get_dgps_corrections(self, time, recv_pos):
self.cached_dgps = latest_data
return latest_data

def add_qcom_polys(self, new_ephems: dict[str, list[Ephemeris]]):
def add_qcom_polys(self, new_ephems: Mapping[str, Sequence[Ephemeris]]):
self._add_ephems(new_ephems, self.qcom_polys)

def add_orbits(self, new_ephems: dict[str, list[Ephemeris]]):
def add_orbits(self, new_ephems: Mapping[str, Sequence[Ephemeris]]):
self._add_ephems(new_ephems, self.orbits)

def add_navs(self, new_ephems: dict[str, list[Ephemeris]]):
def add_navs(self, new_ephems: Mapping[str, Sequence[Ephemeris]]):
self._add_ephems(new_ephems, self.navs)

def _add_ephems(self, new_ephems: dict[str, list[Ephemeris]], ephems_dict):
def _add_ephems(self, new_ephems: Mapping[str, Sequence[Ephemeris]], ephems_dict):
for k, v in new_ephems.items():
if len(v) > 0:
if self.clear_old_ephemeris:
@@ -208,41 +208,17 @@ def download_and_parse(constellation, parse_rinex_nav_func):
end_day = GPSTime(time.week, SECS_IN_DAY * (1 + (time.tow // SECS_IN_DAY)))
self.navs_fetched_times.add(begin_day, end_day)

def download_parse_orbit(self, gps_time: GPSTime, skip_before_epoch=None) -> dict[str, list[PolyEphemeris]]:
def download_parse_orbit(self, gps_time: GPSTime, skip_before_epoch=None) -> Mapping[str, Sequence[PolyEphemeris]]:
# Download multiple days to be able to polyfit at the start-end of the day
time_steps = [gps_time - SECS_IN_DAY, gps_time, gps_time + SECS_IN_DAY]
with ThreadPoolExecutor() as executor:
futures_other = [executor.submit(download_orbits_russia_src, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]
futures_gps = None
if ConstellationId.GPS in self.valid_const:
futures_gps = [executor.submit(download_orbits_gps, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]

files_other = [self.fetch_count(f.result()) for f in futures_other if f.result()]
ephems_other = parse_sp3_orbits(files_other, self.valid_const, skip_before_epoch)
files_gps = [self.fetch_count(f.result()) for f in futures_gps if f.result()] if futures_gps else []
ephems_us = parse_sp3_orbits(files_gps, self.valid_const, skip_before_epoch)

return {k: ephems_other.get(k, []) + ephems_us.get(k, []) for k in set(list(ephems_other.keys()) + list(ephems_us.keys()))}

def download_parse_prediction_orbit(self, gps_time: GPSTime):
assert EphemerisType.ULTRA_RAPID_ORBIT in self.valid_ephem_types
skip_until_epoch = gps_time - 2 * SECS_IN_HR

result = self.fetch_count(download_prediction_orbits_russia_src(gps_time, self.cache_dir))
if result is not None:
result = [result]
elif ConstellationId.GPS in self.valid_const:
# Slower fallback. Russia src prediction orbits are published from 2022
result = [self.fetch_count(download_orbits_gps(t, self.cache_dir, self.valid_ephem_types)) for t in [gps_time - SECS_IN_DAY, gps_time]]
if result is None:
return {}
return parse_sp3_orbits(result, self.valid_const, skip_until_epoch=skip_until_epoch)

def get_orbit_data(self, time: GPSTime, only_predictions=False):
if only_predictions:
ephems_sp3 = self.download_parse_prediction_orbit(time)
else:
ephems_sp3 = self.download_parse_orbit(time)
futures = [executor.submit(download_orbits_gps, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]
files = [self.fetch_count(f.result()) for f in futures if f.result()] if futures else []
ephems = parse_sp3_orbits(files, self.valid_const, skip_before_epoch)
return ephems

def get_orbit_data(self, time: GPSTime):
ephems_sp3 = self.download_parse_orbit(time)
if sum([len(v) for v in ephems_sp3.values()]) < 5:
raise RuntimeError(f'No orbit data found. For Time {time.as_datetime()} constellations {self.valid_const} valid ephem types {self.valid_ephem_types}')
self.add_ephem_fetched_time(ephems_sp3, self.orbit_fetched_times)
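A minimal usage sketch of the simplified orbit path after this change (not part of the diff): `download_parse_orbit` now fetches three days of SP3 files from the GPS/COD source only and returns a mapping of PRN to `PolyEphemeris` lists. It assumes a laika checkout with this branch applied and network access to the CDDIS mirror.

```python
# Sketch only: exercise the simplified download_parse_orbit path.
from datetime import datetime, timedelta

from laika import AstroDog
from laika.gps_time import GPSTime

dog = AstroDog()

# Use a time a few days in the past so rapid/final products are already published.
gps_time = GPSTime.from_datetime(datetime.utcnow() - timedelta(days=3))

# The previous, current and next day are downloaded and polyfit together.
ephems = dog.download_parse_orbit(gps_time)
print({prn: len(polys) for prn, polys in ephems.items()})
```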
140 changes: 25 additions & 115 deletions laika/downloader.py
@@ -16,7 +16,7 @@

from laika.ephemeris import EphemerisType
from .constants import SECS_IN_HR, SECS_IN_DAY, SECS_IN_WEEK
from .gps_time import GPSTime, tow_to_datetime
from .gps_time import GPSTime
from .helpers import ConstellationId

dir_path = os.path.dirname(os.path.realpath(__file__))
@@ -27,12 +27,6 @@
# mirror of sftp://gdc.cddis.eosdis.nasa.gov/gnss/data/hourly
CDDIS_HOURLY_BASE_URL = os.getenv("CDDIS_HOURLY_BASE_URL", "https://raw.githubusercontent.com/commaai/gnss-data-hourly/master")

# mirror of ftp://ftp.glonass-iac.ru
GLONAS_IAC_BASE_URL = os.getenv("GLONAS_IAC_BASE_URL", "https://raw.githubusercontent.com/commaai/gnss-data-alt/master")

# no mirror
IGN_BASE_URL = os.getenv("IGN_BASE_URL", "ftp://igs.ign.fr/pub")


class DownloadFailed(Exception):
pass
@@ -322,121 +316,39 @@ def download_nav(time: GPSTime, cache_dir, constellation: ConstellationId):
folder_and_filenames, cache_dir+'hourly_nav/', compression, overwrite=True)


def download_orbits_gps_cod0(time, cache_dir, ephem_types):
def download_orbits_gps(time, cache_dir, ephem_types):
url_bases = (
mirror_url(CDDIS_BASE_URL, '/gnss/products/'),
mirror_url(CDDIS_BASE_URL, '/glonass/products/'),
)

if EphemerisType.ULTRA_RAPID_ORBIT not in ephem_types:
# TODO: raise error here
return None

tm = tow_to_datetime(time.tow, time.week).timetuple()
doy = str(tm.tm_yday).zfill(3)
filename = f"COD0OPSULT_{tm.tm_year}{doy}0000_02D_05M_ORB.SP3"
# TODO: add hour management

folder_path = "%i/" % time.week
folder_file_names = [(folder_path, filename)]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'cddis_products/', compression='.gz')

def download_orbits_gps(time, cache_dir, ephem_types):
url_bases = (
mirror_url(CDDIS_BASE_URL, '/gnss/products/'),
mirror_url(IGN_BASE_URL, '/igs/products/'),
)
filenames = []
compression = '.gz'

if time.week < 2238:
compression = '.Z'
ephem_strs = {
EphemerisType.FINAL_ORBIT: ['igs{wwww}{dow}.sp3'.format(wwww=time.week, dow=time.dow)],
EphemerisType.RAPID_ORBIT: ['igr{wwww}{dow}.sp3'.format(wwww=time.week, dow=time.dow)],
EphemerisType.ULTRA_RAPID_ORBIT: ['igu{wwww}{dow}_{hh}.sp3'.format(wwww=time.week, dow=time.dow, hh=hour) for hour in ['18', '12', '06', '00']]
}
assert EphemerisType.FINAL_ORBIT in ephem_types, f"Only final orbits are available before 2238, {ephem_types}"
filenames.extend(['COD0MGXFIN_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)])
else:
# TODO deal with version number
compression = '.gz'
ephem_strs = {
EphemerisType.FINAL_ORBIT: ['IGS0OPSFIN_{yyyy}{doy:03d}0000_01D_15M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
EphemerisType.RAPID_ORBIT: ['IGS0OPSRAP_{yyyy}{doy:03d}0000_01D_15M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
EphemerisType.ULTRA_RAPID_ORBIT: ['IGS0OPSULT_{yyyy}{doy:03d}{hh}00_02D_15M_ORB.SP3'.format(yyyy=time.year, doy=time.doy, hh=hour) \
EphemerisType.FINAL_ORBIT: ['COD0OPSFIN_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
EphemerisType.RAPID_ORBIT: ['COD0OPSRAP_{yyyy}{doy:03d}0000_01D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy)],
EphemerisType.ULTRA_RAPID_ORBIT: ['COD0OPSULT_{yyyy}{doy:03d}{hh}00_02D_05M_ORB.SP3'.format(yyyy=time.year, doy=time.doy, hh=hour) \
for hour in ['18', '12', '06', '00']],
}

folder_path = "%i/" % time.week
filenames = []

# Download filenames in order of quality. Final -> Rapid -> Ultra-Rapid(newest first)
if EphemerisType.FINAL_ORBIT in ephem_types and GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_WEEK:
filenames.extend(ephem_strs[EphemerisType.FINAL_ORBIT])
if EphemerisType.RAPID_ORBIT in ephem_types:
filenames.extend(ephem_strs[EphemerisType.RAPID_ORBIT])
if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
filenames.extend(ephem_strs[EphemerisType.ULTRA_RAPID_ORBIT])
# Download filenames in order of quality. Final -> Rapid -> Ultra-Rapid(newest first)
if EphemerisType.FINAL_ORBIT in ephem_types and GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_WEEK:
filenames.extend(ephem_strs[EphemerisType.FINAL_ORBIT])
if EphemerisType.RAPID_ORBIT in ephem_types and GPSTime.from_datetime(datetime.utcnow()) - time > 3 * SECS_IN_DAY:
filenames.extend(ephem_strs[EphemerisType.RAPID_ORBIT])
if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
filenames.extend(ephem_strs[EphemerisType.ULTRA_RAPID_ORBIT])

folder_file_names = [(folder_path, filename) for filename in filenames]
ret = download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'cddis_products/', compression=compression)
if ret is not None:
return ret

# fallback to COD0 Ultra Rapid Orbits
return download_orbits_gps_cod0(time, cache_dir, ephem_types)


def download_prediction_orbits_russia_src(gps_time, cache_dir):
# Download single file that contains Ultra_Rapid predictions for GPS, GLONASS and other constellations
t = gps_time.as_datetime()
# Files exist starting at 29-01-2022
if t < datetime(2022, 1, 29):
return None
url_bases = (
mirror_url(GLONAS_IAC_BASE_URL, '/MCC/PRODUCTS/'),
)
folder_path = t.strftime('%y%j/ultra/')
file_prefix = "Stark_1D_" + t.strftime('%y%m%d')

# Predictions are 24H so previous day can also be used.
prev_day = (t - timedelta(days=1))
file_prefix_prev = "Stark_1D_" + prev_day.strftime('%y%m%d')
folder_path_prev = prev_day.strftime('%y%j/ultra/')

current_day = GPSTime.from_datetime(datetime(t.year, t.month, t.day))
# Ultra-Orbit is published in gnss-data-alt every 10th minute past the 5,11,17,23 hour.
# Predictions published are delayed by around 10 hours.
# Download latest file that includes gps_time with 20 minutes margin.:
if gps_time > current_day + 23.5 * SECS_IN_HR:
prev_day, current_day = [], [6, 12]
elif gps_time > current_day + 17.5 * SECS_IN_HR:
prev_day, current_day = [], [0, 6]
elif gps_time > current_day + 11.5 * SECS_IN_HR:
prev_day, current_day = [18], [0]
elif gps_time > current_day + 5.5 * SECS_IN_HR:
prev_day, current_day = [12, 18], []
else:
prev_day, current_day = [6, 12], []
# Example: Stark_1D_22060100.sp3
folder_and_file_names = [(folder_path, file_prefix + f"{h:02}.sp3") for h in reversed(current_day)] + \
[(folder_path_prev, file_prefix_prev + f"{h:02}.sp3") for h in reversed(prev_day)]
return download_and_cache_file_return_first_success(url_bases, folder_and_file_names, cache_dir+'russian_products/', raise_error=True)


def download_orbits_russia_src(time, cache_dir, ephem_types):
# Orbits from russian source. Contains GPS, GLONASS, GALILEO, BEIDOU
url_bases = (
mirror_url(GLONAS_IAC_BASE_URL, '/MCC/PRODUCTS/'),
)
t = time.as_datetime()
folder_paths = []
current_gps_time = GPSTime.from_datetime(datetime.utcnow())
filename = "Sta%i%i.sp3" % (time.week, time.dow)
if EphemerisType.FINAL_ORBIT in ephem_types and current_gps_time - time > 2 * SECS_IN_WEEK:
folder_paths.append(t.strftime('%y%j/final/'))
if EphemerisType.RAPID_ORBIT in ephem_types:
folder_paths.append(t.strftime('%y%j/rapid/'))
if EphemerisType.ULTRA_RAPID_ORBIT in ephem_types:
folder_paths.append(t.strftime('%y%j/ultra/'))
folder_file_names = [(folder_path, filename) for folder_path in folder_paths]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'russian_products/')
return ret


def download_ionex(time, cache_dir):
@@ -447,17 +359,16 @@ def download_ionex(time, cache_dir):
folder_path = t.strftime('%Y/%j/')
# Format date change
if time >= GPSTime(2238, 0.0):
filenames = [t.strftime('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX'),
t.strftime('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX')]
compression = '.gz'
filenames = [t.strftime('COD0OPSFIN_%Y%j0000_01D_01H_GIM.INX.gz'),
t.strftime('COD0OPSRAP_%Y%j0000_01D_01H_GIM.INX.gz'),
t.strftime("c2pg%j0.%yi.Z")]
else:
filenames = [t.strftime("codg%j0.%yi"),
t.strftime("c1pg%j0.%yi"),
t.strftime("c2pg%j0.%yi")]
compression = '.Z'
filenames = [t.strftime("codg%j0.%yi.Z"),
t.strftime("c1pg%j0.%yi.Z"),
t.strftime("c2pg%j0.%yi.Z")]

folder_file_names = [(folder_path, f) for f in filenames]
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'ionex/', compression=compression, raise_error=True)
return download_and_cache_file_return_first_success(url_bases, folder_file_names, cache_dir+'ionex/', raise_error=True)


def download_dcb(time, cache_dir):
@@ -467,7 +378,6 @@
folder_paths = []
url_bases = (
mirror_url(CDDIS_BASE_URL, '/gnss/products/bias/'),
mirror_url(IGN_BASE_URL, '/igs/products/mgex/dcb/'),
)
# seem to be a lot of data missing, so try many days
for time_step in [time - i * SECS_IN_DAY for i in range(14)]:
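As a rough illustration of the filename scheme the consolidated `download_orbits_gps` now requests from the CDDIS mirror, in quality order (final, then rapid, then ultra-rapid newest first): the strings below are taken from the diff above, but the helper itself is only a sketch and does not exist in laika.

```python
# Sketch: build (folder, filename) candidates matching the COD0 SP3 products above.
def cod0_orbit_candidates(year: int, doy: int, week: int) -> list[tuple[str, str]]:
  folder = f"{week}/"
  names = [
    f"COD0OPSFIN_{year}{doy:03d}0000_01D_05M_ORB.SP3",  # final orbit
    f"COD0OPSRAP_{year}{doy:03d}0000_01D_05M_ORB.SP3",  # rapid orbit
  ]
  # ultra-rapid products exist per 6-hour block; try the newest first
  names += [f"COD0OPSULT_{year}{doy:03d}{hh}00_02D_05M_ORB.SP3" for hh in ("18", "12", "06", "00")]
  return [(folder, name) for name in names]

print(cod0_orbit_candidates(2024, 45, 2300))
```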
11 changes: 6 additions & 5 deletions laika/ephemeris.py
@@ -42,11 +42,11 @@ def all_orbits():

@classmethod
def from_file_name(cls, file_name: str):
if "/final" in file_name or "/igs" in file_name or 'OPSFIN' in file_name:
if "MGXFIN" in file_name or 'OPSFIN' in file_name:
return EphemerisType.FINAL_ORBIT
if "/rapid" in file_name or "/igr" in file_name or 'OPSRAP' in file_name:
if 'OPSRAP' in file_name:
return EphemerisType.RAPID_ORBIT
if "/ultra" in file_name or "/igu" in file_name or "COD0OPSULT" in file_name or 'OPSULT' in file_name:
if 'OPSULT' in file_name:
return EphemerisType.ULTRA_RAPID_ORBIT
raise RuntimeError(f"Ephemeris type not found in filename: {file_name}")

@@ -325,8 +325,9 @@ def parse_sp3_orbits(file_names, supported_constellations, skip_until_epoch: GPS

def read_prn_data(data, prn, deg=16, deg_t=1):
np_data_prn = np.array(data[prn], dtype=object)
# Currently, don't even bother with satellites that have unhealthy times
if len(np_data_prn) == 0 or (np_data_prn[:, 5] > .99).any():
# > .99 is unhealthy time
np_data_prn = np_data_prn[np_data_prn[:, 5] < .99]
if len(np_data_prn) == 0:
return []
ephems = []
for i in range(len(np_data_prn) - deg):
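A standalone illustration of the behaviour change in `read_prn_data` (not part of the diff): previously a single epoch whose column-5 value exceeded .99 (flagged as an unhealthy time) caused the whole satellite to be skipped; now only those rows are dropped. The data below is made up and only column index 5 matters.

```python
import numpy as np

# Six columns per epoch; only index 5, the value checked against .99, is meaningful here.
rows = np.array([
  [0, 0, 0, 0, 0, 0.50],
  [0, 0, 0, 0, 0, 1.00],  # "unhealthy" epoch: previously this skipped the whole PRN
  [0, 0, 0, 0, 0, 0.40],
])

healthy = rows[rows[:, 5] < .99]
assert len(healthy) == 2  # only the flagged row is discarded now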
22 changes: 13 additions & 9 deletions tests/test_ephemerides.py
@@ -1,31 +1,35 @@
import numpy as np
import unittest

#from datetime import datetime
from laika.ephemeris import EphemerisType, read_prn_data
from laika.gps_time import GPSTime
#from laika.constants import SECS_IN_DAY
from laika import AstroDog

gps_times_list = [[1999, 415621.0],
[2045, 455457.0],
[1985, 443787.0]]
gps_times_list = [[2100, 415621.0],
[2200, 455457.0],
[2300, 443787.0]]

svIds = ['G01', 'G31', 'R08']
svIds = ['G07', 'G31', 'R08']
gps_times = [GPSTime(*gps_time_list) for gps_time_list in gps_times_list]


class TestAstroDog(unittest.TestCase):
'''
def test_nav_vs_orbit_now(self):
dog_orbit = AstroDog(valid_ephem_types=EphemerisType.orbits())
dog_orbit = AstroDog(valid_ephem_types=EphemerisType.all_orbits())
dog_nav = AstroDog(valid_ephem_types=EphemerisType.NAV)
gps_time = GPSTime.from_datetime(datetime.utcnow()) - SECS_IN_DAY*2
gps_time = GPSTime.from_datetime(datetime.utcnow()) - SECS_IN_DAY*3
for svId in svIds:
sat_info_nav = dog_nav.get_sat_info(svId, gps_time)
assert sat_info_nav is not None, f"Failed to get sat info for {svId} at {gps_time}"
sat_info_orbit = dog_orbit.get_sat_info(svId, gps_time)
np.testing.assert_allclose(sat_info_nav[0], sat_info_orbit[0], rtol=0, atol=5)
np.testing.assert_allclose(sat_info_nav[1], sat_info_orbit[1], rtol=0, atol=.1)
assert sat_info_orbit is not None
np.testing.assert_allclose(sat_info_nav[0], sat_info_orbit[0], rtol=0, atol=5e2)
np.testing.assert_allclose(sat_info_nav[1], sat_info_orbit[1], rtol=0, atol=1e0)
np.testing.assert_allclose(sat_info_nav[2], sat_info_orbit[2], rtol=0, atol=1e-7)
np.testing.assert_allclose(sat_info_nav[3], sat_info_orbit[3], rtol=0, atol=1e-11)
np.testing.assert_allclose(sat_info_nav[3], sat_info_orbit[3], rtol=0, atol=1e-10)
'''

def test_nav_vs_orbit_old(self):
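A quick sketch of what the updated test epochs correspond to (not part of the test suite): weeks 2100 and 2200 fall before the week-2238 product naming change handled in downloader.py, and week 2300 falls after it, so both filename branches should get exercised. `as_datetime` is the existing laika helper.

```python
from laika.gps_time import GPSTime

for week, tow in [(2100, 415621.0), (2200, 455457.0), (2300, 443787.0)]:
  print(week, GPSTime(week, tow).as_datetime())
# The three weeks land roughly in the 2020 to early 2024 range.
```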
4 changes: 2 additions & 2 deletions tests/test_fail_caching.py
@@ -5,9 +5,9 @@
from laika.gps_time import GPSTime
from laika import AstroDog

gps_times_list = [[1950, 415621.0]]
gps_times_list = [[2350, 415621.0]]

svIds = ['R12']
svIds = ['R345'] # fake satellite id
gps_times = [GPSTime(*gps_time_list) for gps_time_list in gps_times_list]

