laika/astro_dog.py (12 changes: 10 additions & 2 deletions)
@@ -1,3 +1,4 @@
+import logging
 import os
 from collections import defaultdict
 from concurrent.futures import ThreadPoolExecutor
@@ -213,14 +214,21 @@ def download_parse_orbit(self, gps_time: GPSTime, skip_before_epoch=None) -> Map
     time_steps = [gps_time - SECS_IN_DAY, gps_time, gps_time + SECS_IN_DAY]
     with ThreadPoolExecutor() as executor:
       futures = [executor.submit(download_orbits_gps, t, self.cache_dir, self.valid_ephem_types) for t in time_steps]
-      files = [self.fetch_count(f.result()) for f in futures if f.result()] if futures else []
+      files = []
+      for t, future in zip(time_steps, futures):
+        try:
+          result = future.result()
+          if result:
+            files.append(self.fetch_count(result))
+        except Exception as e:
+          logging.warning("Failed to download orbit for %s: %s", t.as_datetime(), e)
     ephems = parse_sp3_orbits(files, self.valid_const, skip_before_epoch)
     return ephems

   def get_orbit_data(self, time: GPSTime):
     ephems_sp3 = self.download_parse_orbit(time)
     if sum([len(v) for v in ephems_sp3.values()]) < 5:
-      raise RuntimeError(f'No orbit data found. For Time {time.as_datetime()} constellations {self.valid_const} valid ephem types {self.valid_ephem_types}')
+      raise RuntimeError(f'No orbit data found. For Time {time.as_datetime()} constellations {self.valid_const} valid ephem types {self.valid_ephem_types}. Check logs above for download failure details.')
     self.add_ephem_fetched_time(ephems_sp3, self.orbit_fetched_times)
     self.add_orbits(ephems_sp3)

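Reviewer note on the astro_dog.py change above: future.result() re-raises any exception from the worker thread, so under the old one-liner a single failed day aborted the whole batch and the traceback never said which time step was at fault. A minimal standalone sketch of the new per-future pattern, with a hypothetical fetch_orbit worker standing in for download_orbits_gps:

import logging
from concurrent.futures import ThreadPoolExecutor

def fetch_orbit(day: int) -> str:
  # Hypothetical stand-in for download_orbits_gps; fails for one
  # input to simulate a missing file on the server.
  if day == 2:
    raise IOError(f"server returned 404 for day {day}")
  return f"orbit_day_{day}.sp3"

days = [1, 2, 3]
with ThreadPoolExecutor() as executor:
  futures = [executor.submit(fetch_orbit, d) for d in days]
  files = []
  for day, future in zip(days, futures):
    try:
      result = future.result()  # re-raises the worker's exception here
      if result:
        files.append(result)
    except Exception as e:
      # Log which input failed and keep the successes.
      logging.warning("Failed to fetch orbit for day %s: %s", day, e)

print(files)  # ['orbit_day_1.sp3', 'orbit_day_3.sp3']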
laika/downloader.py (24 changes: 16 additions & 8 deletions)
@@ -43,13 +43,15 @@ def wrapped(url_bases, *args, **kwargs):
       return f(url_bases, *args, **kwargs)

     # not a string, must be a list of url_bases
+    errors = []
     for url_base in (url for url in url_bases if url):
       try:
         return f(url_base, *args, **kwargs)
       except DownloadFailed as e:
-        logging.warning(e)
+        errors.append(str(e))
+        logging.warning("Download from %s failed: %s", url_base, e)
     # none of them succeeded
-    raise DownloadFailed("Multiple URL failures attempting to pull file(s)")
+    raise DownloadFailed("Multiple URL failures attempting to pull file(s):\n" + "\n".join(errors))
   return wrapped


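The retryable change above is a collect-then-raise pattern: each mirror's error is remembered so the final exception names every attempt instead of a bare "multiple failures". A self-contained sketch of the same shape; the mirror URLs and the fetch_from function are invented for illustration:

import logging

class DownloadFailed(Exception):
  pass

def fetch_from(url_base):
  # Hypothetical single-source fetch; always fails in this demo.
  raise DownloadFailed(f"{url_base}: connection refused")

def fetch_any(url_bases):
  errors = []
  for url_base in (url for url in url_bases if url):
    try:
      return fetch_from(url_base)
    except DownloadFailed as e:
      errors.append(str(e))
      logging.warning("Download from %s failed: %s", url_base, e)
  # None succeeded: surface every attempt in one exception.
  raise DownloadFailed("Multiple URL failures:\n" + "\n".join(errors))

try:
  fetch_any(["https://mirror-a.example", "https://mirror-b.example"])
except DownloadFailed as e:
  print(e)  # lists both mirrors' errors, one per line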
@@ -202,7 +204,7 @@ def https_download_file(url):
   crl.close()

   if response != 200:
-    raise DownloadFailed('HTTPS error ' + str(response))
+    raise DownloadFailed(f'HTTPS error {response} downloading {url}')
   return buf.getvalue()


@@ -218,7 +220,7 @@ def ftp_download_file(url):
     ftp.retrbinary('RETR ' + parsed.path, buf.write)
     return buf.getvalue()
   except ftplib.all_errors as e:
-    raise DownloadFailed(e)
+    raise DownloadFailed(f'FTP error downloading {url}: {e}')


 @retryable
@@ -243,13 +245,18 @@ def download_file(url_base, folder_path, filename_zipped):

 def download_and_cache_file_return_first_success(url_bases, folder_and_file_names, cache_dir, compression='', overwrite=False, raise_error=False):
   last_error = None
+  errors = []
   for folder_path, filename in folder_and_file_names:
     try:
       file = download_and_cache_file(url_bases, folder_path, cache_dir, filename, compression, overwrite)
       return file
     except DownloadFailed as e:
+      logging.warning("Download attempt failed for %s%s: %s", folder_path, filename, e)
+      errors.append(str(e))
       last_error = e

+  if errors:
+    logging.warning("All download attempts failed:\n%s", "\n".join(errors))
   if last_error and raise_error:
     raise last_error

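Worth flagging for callers of download_and_cache_file_return_first_success: with raise_error left False it still returns None on total failure, so the new summary warning is the only trace of what went wrong. A condensed sketch of that contract (file names hypothetical):

import logging

class DownloadFailed(Exception):
  pass

def first_success(candidates, raise_error=False):
  last_error, errors = None, []
  for name in candidates:
    try:
      if name != "good.obs":
        raise DownloadFailed(f"{name} not on server")
      return name
    except DownloadFailed as e:
      errors.append(str(e))
      last_error = e
  if errors:
    # One summary instead of silent None on total failure.
    logging.warning("All download attempts failed:\n%s", "\n".join(errors))
  if last_error and raise_error:
    raise last_error
  return None

print(first_success(["a.obs", "b.obs"]))     # None, plus a logged summary
print(first_success(["a.obs", "good.obs"]))  # good.obs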
@@ -269,12 +276,13 @@ def download_and_cache_file(url_base, folder_path: str, cache_dir: str, filename
   if not os.path.isfile(filepath) or overwrite:
     try:
       data_zipped = download_file(url_base, folder_path, filename_zipped)
-    except (DownloadFailed, pycurl.error, TimeoutError):
+    except (DownloadFailed, pycurl.error, TimeoutError) as e:
       unix_time = time.time()
       os.makedirs(folder_path_abs, exist_ok=True)
       with atomic_write(filepath_attempt, mode='w', overwrite=True) as wf:
         wf.write(str(unix_time))
-      raise DownloadFailed(f"Could not download {folder_path + filename_zipped} from {url_base}")
+      logging.warning("Failed to download %s from %s: %s", folder_path + filename_zipped, url_base, e)
+      raise DownloadFailed(f"Could not download {folder_path + filename_zipped} from {url_base}: {e}")

   os.makedirs(folder_path_abs, exist_ok=True)
   ephem_bytes = hatanaka.decompress(data_zipped)
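Context for the except branch above: on failure the function drops a timestamp marker next to the cache path so later calls can throttle re-downloads, and the change now logs and includes the original error rather than swallowing it. A rough standalone sketch of the marker-then-raise idea, with a simplified atomic write standing in for laika's atomic_write helper:

import logging
import os
import tempfile
import time

class DownloadFailed(Exception):
  pass

def write_attempt_marker(path):
  # Simplified atomic write: temp file in the same directory,
  # then an atomic rename into place.
  d = os.path.dirname(path) or "."
  os.makedirs(d, exist_ok=True)
  fd, tmp = tempfile.mkstemp(dir=d)
  with os.fdopen(fd, "w") as wf:
    wf.write(str(time.time()))
  os.replace(tmp, path)

def fetch_cached(url_base, filename):
  try:
    raise TimeoutError("read timed out")  # simulate a failed transfer
  except (TimeoutError, OSError) as e:
    write_attempt_marker(filename + ".attempt")
    # Keep the root cause in both the log and the raised error.
    logging.warning("Failed to download %s from %s: %s", filename, url_base, e)
    raise DownloadFailed(f"Could not download {filename} from {url_base}: {e}")

try:
  fetch_cached("https://example.org", "file.gz")
except DownloadFailed as e:
  print(e)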
@@ -411,6 +419,6 @@ def download_cors_station(time, station_name, cache_dir):
   try:
     filepath = download_and_cache_file(url_bases, folder_path, cache_dir+'cors_obs/', filename, compression='.gz')
     return filepath
-  except DownloadFailed:
-    logging.warning("File not downloaded, check availability on server.")
+  except DownloadFailed as e:
+    logging.warning("File not downloaded for station %s: %s", station_name, e)
   return None