diff --git a/CHANGELOG.md b/CHANGELOG.md index 38ac848..075382d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,27 @@ ## Future ? -- [ ] Optimize CSV generation (numpy vectorization ?) +- [ ] Optimize data aggregation (numpy vectorization?) - [ ] Credit the author of the pixel art planets - [ ] Add a README to the download tarball - [ ] Set the log level to _error_ in production (see `web/run.py`) -- [ ] Cache warmup (generate for today's default interval) `/cache/warmup` - [ ] CRON statements to call the cache cleanup and warmup -- [ ] Add a priority for models to support Juno's trajectory (using Jupiter's) +- [ ] Cache warmup (generate for today's default interval) `/cache/warmup` +- [ ] Normalize time interval for time series +- [ ] Give the future data another color +- [ ] Sort time series by closeness to the sun +- [ ] Generate a CDF file (not NetCDF) +- [ ] Make the footer images clickable +- [ ] Move the link to the source in the footer +- [ ] Highlight the visits counter + +A heliospheric propagation 1D MHD model for solar wind prediction at planets, probes and comets. 
+ ## 1.0.0-rc4 -- [ ] Make the tarball with netcdf files instead of CSVs +- [x] Add interval constraints for orbit models (Rosetta uses P67 after a time) +- [x] Make the download with a netcdf file instead of a tarball of CSVs - [x] Support having no position to display (for Rosetta in some intervals) - [x] Make the local cache more resilient to corrupted downloads - [x] Make the local cache more resilient to naming collisions diff --git a/config.yml b/config.yml index 20c8806..fdae59c 100644 --- a/config.yml +++ b/config.yml @@ -156,6 +156,9 @@ targets: orbit: models: - slug: 'ros_orb_cruise' + stopped_at: '2014-08-02T00:00:00' + - slug: 'p67_orb_all' + started_at: '2014-08-02T00:00:00' models: - slug: 'tao_ros_sw' locked: false diff --git a/web/run.py b/web/run.py index f32a957..7358669 100755 --- a/web/run.py +++ b/web/run.py @@ -392,7 +392,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): local_netc_files.append(local_netc_file) log.debug("Unzipped '%s'." % local_gzip_file) - return local_netc_files + return list(set(local_netc_files)) # remove possible dupes def get_data_for_target(target_config, started_at, stopped_at): @@ -412,21 +412,25 @@ def get_data_for_target(target_config, started_at, stopped_at): abort(500, "Invalid orbit configuration for '%s' : %s" % (target_config['slug'], str(e))) + # try: + # started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT) + # except: + # abort(400, "Invalid started_at config : '%s'." % started_at) + # try: + # stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT) + # except: + # abort(400, "Invalid stopped_at config : '%s'." 
% stopped_at) + # Grab the list of netCDF files from Myriam's API model_files = [] - orbit_files = [] for model in models: model_files = model_files + retrieve_amda_netcdf( target_config['slug'], model['slug'], started_at, stopped_at ) - for orbit in orbits: - orbit_files = orbit_files + retrieve_amda_netcdf( - target_config['slug'], orbit['slug'], started_at, stopped_at - ) # Remove possible duplicates model_files = set(model_files) - orbit_files = set(orbit_files) + # Let's return an empty dict when there's no data instead of crashing # if not len(model_files): # abort(500, "No model files found for '%s'." % target_config['slug']) # if not len(orbit_files): @@ -434,20 +438,38 @@ def get_data_for_target(target_config, started_at, stopped_at): precision = "%Y-%m-%dT%H" # model and orbits times are only equal-ish orbit_data = {} # keys are datetime as str, values arrays of XY - for orbit_file in orbit_files: - log.debug("%s: opening orbit NETCDF4 '%s'..." % - (target_config['name'], orbit_file)) - cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") - times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms - data_hee = cdf_handle.variables['HEE'] - log.debug("%s: aggregating data from '%s'..." % - (target_config['name'], orbit_file)) - for time, datum_hee in zip(times, data_hee): - dtime = datetime_from_list(time) - if started_at <= dtime <= stopped_at: - dkey = dtime.strftime(precision) - orbit_data[dkey] = datum_hee - cdf_handle.close() + + for orbit in orbits: + if 'started_at' in orbit: + s0 = datetime.datetime.strptime(orbit['started_at'], FILE_DATE_FMT) + else: + s0 = started_at + if 'stopped_at' in orbit: + s1 = datetime.datetime.strptime(orbit['stopped_at'], FILE_DATE_FMT) + else: + s1 = stopped_at + + orbit_files = retrieve_amda_netcdf( + target_config['slug'], orbit['slug'], s0, s1 + ) + for orbit_file in orbit_files: + log.debug("%s: opening orbit NETCDF4 '%s'..." 
% + (target_config['name'], orbit_file)) + cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") + times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms + try: + data_hee = cdf_handle.variables['HEE'] + except KeyError: + data_hee = cdf_handle.variables['XYZ_HEE'] # p67 uses this + + log.debug("%s: aggregating data from '%s'..." % + (target_config['name'], orbit_file)) + for time, datum_hee in zip(times, data_hee): + dtime = datetime_from_list(time) + if s0 <= dtime <= s1: + dkey = dtime.strftime(precision) + orbit_data[dkey] = datum_hee + cdf_handle.close() all_data = {} # keys are datetime as str, values tuples of data for model_file in model_files: @@ -681,6 +703,8 @@ def download_targets_tarball(targets, started_at, stopped_at): in their own CSV file, and make a tarball of them. `started_at` and `stopped_at` should be UTC strings. + Note: we do not use this route anymore, but let's keep it shelved for now. + targets: string list of targets' slugs, separated by `-`. """ separator = '-' @@ -694,17 +718,16 @@ def download_targets_tarball(targets, started_at, stopped_at): if 0 == len(targets_configs): abort(400, "No valid targets specified. What are you doing?") - date_fmt = FILE_DATE_FMT try: - started_at = datetime.datetime.strptime(started_at, date_fmt) + started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT) except: abort(400, "Invalid started_at parameter : '%s'." % started_at) try: - stopped_at = datetime.datetime.strptime(stopped_at, date_fmt) + stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT) except: abort(400, "Invalid stopped_at parameter : '%s'." 
% stopped_at) - sta = started_at.strftime(date_fmt) - sto = stopped_at.strftime(date_fmt) + sta = started_at.strftime(FILE_DATE_FMT) + sto = stopped_at.strftime(FILE_DATE_FMT) gzip_filename = "%s_%s_%s.tar.gz" % (separator.join(targets), sta, sto) local_gzip_file = join(CACHE_DIR, gzip_filename) @@ -780,9 +803,9 @@ def download_targets_netcdf(targets, params, started_at, stopped_at): log.debug("Creating the NetCDF file '%s'..." % nc_filename) nc_handle = Dataset(nc_path, "w", format="NETCDF4") try: - nc_handle.description = "TODO" # todo + nc_handle.description = "Model and orbit data for targets" # todo nc_handle.history = "Created " + time.ctime(time.time()) - nc_handle.source = "Transplanet (CDDP)" + nc_handle.source = "Heliopropa (CDDP)" available_params = list(PROPERTIES) for target in targets_configs: target_slug = target['slug'] diff --git a/web/view/layout.html.jinja2 b/web/view/layout.html.jinja2 index f6757d9..d4a44e9 100755 --- a/web/view/layout.html.jinja2 +++ b/web/view/layout.html.jinja2 @@ -42,8 +42,9 @@ -- libgit2 0.21.2