Commit ea6c8d5d603b72a8e79ab29245dbc9bbad08a8d8
1 parent
dc0be992
Exists in
master
and in
2 other branches
Add interval constraints for orbit models (Rosetta uses P67 after a time)
Showing
4 changed files
with
71 additions
and
34 deletions
Show diff stats
CHANGELOG.md
1 | 1 | ## Future ? |
2 | 2 | |
3 | -- [ ] Optimize CSV generation (numpy vectorization ?) | |
3 | +- [ ] Optimize data aggregation (numpy vectorization?) | |
4 | 4 | - [ ] Credit the author of the pixel art planets |
5 | 5 | - [ ] Add a README to the download tarball |
6 | 6 | - [ ] Set the log level to _error_ in production (see `web/run.py`) |
7 | -- [ ] Cache warmup (generate for today's default interval) `/cache/warmup` | |
8 | 7 | - [ ] CRON statements to call the cache cleanup and warmup |
9 | -- [ ] Add a priority for models to support Juno's trajectory (using Jupiter's) | |
8 | +- [ ] Cache warmup (generate for today's default interval) `/cache/warmup` | |
9 | +- [ ] Normalize time interval for time series | |
10 | +- [ ] Give the future data another color | |
11 | +- [ ] Sort time series by closeness to the sun | |
12 | +- [ ] Generate a CDF file (not NetCDF) | |
13 | +- [ ] Make the footer images clickable | |
14 | +- [ ] Move the link to the source in the footer | |
15 | +- [ ] Highlight the visits counter | |
16 | + | |
17 | +A heliospheric propagation 1D MHD model for solar wind prediction at planets, probes and comets. | |
18 | + | |
10 | 19 | |
11 | 20 | |
12 | 21 | ## 1.0.0-rc4 |
13 | 22 | |
14 | -- [ ] Make the tarball with netcdf files instead of CSVs | |
23 | +- [x] Add interval constraints for orbit models (Rosetta uses P67 after a time) | |
24 | +- [x] Make the download with a netcdf file instead of a tarball of CSVs | |
15 | 25 | - [x] Support having no position to display (for Rosetta in some intervals) |
16 | 26 | - [x] Make the local cache more resilient to corrupted downloads |
17 | 27 | - [x] Make the local cache more resilient to naming collisions | ... | ... |
config.yml
web/run.py
... | ... | @@ -392,7 +392,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): |
392 | 392 | local_netc_files.append(local_netc_file) |
393 | 393 | log.debug("Unzipped '%s'." % local_gzip_file) |
394 | 394 | |
395 | - return local_netc_files | |
395 | + return list(set(local_netc_files)) # remove possible dupes | |
396 | 396 | |
397 | 397 | |
398 | 398 | def get_data_for_target(target_config, started_at, stopped_at): |
... | ... | @@ -412,21 +412,25 @@ def get_data_for_target(target_config, started_at, stopped_at): |
412 | 412 | abort(500, "Invalid orbit configuration for '%s' : %s" |
413 | 413 | % (target_config['slug'], str(e))) |
414 | 414 | |
415 | + # try: | |
416 | + # started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT) | |
417 | + # except: | |
418 | + # abort(400, "Invalid started_at config : '%s'." % started_at) | |
419 | + # try: | |
420 | + # stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT) | |
421 | + # except: | |
422 | + # abort(400, "Invalid stopped_at config : '%s'." % stopped_at) | |
423 | + | |
415 | 424 | # Grab the list of netCDF files from Myriam's API |
416 | 425 | model_files = [] |
417 | - orbit_files = [] | |
418 | 426 | for model in models: |
419 | 427 | model_files = model_files + retrieve_amda_netcdf( |
420 | 428 | target_config['slug'], model['slug'], started_at, stopped_at |
421 | 429 | ) |
422 | - for orbit in orbits: | |
423 | - orbit_files = orbit_files + retrieve_amda_netcdf( | |
424 | - target_config['slug'], orbit['slug'], started_at, stopped_at | |
425 | - ) | |
426 | 430 | # Remove possible duplicates |
427 | 431 | model_files = set(model_files) |
428 | - orbit_files = set(orbit_files) | |
429 | 432 | |
433 | + # Let's return an empty dict when there's no data instead of crashing | |
430 | 434 | # if not len(model_files): |
431 | 435 | # abort(500, "No model files found for '%s'." % target_config['slug']) |
432 | 436 | # if not len(orbit_files): |
... | ... | @@ -434,20 +438,38 @@ def get_data_for_target(target_config, started_at, stopped_at): |
434 | 438 | |
435 | 439 | precision = "%Y-%m-%dT%H" # model and orbits times are only equal-ish |
436 | 440 | orbit_data = {} # keys are datetime as str, values arrays of XY |
437 | - for orbit_file in orbit_files: | |
438 | - log.debug("%s: opening orbit NETCDF4 '%s'..." % | |
439 | - (target_config['name'], orbit_file)) | |
440 | - cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") | |
441 | - times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms | |
442 | - data_hee = cdf_handle.variables['HEE'] | |
443 | - log.debug("%s: aggregating data from '%s'..." % | |
444 | - (target_config['name'], orbit_file)) | |
445 | - for time, datum_hee in zip(times, data_hee): | |
446 | - dtime = datetime_from_list(time) | |
447 | - if started_at <= dtime <= stopped_at: | |
448 | - dkey = dtime.strftime(precision) | |
449 | - orbit_data[dkey] = datum_hee | |
450 | - cdf_handle.close() | |
441 | + | |
442 | + for orbit in orbits: | |
443 | + if 'started_at' in orbit: | |
444 | + s0 = datetime.datetime.strptime(orbit['started_at'], FILE_DATE_FMT) | |
445 | + else: | |
446 | + s0 = started_at | |
447 | + if 'stopped_at' in orbit: | |
448 | + s1 = datetime.datetime.strptime(orbit['stopped_at'], FILE_DATE_FMT) | |
449 | + else: | |
450 | + s1 = stopped_at | |
451 | + | |
452 | + orbit_files = retrieve_amda_netcdf( | |
453 | + target_config['slug'], orbit['slug'], s0, s1 | |
454 | + ) | |
455 | + for orbit_file in orbit_files: | |
456 | + log.debug("%s: opening orbit NETCDF4 '%s'..." % | |
457 | + (target_config['name'], orbit_file)) | |
458 | + cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") | |
459 | + times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms | |
460 | + try: | |
461 | + data_hee = cdf_handle.variables['HEE'] | |
462 | + except KeyError: | |
463 | + data_hee = cdf_handle.variables['XYZ_HEE'] # p67 uses this | |
464 | + | |
465 | + log.debug("%s: aggregating data from '%s'..." % | |
466 | + (target_config['name'], orbit_file)) | |
467 | + for time, datum_hee in zip(times, data_hee): | |
468 | + dtime = datetime_from_list(time) | |
469 | + if s0 <= dtime <= s1: | |
470 | + dkey = dtime.strftime(precision) | |
471 | + orbit_data[dkey] = datum_hee | |
472 | + cdf_handle.close() | |
451 | 473 | |
452 | 474 | all_data = {} # keys are datetime as str, values tuples of data |
453 | 475 | for model_file in model_files: |
... | ... | @@ -681,6 +703,8 @@ def download_targets_tarball(targets, started_at, stopped_at): |
681 | 703 | in their own CSV file, and make a tarball of them. |
682 | 704 | `started_at` and `stopped_at` should be UTC strings. |
683 | 705 | |
706 | + Note: we do not use this route anymore, but let's keep it shelved for now. | |
707 | + | |
684 | 708 | targets: string list of targets' slugs, separated by `-`. |
685 | 709 | """ |
686 | 710 | separator = '-' |
... | ... | @@ -694,17 +718,16 @@ def download_targets_tarball(targets, started_at, stopped_at): |
694 | 718 | if 0 == len(targets_configs): |
695 | 719 | abort(400, "No valid targets specified. What are you doing?") |
696 | 720 | |
697 | - date_fmt = FILE_DATE_FMT | |
698 | 721 | try: |
699 | - started_at = datetime.datetime.strptime(started_at, date_fmt) | |
722 | + started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT) | |
700 | 723 | except: |
701 | 724 | abort(400, "Invalid started_at parameter : '%s'." % started_at) |
702 | 725 | try: |
703 | - stopped_at = datetime.datetime.strptime(stopped_at, date_fmt) | |
726 | + stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT) | |
704 | 727 | except: |
705 | 728 | abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at) |
706 | - sta = started_at.strftime(date_fmt) | |
707 | - sto = stopped_at.strftime(date_fmt) | |
729 | + sta = started_at.strftime(FILE_DATE_FMT) | |
730 | + sto = stopped_at.strftime(FILE_DATE_FMT) | |
708 | 731 | |
709 | 732 | gzip_filename = "%s_%s_%s.tar.gz" % (separator.join(targets), sta, sto) |
710 | 733 | local_gzip_file = join(CACHE_DIR, gzip_filename) |
... | ... | @@ -780,9 +803,9 @@ def download_targets_netcdf(targets, params, started_at, stopped_at): |
780 | 803 | log.debug("Creating the NetCDF file '%s'..." % nc_filename) |
781 | 804 | nc_handle = Dataset(nc_path, "w", format="NETCDF4") |
782 | 805 | try: |
783 | - nc_handle.description = "TODO" # todo | |
806 | + nc_handle.description = "Model and orbit data for targets" # todo | |
784 | 807 | nc_handle.history = "Created " + time.ctime(time.time()) |
785 | - nc_handle.source = "Transplanet (CDDP)" | |
808 | + nc_handle.source = "Heliopropa (CDDP)" | |
786 | 809 | available_params = list(PROPERTIES) |
787 | 810 | for target in targets_configs: |
788 | 811 | target_slug = target['slug'] | ... | ... |
web/view/layout.html.jinja2
... | ... | @@ -42,8 +42,9 @@ |
42 | 42 | <!-- Navigation. We hide it in small screens. --> |
43 | 43 | <nav class="mdl-navigation mdl-layout--large-screen-only"> |
44 | 44 | <a class="mdl-navigation__link" href="https://gitlab.irap.omp.eu/CDPP/SPACEWEATHERONLINE">v{{ version }}</a> |
45 | - <a class="mdl-navigation__link" href="https://onlinelibrary.wiley.com/doi/10.1029/2004JA010959/abstract">Model</a> | |
46 | - <a class="mdl-navigation__link" href="https://ec.europa.eu/programmes/horizon2020/">Horizon 2020</a> | |
45 | + <a class="mdl-navigation__link" href="https://onlinelibrary.wiley.com/doi/10.1029/2004JA010959/abstract">Tao's Model</a> | |
46 | + <a class="mdl-navigation__link" href="http://planetaryspaceweather-europlanet.irap.omp.eu">Planetary Space Weather Services</a> | |
47 | +{# <a class="mdl-navigation__link" href="https://ec.europa.eu/programmes/horizon2020/">Horizon 2020</a>#} | |
47 | 48 | </nav> |
48 | 49 | </div> |
49 | 50 | </header> | ... | ... |