Commit ea6c8d5d603b72a8e79ab29245dbc9bbad08a8d8

Authored by Goutte
1 parent dc0be992

Add interval constraints for orbit models (Rosetta uses P67 after a time)

 ## Future ?

-- [ ] Optimize CSV generation (numpy vectorization ?)
+- [ ] Optimize data aggregation (numpy vectorization?)
 - [ ] Credit the author of the pixel art planets
 - [ ] Add a README to the download tarball
 - [ ] Set the log level to _error_ in production (see `web/run.py`)
-- [ ] Cache warmup (generate for today's default interval) `/cache/warmup`
 - [ ] CRON statements to call the cache cleanup and warmup
-- [ ] Add a priority for models to support Juno's trajectory (using Jupiter's)
+- [ ] Cache warmup (generate for today's default interval) `/cache/warmup`
+- [ ] Normalize time interval for time series
+- [ ] Give the future data another color
+- [ ] Sort time series by closeness to the sun
+- [ ] Generate a CDF file (not NetCDF)
+- [ ] Make the footer images clickable
+- [ ] Move the link to the source in the footer
+- [ ] Highlight the visits counter
+
+A heliospheric propagation 1D MHD model for solar wind prediction at planets, probes and comets.
+


 ## 1.0.0-rc4

-- [ ] Make the tarball with netcdf files instead of CSVs
+- [x] Add interval constraints for orbit models (Rosetta uses P67 after a time)
+- [x] Make the download a netcdf file instead of a tarball of CSVs
 - [x] Support having no position to display (for Rosetta in some intervals)
 - [x] Make the local cache more resilient to corrupted downloads
 - [x] Make the local cache more resilient to naming collisions
@@ -156,6 +156,9 @@ targets:
     orbit:
       models:
         - slug: 'ros_orb_cruise'
+          stopped_at: '2014-08-02T00:00:00'
+        - slug: 'p67_orb_all'
+          started_at: '2014-08-02T00:00:00'
     models:
       - slug: 'tao_ros_sw'
         locked: false
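
The new `started_at` / `stopped_at` keys let an orbit model cover only part of the requested interval: with the configuration above, `ros_orb_cruise` stops providing Rosetta's position on 2014-08-02 and `p67_orb_all` takes over from that date. A minimal sketch of how such a bound can be resolved against the requested interval (the exact `FILE_DATE_FMT` value here is an assumption matching the timestamps above; the real logic lives in `get_data_for_target` below):

    import datetime

    FILE_DATE_FMT = "%Y-%m-%dT%H:%M:%S"  # assumed, matches '2014-08-02T00:00:00'

    def orbit_interval(orbit, started_at, stopped_at):
        # An orbit model's own bound, when present, overrides the requested one.
        s0, s1 = started_at, stopped_at
        if 'started_at' in orbit:
            s0 = datetime.datetime.strptime(orbit['started_at'], FILE_DATE_FMT)
        if 'stopped_at' in orbit:
            s1 = datetime.datetime.strptime(orbit['stopped_at'], FILE_DATE_FMT)
        return s0, s1
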
@@ -392,7 +392,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at):
         local_netc_files.append(local_netc_file)
         log.debug("Unzipped '%s'." % local_gzip_file)

-    return local_netc_files
+    return list(set(local_netc_files))  # remove possible dupes


 def get_data_for_target(target_config, started_at, stopped_at):
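
Side note on the dedup above: going through `set()` also drops the original order of the downloaded files. If that order ever matters, an order-preserving variant is possible (a sketch, not part of this commit):

    from collections import OrderedDict

    # Removes duplicates while keeping the first occurrence of each file path.
    local_netc_files = list(OrderedDict.fromkeys(local_netc_files))
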
@@ -412,21 +412,25 @@ def get_data_for_target(target_config, started_at, stopped_at):
         abort(500, "Invalid orbit configuration for '%s' : %s"
               % (target_config['slug'], str(e)))

+    # try:
+    #     started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT)
+    # except:
+    #     abort(400, "Invalid started_at config : '%s'." % started_at)
+    # try:
+    #     stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT)
+    # except:
+    #     abort(400, "Invalid stopped_at config : '%s'." % stopped_at)
+
     # Grab the list of netCDF files from Myriam's API
     model_files = []
-    orbit_files = []
     for model in models:
         model_files = model_files + retrieve_amda_netcdf(
             target_config['slug'], model['slug'], started_at, stopped_at
         )
-    for orbit in orbits:
-        orbit_files = orbit_files + retrieve_amda_netcdf(
-            target_config['slug'], orbit['slug'], started_at, stopped_at
-        )
     # Remove possible duplicates
     model_files = set(model_files)
-    orbit_files = set(orbit_files)

+    # Let's return an empty dict when there's no data instead of crashing
     # if not len(model_files):
     #     abort(500, "No model files found for '%s'." % target_config['slug'])
     # if not len(orbit_files):
@@ -434,20 +438,38 @@ def get_data_for_target(target_config, started_at, stopped_at):

     precision = "%Y-%m-%dT%H"  # model and orbits times are only equal-ish
     orbit_data = {}  # keys are datetime as str, values arrays of XY
-    for orbit_file in orbit_files:
-        log.debug("%s: opening orbit NETCDF4 '%s'..." %
-                  (target_config['name'], orbit_file))
-        cdf_handle = Dataset(orbit_file, "r", format="NETCDF4")
-        times = cdf_handle.variables['Time']  # YYYY DOY HH MM SS .ms
-        data_hee = cdf_handle.variables['HEE']
-        log.debug("%s: aggregating data from '%s'..." %
-                  (target_config['name'], orbit_file))
-        for time, datum_hee in zip(times, data_hee):
-            dtime = datetime_from_list(time)
-            if started_at <= dtime <= stopped_at:
-                dkey = dtime.strftime(precision)
-                orbit_data[dkey] = datum_hee
-        cdf_handle.close()
+
+    for orbit in orbits:
+        if 'started_at' in orbit:
+            s0 = datetime.datetime.strptime(orbit['started_at'], FILE_DATE_FMT)
+        else:
+            s0 = started_at
+        if 'stopped_at' in orbit:
+            s1 = datetime.datetime.strptime(orbit['stopped_at'], FILE_DATE_FMT)
+        else:
+            s1 = stopped_at
+
+        orbit_files = retrieve_amda_netcdf(
+            target_config['slug'], orbit['slug'], s0, s1
+        )
+        for orbit_file in orbit_files:
+            log.debug("%s: opening orbit NETCDF4 '%s'..." %
+                      (target_config['name'], orbit_file))
+            cdf_handle = Dataset(orbit_file, "r", format="NETCDF4")
+            times = cdf_handle.variables['Time']  # YYYY DOY HH MM SS .ms
+            try:
+                data_hee = cdf_handle.variables['HEE']
+            except KeyError:
+                data_hee = cdf_handle.variables['XYZ_HEE']  # p67 uses this
+
+            log.debug("%s: aggregating data from '%s'..." %
+                      (target_config['name'], orbit_file))
+            for time, datum_hee in zip(times, data_hee):
+                dtime = datetime_from_list(time)
+                if s0 <= dtime <= s1:
+                    dkey = dtime.strftime(precision)
+                    orbit_data[dkey] = datum_hee
+            cdf_handle.close()

     all_data = {}  # keys are datetime as str, values tuples of data
     for model_file in model_files:
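
The `precision` format above is what makes the model/orbit join work even though the two time series are only equal-ish: both sides are truncated to the hour before being used as dictionary keys. A tiny illustration (the datetimes here are made up):

    import datetime

    precision = "%Y-%m-%dT%H"
    model_time = datetime.datetime(2014, 8, 2, 13, 0, 0)
    orbit_time = datetime.datetime(2014, 8, 2, 13, 12, 34)  # close, but not identical

    model_time.strftime(precision)  # '2014-08-02T13'
    orbit_time.strftime(precision)  # '2014-08-02T13' -- same key, so the samples line up
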
@@ -681,6 +703,8 @@ def download_targets_tarball(targets, started_at, stopped_at):
     in their own CSV file, and make a tarball of them.
     `started_at` and `stopped_at` should be UTC strings.

+    Note: we do not use this route anymore, but let's keep it shelved for now.
+
     targets: string list of targets' slugs, separated by `-`.
     """
     separator = '-'
@@ -694,17 +718,16 @@ def download_targets_tarball(targets, started_at, stopped_at):
     if 0 == len(targets_configs):
         abort(400, "No valid targets specified. What are you doing?")

-    date_fmt = FILE_DATE_FMT
     try:
-        started_at = datetime.datetime.strptime(started_at, date_fmt)
+        started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT)
     except:
         abort(400, "Invalid started_at parameter : '%s'." % started_at)
     try:
-        stopped_at = datetime.datetime.strptime(stopped_at, date_fmt)
+        stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT)
     except:
         abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at)
-    sta = started_at.strftime(date_fmt)
-    sto = stopped_at.strftime(date_fmt)
+    sta = started_at.strftime(FILE_DATE_FMT)
+    sto = stopped_at.strftime(FILE_DATE_FMT)

     gzip_filename = "%s_%s_%s.tar.gz" % (separator.join(targets), sta, sto)
     local_gzip_file = join(CACHE_DIR, gzip_filename)
@@ -780,9 +803,9 @@ def download_targets_netcdf(targets, params, started_at, stopped_at):
     log.debug("Creating the NetCDF file '%s'..." % nc_filename)
     nc_handle = Dataset(nc_path, "w", format="NETCDF4")
     try:
-        nc_handle.description = "TODO"  # todo
+        nc_handle.description = "Model and orbit data for targets"  # todo
         nc_handle.history = "Created " + time.ctime(time.time())
-        nc_handle.source = "Transplanet (CDDP)"
+        nc_handle.source = "Heliopropa (CDDP)"
         available_params = list(PROPERTIES)
         for target in targets_configs:
             target_slug = target['slug']
web/view/layout.html.jinja2
@@ -42,8 +42,9 @@
       <!-- Navigation. We hide it in small screens. -->
       <nav class="mdl-navigation mdl-layout--large-screen-only">
         <a class="mdl-navigation__link" href="https://gitlab.irap.omp.eu/CDPP/SPACEWEATHERONLINE">v{{ version }}</a>
-        <a class="mdl-navigation__link" href="https://onlinelibrary.wiley.com/doi/10.1029/2004JA010959/abstract">Model</a>
-        <a class="mdl-navigation__link" href="https://ec.europa.eu/programmes/horizon2020/">Horizon 2020</a>
+        <a class="mdl-navigation__link" href="https://onlinelibrary.wiley.com/doi/10.1029/2004JA010959/abstract">Tao's Model</a>
+        <a class="mdl-navigation__link" href="http://planetaryspaceweather-europlanet.irap.omp.eu">Planetary Space Weather Services</a>
+{#        <a class="mdl-navigation__link" href="https://ec.europa.eu/programmes/horizon2020/">Horizon 2020</a>#}
       </nav>
     </div>
   </header>