Commit aa7247d6dad03769723529cfea0fe56213d8d1c2
1 parent 7b5642ae
Exists in master and in 2 other branches
Generate a CDF file (not NetCDF)
Bump to v1.0.0-rc4
Showing 5 changed files with 124 additions and 9 deletions
CHANGELOG.md
@@ -7,8 +7,6 @@
 - [ ] CRON statements to call the cache cleanup and warmup
 - [ ] Cache warmup (generate for today's default interval) `/cache/warmup`
 - [ ] Give the future data another color
-- [ ] Sort times series by closeness to the sun
-- [ ] Generate a CDF file (not NetCDF)
 
 An heliospheric propagation 1D MHD model for solar wind prediction at planets, probes and comets.
 
@@ -16,6 +14,8 @@ An heliospheric propagation 1D MHD model for solar wind prediction at planets, p
 
 ## 1.0.0-rc4
 
+- [x] Sort times series by closeness to the sun
+- [x] Generate a CDF file (not NetCDF)
 - [x] Normalize time interval for time series
 - [x] Make the footer images clickable
 - [x] Highlight the visits counter
VERSION
requirements.txt
@@ -18,3 +18,5 @@ python-slugify==1.2.4
 PyYAML==3.12
 six==1.10.0
 Werkzeug==0.12
+# spacepy==0.7 # requires a lot of system deps, like fortran
+CDF==0.32
\ No newline at end of file
web/run.py
@@ -9,6 +9,7 @@ import random
 import tarfile
 import time
 import urllib
+import cdf
 from csv import writer as csv_writer
 from math import sqrt
 from os import environ, remove as removefile
@@ -112,7 +113,7 @@ PARAMETERS = {
         'slug': 'dens',
         'name': 'Density',
         'title': 'The density N.',
-        'units': u'cm⁻³',
+        'units': 'cm^-3',
         'active': False,
         'position': 50,
     },
@@ -438,7 +439,7 @@ def get_data_for_target(target_config, started_at, stopped_at):
             _s1 = min(_s1, _sto)
         else:
             _s1 = _sto
-        return _s1, _s0
+        return _s0, _s1
 
     precision = "%Y-%m-%dT%H" # model and orbits times are only equal-ish
     orbit_data = {} # keys are datetime as str, values arrays of XY
@@ -770,7 +771,7 @@ def download_targets_tarball(targets, started_at, stopped_at):
 def download_targets_netcdf(targets, params, started_at, stopped_at):
     """
     Grab data and orbit data for the specified `target`,
-    rearrange it and return it as a CSV file.
+    rearrange it and return it as a NetCDF file.
     `started_at` and `stopped_at` are expected to be UTC.
 
     targets: string list of targets' slugs, separated by `-`.
@@ -884,6 +885,117 @@ def download_targets_netcdf(targets, params, started_at, stopped_at):
     return send_from_directory(CACHE_DIR, nc_filename)
 
 
+@app.route("/<targets>_<started_at>_<stopped_at>.cdf")
+def download_targets_cdf(targets, started_at, stopped_at):
+    """
+    Grab data and orbit data for the specified `target`,
+    rearrange it and return it as a CDF file.
+    `started_at` and `stopped_at` are expected to be UTC.
+
+    targets: string list of targets' slugs, separated by `-`.
+    params: string list of targets' parameters, separated by `-`.
+    """
+    separator = '-'  # /!\ this char should never be in target's slugs
+    targets = targets.split(separator)
+    targets.sort()
+    targets_configs = []
+    for target in targets:
+        if not target:
+            abort(400, "Invalid targets format : `%s`." % targets)
+        targets_configs.append(get_target_config(target))
+    if 0 == len(targets_configs):
+        abort(400, "No valid targets specified. What are you doing?")
+
+    params = PARAMETERS.keys()
+    # params = params.split(separator)
+    # params.sort()
+    # if 0 == len(params):
+    #     abort(400, "No valid parameters specified. What are you doing?")
+    # if not is_list_in_list(params, PARAMETERS.keys()):
+    #     abort(400, "Some parameters are not recognized in '%s'." % str(params))
+
+    try:
+        started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT)
+    except:
+        abort(400, "Invalid started_at parameter : '%s'." % started_at)
+    try:
+        stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT)
+    except:
+        abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at)
+    sta = started_at.strftime(FILE_DATE_FMT)
+    sto = stopped_at.strftime(FILE_DATE_FMT)
+
+    cdf_filename = "%s_%s_%s.cdf" % (separator.join(targets), sta, sto)
+    cdf_path = join(CACHE_DIR, cdf_filename)
+
+    if not isfile(cdf_path):
+        log.debug("Creating the CDF file '%s'..." % cdf_filename)
+        try:
+            cdf_handle = cdf.archive()
+            description = "Model and orbit data for %s." % \
+                          ', '.join([t['name'] for t in targets_configs])
+            cdf_handle.attributes['Description'] = description
+            cdf_handle.attributes['Author'] = "Heliopropa.irap.omp.eu (CDPP)"
+            cdf_handle.attributes['Created'] = str(time.ctime(time.time()))
+
+            available_params = list(PROPERTIES)
+            for target in targets_configs:
+                target_slug = target['slug']
+                data = get_data_for_target(target, started_at, stopped_at)
+                dkeys = sorted(data)
+
+                values = []
+                units = "hours since 1970-01-01 00:00:00"
+                calendar = "standard"
+                for dkey in dkeys:
+                    time_as_string = data[dkey][0][:-6]  # remove +00:00 tail
+                    date = datetime.datetime.strptime(time_as_string, FILE_DATE_FMT)
+                    values.append(date2num(date, units=units, calendar=calendar))
+                k = "%s_time" % target_slug
+                cdf_handle[k] = values
+                cdf_handle[k].attributes['units'] = units
+
+                for param in params:
+                    k = "%s_%s" % (target_slug, param)
+                    values = []
+                    i = available_params.index(param)
+                    for dkey in dkeys:
+                        values.append(data[dkey][i])
+                    cdf_handle[k] = values
+                    cdf_handle[k].attributes['units'] = PARAMETERS[param]['units']
+
+                k_xhee = "%s_xhee" % target_slug
+                k_yhee = "%s_yhee" % target_slug
+                values_xhee = []
+                values_yhee = []
+                index_x = available_params.index('xhee')
+                index_y = available_params.index('yhee')
+                for dkey in dkeys:
+                    values_xhee.append(data[dkey][index_x])
+                    values_yhee.append(data[dkey][index_y])
+                cdf_handle[k_xhee] = values_xhee
+                cdf_handle[k_yhee] = values_yhee
+                cdf_handle[k_xhee].attributes['units'] = 'Au'
+                cdf_handle[k_yhee].attributes['units'] = 'Au'
+
+            log.debug("Writing CDF '%s'..." % cdf_filename)
+            cdf_handle.save(cdf_path)
+
+        except Exception as e:
+            log.error("Failed to generate CDF '%s'." % cdf_filename)
+            removefile(cdf_path)
+            raise
+
+        finally:
+            # cdf_handle.close()
+            pass
+
+    if not isfile(cdf_path):
+        abort(500, "No CDF to serve. Looked at '%s'." % cdf_path)
+
+    return send_from_directory(CACHE_DIR, cdf_filename)
+
+
 # API #########################################################################
 
 @app.route("/cache/clear")
web/view/home.html.jinja2
@@ -538,8 +538,8 @@ var configuration = {
     orbits_container: '#orbits',
     api: {
         'data_for_interval': "{{ request.url_root }}<target>_<started_at>_<stopped_at>.csv",
-        'download': "{{ request.url_root }}<targets>_<started_at>_<stopped_at>.nc",
-        'samp': "{{ request.url_root }}<targets>_<params>_<started_at>_<stopped_at>.nc"
+        'download': "{{ request.url_root }}<targets>_<started_at>_<stopped_at>.cdf",
+        'samp': "{{ request.url_root }}<targets>_<started_at>_<stopped_at>.cdf"
     },
     sun: {
         img: '{{ static('img/sun_128.png') }}'
@@ -623,6 +623,7 @@ jQuery().ready(function($){
     });
     $('#download').on("click", function(e){
        var url = sw.buildDownloadUrl();
+       console.info("Downloading " + url);
        $.ajax({
            type: 'GET',
            url: url,
@@ -677,7 +678,7 @@ jQuery().ready(function($){
        }
        var name = sw.buildSampName();
        var url = sw.buildSampUrl();
-       console.log(name, url);
+       console.info("Trying to activate SAMP…", name, url);
        connector.runWithConnection(function (connection) {
            var msg = new samp.Message("table.load.cdf", {
                "name": name,