Commit 952e3d8f07394e67498b318c70f112f833568bba
1 parent: 0af09c17
Exists in master and in 2 other branches

Move to another spacepy for CDF generation, and clean up the python dependencies.

Showing 2 changed files with 67 additions and 47 deletions
requirements.txt
-## The following requirements were added by pip freeze:
-appdirs==1.4.3
-click==6.7
-dateutils==0.6.6
+## The following requirements were originally generated with `pip freeze`.
+
+
+## MAIN DEPENDENCIES
+
 Flask==0.12
-h5py==2.3.1
-itsdangerous==0.24
 Jinja2==2.9.5
 Markdown==2.6.8
 MarkupSafe==1.0
+python-slugify==1.2.4
+
+
+## SECOND LEVEL DEPS
+# (We could probably comment them out, but hey)
+
+appdirs==1.4.3
+click==6.7
+dateutils==0.6.6
+itsdangerous==0.24
 mglob==0.4
-netCDF4==1.2.7
-numpy==1.12.0
-packaging==16.8
-#pkg-resources==0.0.0
 pyparsing==2.2.0
-python-slugify==1.2.4
-PyYAML==3.12
+packaging==16.8
 six==1.10.0
 Werkzeug==0.12
-# spacepy==0.7 # requires a lot of system deps, like fortran
-CDF==0.32
\ No newline at end of file
+
+
+## FILE FORMATS
+
+PyYAML==3.12
+netCDF4==1.2.7
+h5py==2.3.1
+# We only use pycdf from spacepy.
+spacepy==0.1.*
+# spacepy does not like recent numpy versions (1.7 looks ok)
+numpy==1.7.*
+
+
+## WEIRD STUFF
+
+# This version number makes pip yell on the prod server (?)
+#pkg-resources==0.0.0
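
The pins above swap the old CDF==0.32 binding for spacepy==0.1.*, whose pycdf module wraps the NASA CDF C library, and hold numpy at 1.7.* because newer releases broke that spacepy series. A minimal sanity check for the pinned stack, assuming the C library sits where web/run.py expects it (the path is an assumption; point CDF_LIB at your own installation of the library from https://cdf.gsfc.nasa.gov/):

    # Sketch: verify the pinned spacepy/numpy pair can load pycdf.
    # pycdf locates the CDF C library via the CDF_LIB environment variable,
    # so set it before the import; the value below mirrors the constant
    # hardcoded in web/run.py and may differ on your machine.
    import os
    os.environ.setdefault('CDF_LIB', '/usr/local/lib/libcdf')

    import numpy                   # pinned to 1.7.* for spacepy 0.1.* compatibility
    from spacepy import pycdf      # fails here if the C library cannot be found

    print("numpy %s loaded, pycdf ready" % numpy.__version__)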
web/run.py
@@ -9,7 +9,6 @@ import random
 import tarfile
 import time
 import urllib
-import cdf
 from csv import writer as csv_writer
 from math import sqrt
 from os import environ, remove as removefile
@@ -61,6 +60,9 @@ log.addHandler(logHandler)
 
 # HARDCODED CONFIGURATION #####################################################
 
+# Absolute path to the installed CDF library from https://cdf.gsfc.nasa.gov/
+CDF_LIB = '/usr/local/lib/libcdf'
+
 # Absolute path to the data cache directory
 CACHE_DIR = get_path('../cache')
 
@@ -930,13 +932,15 @@ def download_targets_cdf(targets, started_at, stopped_at):
 
     if not isfile(cdf_path):
         log.debug("Creating the CDF file '%s'..." % cdf_filename)
+        environ['CDF_LIB'] = CDF_LIB
+        from spacepy import pycdf
         try:
-            cdf_handle = cdf.archive()
+            cdf_handle = pycdf.CDF(cdf_path, masterpath='')
             description = "Model and orbit data for %s." % \
                           ', '.join([t['name'] for t in targets_configs])
-            cdf_handle.attributes['Description'] = description
-            cdf_handle.attributes['Author'] = "Heliopropa.irap.omp.eu (CDPP)"
-            cdf_handle.attributes['Created'] = str(time.ctime(time.time()))
+            cdf_handle.attrs['Description'] = description
+            cdf_handle.attrs['Author'] = "Heliopropa.irap.omp.eu (CDPP)"
+            cdf_handle.attrs['Created'] = str(time.ctime(time.time()))
 
             available_params = list(PROPERTIES)
             for target in targets_configs:
@@ -945,15 +949,13 @@ def download_targets_cdf(targets, started_at, stopped_at):
                 dkeys = sorted(data)
 
                 values = []
-                units = "hours since 1970-01-01 00:00:00"
-                calendar = "standard"
                 for dkey in dkeys:
-                    time_as_string = data[dkey][0][:-6]  # remove +00:00 tail
-                    date = datetime.datetime.strptime(time_as_string, FILE_DATE_FMT)
-                    values.append(date2num(date, units=units, calendar=calendar))
-                k = "%s_time" % target_slug
-                cdf_handle[k] = values
-                cdf_handle[k].attributes['units'] = units
+                    time_str = data[dkey][0][:-6]  # remove +00:00 tail
+                    date = datetime.datetime.strptime(time_str, FILE_DATE_FMT)
+                    values.append(date)
+                kt = "%s_time" % target_slug
+                cdf_handle[kt] = values
+                cdf_handle[kt].attrs['FIELDNAM'] = "Time since 0 A.D"
 
                 for param in params:
                     k = "%s_%s" % (target_slug, param)
@@ -962,10 +964,16 @@ def download_targets_cdf(targets, started_at, stopped_at):
                     for dkey in dkeys:
                         values.append(data[dkey][i])
                     cdf_handle[k] = values
-                    cdf_handle[k].attributes['units'] = PARAMETERS[param]['units']
-
-                k_xhee = "%s_xhee" % target_slug
-                k_yhee = "%s_yhee" % target_slug
+                    attrs = cdf_handle[k].attrs
+                    attrs['UNITS'] = PARAMETERS[param]['units']
+                    attrs['LABLAXIS'] = PARAMETERS[param]['name']
+                    attrs['FIELDNAM'] = PARAMETERS[param]['title']
+                    if values:
+                        attrs['VALIDMIN'] = min(values)
+                        attrs['VALIDMAX'] = max(values)
+
+                kx = "%s_xhee" % target_slug
+                ky = "%s_yhee" % target_slug
                 values_xhee = []
                 values_yhee = []
                 index_x = available_params.index('xhee')
@@ -973,23 +981,21 @@ def download_targets_cdf(targets, started_at, stopped_at):
                 for dkey in dkeys:
                     values_xhee.append(data[dkey][index_x])
                     values_yhee.append(data[dkey][index_y])
-                cdf_handle[k_xhee] = values_xhee
-                cdf_handle[k_yhee] = values_yhee
-                cdf_handle[k_xhee].attributes['units'] = 'Au'
-                cdf_handle[k_yhee].attributes['units'] = 'Au'
+                cdf_handle[kx] = values_xhee
+                cdf_handle[ky] = values_yhee
+                cdf_handle[kx].attrs['UNITS'] = 'Au'
+                cdf_handle[ky].attrs['UNITS'] = 'Au'
 
             log.debug("Writing CDF '%s'..." % cdf_filename)
-            cdf_handle.save(cdf_path)
+            cdf_handle.close()
+            log.debug("Wrote CDF '%s'." % cdf_filename)
 
         except Exception as e:
             log.error("Failed to generate CDF '%s'." % cdf_filename)
-            removefile(cdf_path)
+            if isfile(cdf_path):
+                removefile(cdf_path)
             raise
 
-        finally:
-            # cdf_handle.close()
-            pass
-
     if not isfile(cdf_path):
         abort(500, "No CDF to serve. Looked at '%s'." % cdf_path)
 
@@ -1080,11 +1086,6 @@ def log_clear():
 # MAIN ########################################################################
 
 if __name__ == "__main__":
-
-    a = "2015171150000001"
-
-
-
-    # Debug mode on, as the production server does not use this but run.wsgi
+    # Debug mode is on, as the production server does not use this but run.wsgi
     extra_files = [get_path('../config.yml')]
     app.run(debug=True, extra_files=extra_files)
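
In short, the old cdf.archive() handle is replaced by spacepy.pycdf.CDF(...), .attributes becomes .attrs, timestamps are stored as plain datetime objects instead of date2num floats, and the file is written by close() rather than save(). A self-contained sketch of that write pattern, with a hypothetical file name, variable names and values:

    # Sketch of the pycdf write pattern adopted above; 'example.cdf',
    # 'venus_time', 'venus_pdyn' and the values are invented for illustration.
    import datetime
    import os

    os.environ.setdefault('CDF_LIB', '/usr/local/lib/libcdf')  # set before importing pycdf
    from spacepy import pycdf

    if os.path.exists('example.cdf'):
        os.remove('example.cdf')                    # pycdf will not overwrite an existing file

    cdf = pycdf.CDF('example.cdf', masterpath='')   # masterpath='' creates a new, empty CDF
    cdf.attrs['Author'] = "Heliopropa.irap.omp.eu (CDPP)"

    # datetime objects are converted to CDF epoch values on assignment.
    cdf['venus_time'] = [datetime.datetime(2017, 3, 1), datetime.datetime(2017, 3, 2)]
    cdf['venus_pdyn'] = [0.9, 1.1]
    cdf['venus_pdyn'].attrs['UNITS'] = 'nPa'

    cdf.close()                                     # flushes everything to disk; no save() in pycdf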