Commit c074c0295d513ef2af5db5cd9858fe99c9d5048f

Authored by hitier
2 parents: 8b29751b, c269db3f
Exists in branches rhitier-dev and dev

Merge branch 'ToPython3' into dev

Showing 2 changed files with 66 additions and 56 deletions
web/run.py 100755 → 100644
@@ -1,6 +1,6 @@
 # coding=utf-8
 
-import StringIO
+from io import StringIO
 import datetime
 import gzip
 import json
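Note: Python 3 folds the old StringIO module into io, so the class is now
imported rather than the module. A minimal sketch of the two spellings:

    # Python 2 (removed in Python 3):
    #   import StringIO
    #   si = StringIO.StringIO()
    # Python 3:
    from io import StringIO

    si = StringIO()
    si.write("time_min,time_max\n")
    print(si.getvalue())  # -> "time_min,time_max\n"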
@@ -8,7 +8,7 @@ import logging
 import random
 import tarfile
 import time
-import urllib
+import urllib.request as urllib_request
 import requests
 import re # regex
 import numpy
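Note: Python 2's urllib was split across urllib.request, urllib.parse and
urllib.error in Python 3; aliasing the submodule, as done here, keeps call
sites short. A minimal sketch (the URL is a placeholder):

    import urllib.request as urllib_request

    response = urllib_request.urlopen("http://example.com/list.json")
    payload = response.read()  # bytes, not str, under Python 3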
@@ -25,8 +25,13 @@ from dateutil import parser as dateparser
 from flask import Flask
 from flask import request
 from flask import url_for, send_from_directory, abort as abort_flask
-from jinja2 import Environment, FileSystemLoader, Markup
-from yaml import load as yaml_load
+from jinja2 import Environment, FileSystemLoader
+from jinja2.utils import markupsafe
+from yaml import load as yaml_load, dump
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
 from netCDF4 import Dataset, date2num
 
 
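Note: the try/except prefers PyYAML's C-accelerated CLoader (available when
PyYAML was built against libyaml) and silently falls back to the pure-Python
Loader, so the same code runs on either kind of install. A minimal sketch of
the pattern, with the dump-side counterpart this commit does not need:

    try:
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import Loader, Dumper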
@@ -48,7 +53,7 @@ with open(get_path('../VERSION'), 'r') as version_file:
 
 # CONFIG
 with open(get_path('../config.yml'), 'r') as config_file:
-    config = yaml_load(config_file.read())
+    config = yaml_load(config_file.read(), Loader=Loader)
 
 FILE_DATE_FMT = "%Y-%m-%dT%H:%M:%S"
 MOMENT_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
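Note: since PyYAML 5, calling load() without an explicit Loader emits a
warning, because the full loader can construct arbitrary Python objects from
untrusted input; hence the new Loader=Loader argument. For a config file you
control this is fine; safe_load is the usual choice otherwise. A short
sketch:

    import yaml

    doc = "targets:\n  - slug: jupiter\n"
    config = yaml.load(doc, Loader=yaml.Loader)  # what the commit does
    config = yaml.safe_load(doc)                 # stricter alternative
    print(config['targets'][0]['slug'])          # -> jupiter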
@@ -148,13 +153,11 @@ PARAMETERS = {
     },
 }
 
-
 # SETUP ENVIRONMENT ###########################################################
 
 environ['SPACEPY'] = CACHE_DIR
 environ['CDF_LIB'] = CDF_LIB
 
-
 # SETUP FLASK ENGINE ##########################################################
 
 app = Flask(__name__, root_path=THIS_DIRECTORY)
@@ -211,20 +214,20 @@ def markdown_filter(value, nl2br=False, p=True):
 
 
 _js_escapes = {
-    '\\': '\\u005C',
-    '\'': '\\u0027',
-    '"': '\\u0022',
-    '>': '\\u003E',
-    '<': '\\u003C',
-    '&': '\\u0026',
-    '=': '\\u003D',
-    '-': '\\u002D',
-    ';': '\\u003B',
-    u'\u2028': '\\u2028',
-    u'\u2029': '\\u2029'
+    '\\': '\\u005C',
+    '\'': '\\u0027',
+    '"': '\\u0022',
+    '>': '\\u003E',
+    '<': '\\u003C',
+    '&': '\\u0026',
+    '=': '\\u003D',
+    '-': '\\u002D',
+    ';': '\\u003B',
+    u'\u2028': '\\u2028',
+    u'\u2029': '\\u2029'
 }
 # Escape every ASCII character with a value less than 32.
-_js_escapes.update(('%c' % z, '\\u%04X' % z) for z in xrange(32))
+_js_escapes.update(('%c' % z, '\\u%04X' % z) for z in range(32))
 
 
 def escapejs_filter(value):
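Note: xrange() is gone in Python 3; range() is now lazy itself, so the
generator expression above works unchanged. A quick sketch of the
escape-table idea:

    _js_escapes = {}
    # Map each control character (codepoint < 32) to its \u00XX escape.
    _js_escapes.update(('%c' % z, '\\u%04X' % z) for z in range(32))
    assert _js_escapes['\n'] == '\\u000A'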
@@ -235,7 +238,7 @@ def escapejs_filter(value):
         else:
             escaped.append(letter)
 
-    return Markup("".join(escaped))
+    return markupsafe.Markup("".join(escaped))
 
 tpl_engine = Environment(loader=FileSystemLoader([get_path('view')]),
                          trim_blocks=True,
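Note: newer Jinja2 releases stopped re-exporting Markup, which lives in the
markupsafe package (a Jinja2 dependency). The commit reaches it via
jinja2.utils; importing it from markupsafe directly is the portable
spelling. A minimal sketch:

    from markupsafe import Markup

    escaped = Markup("&lt;b&gt;")  # marked safe: auto-escaping leaves it alone
    print(escaped + "<i>")         # concatenation escapes the unsafe side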
@@ -275,7 +278,7 @@ def render_view(view, context=None):
     """
     context = {} if context is None else context
     return tpl_engine.get_template(view).render(
-        dict(tpl_global_vars.items() + context.items())
+        dict(list(tpl_global_vars.items()) + list(context.items()))
     )
 
 
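Note: in Python 3 dict.items() returns a view, so the old items() + items()
concatenation raises TypeError; wrapping both sides in list() restores it.
On Python 3.5+ an unpacking merge is the more idiomatic equivalent:

    tpl_global_vars = {'version': '1.0'}
    context = {'visits': 42}
    merged = {**tpl_global_vars, **context}  # context wins on key clashes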
@@ -314,7 +317,7 @@ def round_time(dt=None, round_to=60):
         dt = datetime.datetime.now()
     seconds = (dt.replace(tzinfo=None) - dt.min).seconds
     rounding = (seconds + round_to / 2) // round_to * round_to
-    return dt + datetime.timedelta(0, rounding-seconds, -dt.microsecond)
+    return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond)
 
 
 def datetime_from_list(time_list):
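Note: only spacing changed here, but the line above hides a real 2-to-3
subtlety: round_to / 2 is now float division, while the later // keeps the
rounding well-defined, and timedelta accepts float seconds anyway. A quick
check of the behaviour:

    round_to = 60
    print(round_to / 2)  # 30.0 (float in Python 3, int in Python 2)
    print((45 + round_to / 2) // round_to * round_to)  # 60.0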
@@ -459,10 +462,10 @@ ORDER BY time_min, granule_gid
     try:
         response = requests.post(api_url, {
             'REQUEST': 'doQuery',
-            'LANG': 'ADQL',
-            'QUERY': query,
+            'LANG': 'ADQL',
+            'QUERY': query,
             'TIMEOUT': '30',
-            'FORMAT': 'VOTable/td'
+            'FORMAT': 'VOTable/td'
         })
 
         response_xml = response.text
@@ -516,7 +519,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at):
     remote_gzip_files = []
     while not success and retries < 3:
         try:
-            response = urllib.urlopen(url)
+            response = urllib_request.urlopen(url)
             remote_gzip_files = json.loads(response.read())
             if not remote_gzip_files:
                 raise Exception("Failed to fetch data at '%s'." % url)
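Note: under Python 3, urlopen().read() returns bytes, and json.loads()
accepts bytes directly only since Python 3.6, which is why the call above
still works there; on 3.5 and earlier the payload must be decoded first. A
minimal sketch:

    import json

    payload = b'["a.nc.gz", "b.nc.gz"]'     # what response.read() yields
    remote_gzip_files = json.loads(payload)  # fine on Python 3.6+
    remote_gzip_files = json.loads(payload.decode('utf-8'))  # any version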
@@ -529,7 +532,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at):
             # raise Exception("API says it's out of time at '%s'." % url)
             success = True
         except Exception as e:
-            log.warn("Failed (%d/3) '%s' : %s" % (retries+1, url, e.message))
+            log.warning("Failed (%d/3) '%s' : %s" % (retries + 1, url, e))
             remote_gzip_files = []
             errors.append(e)
         finally:
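Note: logging's warn() is a deprecated alias of warning(), hence the rename
throughout this commit. Exception.message is also gone in Python 3, so the
exception is formatted directly instead. A minimal sketch:

    import logging
    log = logging.getLogger(__name__)

    try:
        raise ValueError("boom")
    except Exception as e:
        # %s on the exception itself is the portable spelling of e.message.
        log.warning("Failed (%d/3) '%s' : %s", 1, "http://example.com", e)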
@@ -537,9 +540,9 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at):
     if not remote_gzip_files:
         log.error("Failed to retrieve data from AMDA.")
         log.error("Failed to fetch gzip files list for %s at '%s' : %s" %
-                  (orbiter, url, errors))
+                  (orbiter, url, errors))
         abort(400, "Failed to fetch gzip files list for %s at '%s' : %s" %
-              (orbiter, url, errors))
+              (orbiter, url, errors))
     else:
         remote_gzip_files = list(set(remote_gzip_files))
 
@@ -558,7 +561,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at):
         local_gzip_files.append(local_gzip_file)
         if not isfile(local_gzip_file):
             log.debug("Retrieving '%s'..." % local_gzip_file)
-            urllib.urlretrieve(remote_gzip_file, local_gzip_file)
+            urllib_request.urlretrieve(remote_gzip_file, local_gzip_file)
             log.debug("Retrieved '%s'." % local_gzip_file)
         else:
             log.debug("Found '%s' in the cache." % local_gzip_file)
@@ -708,13 +711,17 @@ def get_data_for_target(target_config, input_slug,
                   (target_config['name'], orbit_file))
         for ltime, datum_hee in zip(times, data_hee):
             try:
+                try:
+                    ltime = [str(i, 'UTF8') for i in ltime]
+                except Exception as e:
+                    print(e)
                 dtime = datetime_from_list(ltime)
             except Exception:
-                log.error("Failed to parse time from %s." % ltime)
+                log.error("Failed to parse time in get_data_for_target from %s." % ltime)
                 raise
             # Keep only what's in the interval
             if s0 <= dtime <= s1:
-                dkey = round_time(dtime, 60*60).strftime(precision)
+                dkey = round_time(dtime, 60 * 60).strftime(precision)
                 orbit_data[dkey] = datum_hee
         cdf_handle.close()
 
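Note: the inner try/except decodes the time fields, which this NetCDF data
apparently arrives as bytes under Python 3, before datetime_from_list()
parses them; when the values are already str, the conversion raises and the
except branch leaves them untouched. A sketch of the conversion (the sample
values are illustrative):

    ltime = [b'2018', b'03', b'28', b'00', b'00', b'00']
    ltime = [str(i, 'UTF8') for i in ltime]  # same as i.decode('UTF8')
    print(ltime)  # ['2018', '03', '28', '00', '00', '00']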
@@ -730,8 +737,8 @@ def get_data_for_target(target_config, input_slug,
         nc_keys.update(model['parameters'])
 
         if len(model_files) == 0:
-            log.warn("No model data for '%s' '%s'."
-                     % (target_config['slug'], model['slug']))
+            log.warning("No model data for '%s' '%s'."
+                        % (target_config['slug'], model['slug']))
 
         for model_file in model_files:
             log.debug("%s: opening model NETCDF4 '%s'..." %
@@ -769,6 +776,10 @@ def get_data_for_target(target_config, input_slug,
                 in zip(times, data_v, data_b, data_t, data_n, data_p, data_a):
 
             try:
+                try:
+                    ltime = [str(i, 'UTF8') for i in ltime]
+                except Exception as e:
+                    print(e)
                 dtime = datetime_from_list(ltime)
             except Exception:
                 log.error("Failed to parse time from %s." % ltime)
@@ -777,7 +788,7 @@ def get_data_for_target(target_config, input_slug,
             if not (s0 <= dtime <= s1):
                 continue # Cull what's out of the interval
 
-            droundtime = round_time(dtime, 60*60)
+            droundtime = round_time(dtime, 60 * 60)
             dkey = droundtime.strftime(precision)
 
             x_hee = None
@@ -849,7 +860,7 @@ def get_data_for_target(target_config, input_slug,
 def generate_csv_contents(target_slug, input_slug, started_at, stopped_at):
     target_config = get_target_config(target_slug)
     log.debug("Crunching CSV contents for '%s'..." % target_config['name'])
-    si = StringIO.StringIO()
+    si = StringIO()
     cw = csv_writer(si)
     cw.writerow(PROPERTIES)
 
@@ -901,8 +912,9 @@ def generate_csv_file_if_needed(target_slug, input_slug,
         for trace in extract_tb(exc_traceback):
             log.error(trace)
         if isfile(local_csv_file):
-            log.warn("Removing failed CSV '%s'..." % local_csv_file)
+            log.warning("Removing failed CSV '%s'..." % local_csv_file)
             removefile(local_csv_file)
+        # pprint(e)
         abort(500, "Failed creating CSV '%s' : %s" % (filename, e))
 
 
@@ -1057,7 +1069,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at):
                 validates_any_constraint = True
                 for constraint in constraints:
                     validates_constraint = True
-                    for key, possible_values in constraint.iteritems():
+                    for key, possible_values in iter(constraint.items()):
                         actual_value = json_datum[_get_index_of_key(
                             json_data, key
                         )]
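Note: dict.iteritems() does not exist in Python 3; items() already returns
a lazy view, so the iter() wrapper here is harmless but redundant, and the
plain method call would do. A minimal sketch:

    constraint = {'obs_type': ['image'], 'target': ['jupiter']}
    for key, possible_values in constraint.items():
        print(key, possible_values)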
@@ -1084,7 +1096,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at):
 
             catalog_layers[config_layer['slug']].append({
                 'start': start_time.strftime(MOMENT_DATE_FMT),
-                'stop': stop_time.strftime(MOMENT_DATE_FMT),
+                'stop': stop_time.strftime(MOMENT_DATE_FMT),
             })
 
     return catalog_layers
@@ -1149,7 +1161,7 @@ def favicon():  # we want it served from the root, not from static/
 def home():
     increment_hit_counter()
     parameters = PARAMETERS.values()
-    parameters.sort(key=lambda x: x['position'])
+    parameters = sorted(parameters, key=lambda x: x['position'])
     input_slug = get_input_slug_from_query()
     targets = [t for t in config['targets'] if not t['locked']]
     started_at, stopped_at = get_interval_from_query()
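Note: PARAMETERS.values() returns a view in Python 3, and views have no
sort() method; building a new list with sorted() is the standard fix. A
minimal sketch:

    PARAMETERS = {'pdyn': {'position': 2}, 'magn': {'position': 1}}
    parameters = sorted(PARAMETERS.values(), key=lambda x: x['position'])
    print([p['position'] for p in parameters])  # [1, 2]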
@@ -1165,9 +1177,9 @@ def home():
         'started_at': started_at,
         'stopped_at': stopped_at,
         'planets': [s for s in config['targets'] if s['type'] == 'planet'],
-        'probes': [s for s in config['targets'] if s['type'] == 'probe'],
-        'comets': [s for s in config['targets'] if s['type'] == 'comet'],
-        'visits': get_hit_counter(),
+        'probes': [s for s in config['targets'] if s['type'] == 'probe'],
+        'comets': [s for s in config['targets'] if s['type'] == 'comet'],
+        'visits': get_hit_counter(),
     })
 
 
@@ -1235,7 +1247,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at):
     """
     separator = '-'
     targets = targets.split(separator)
-    targets.sort()
+    targets.sort()
     targets_configs = []
     for target in targets:
         if not target:
@@ -1307,7 +1319,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at):
     """
     separator = '-'  # /!\ this char should never be in target's slugs
     targets = targets.split(separator)
-    targets.sort()
+    targets.sort()
     targets_configs = []
     for target in targets:
         if not target:
@@ -1317,7 +1329,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at):
         abort(400, "No valid targets specified. What are you doing?")
 
     params = params.split(separator)
-    params.sort()
+    params.sort()
     if 0 == len(params):
         abort(400, "No valid parameters specified. What are you doing?")
     if not is_list_in_list(params, PARAMETERS.keys()):
@@ -1359,7 +1371,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at):
         started_at=started_at, stopped_at=stopped_at
     )
     dkeys = sorted(data)
-    dimension = 'dim_'+target_slug
+    dimension = 'dim_' + target_slug
     nc_handle.createDimension(dimension, len(dkeys))
 
     # TIME #
@@ -1432,7 +1444,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at):
     """
     separator = '-'  # /!\ this char should never be in target's slugs
     targets = targets.split(separator)
-    targets.sort()
+    targets.sort()
     targets_configs = []
     for target in targets:
         if not target:
@@ -1552,7 +1564,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at):
                 else:
                     values_xhee.append(0)
                     values_yhee.append(0)
-                    log.warn("Orbit data for %s has NaNs." % target_slug)
+                    log.warning("Orbit data for %s has NaNs." % target_slug)
         cdf_handle[kx] = values_xhee
         cdf_handle[ky] = values_yhee
         cdf_handle[kx].attrs['UNITS'] = 'Au'
@@ -1599,11 +1611,11 @@ def download_auroral_catalog_csv(target):
     header = ('time_min', 'time_max', 'thumbnail_url', 'external_link')
     if len(emissions):
         header = emissions[0].keys()
-    si = StringIO.StringIO()
+    si = StringIO()
     cw = csv_dict_writer(si, fieldnames=header)
     cw.writeheader()
     # 'time_min', 'time_max', 'thumbnail_url', 'external_link'
-    #cw.writerow(head)
+    # cw.writerow(head)
 
     log.debug("Writing auroral emissions CSV for %s..." % tc['name'])
     cw.writerows(emissions)
@@ -1617,8 +1629,6 @@ def download_auroral_catalog_csv(target):
     # return send_from_directory(CACHE_DIR, filename)
 
 
-
-
 @app.route("/test/auroral/<target>")
 def test_auroral_emissions(target):
     tc = validate_tap_target_config(target)
@@ -1666,7 +1676,7 @@ def cache_warmup():
 
     targets = get_active_targets()
     targets_slugs = [target['slug'] for target in targets]
-    targets_slugs.sort()
+    targets_slugs.sort()
 
     update_spacepy()
     for target in targets:
@@ -1704,7 +1714,7 @@ def log_clear():
 # cdf_to_inspect = get_path("../res/dummy.nc")
 # cdf_to_inspect = get_path("../res/dummy_jupiter_coordinates.nc")
 #
-# si = StringIO.StringIO()
+# si = StringIO()
 # cw = csv.DictWriter(si, fieldnames=['Name', 'Shape', 'Length'])
 # cw.writeheader()
 #
web/view/home.html.jinja2
@@ -272,7 +272,7 @@ var sw_configuration = {
     orbit: { a: {{ target.orbit.semimajor or 0 }}, b: {{ target.orbit.semiminor or 0 }} },
     img: '{{ static('img/target/'~target.slug~'_128.png') }}',
     layers: {
-{% for catalog_slug, catalog_intervals in target.catalog_layers.iteritems() %}
+{% for catalog_slug, catalog_intervals in target.catalog_layers.items() %}
       "{{ catalog_slug }}": [
 {% for interval in catalog_intervals %}
 {# { start: "2018-03-28T00:00:00Z", stop: "2018-03-29T00:00:00Z" }, #}
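Note: the template mirrors the Python change: Jinja2 resolves .items() on
the dict itself, while .iteritems() no longer exists under Python 3. A
minimal sketch of rendering such a mapping (the data is hypothetical):

    from jinja2 import Environment

    tpl = Environment().from_string(
        "{% for slug, intervals in layers.items() %}"
        "{{ slug }}:{{ intervals|length }} "
        "{% endfor %}"
    )
    print(tpl.render(layers={'aurora': [1, 2]}))  # -> aurora:2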