Commit 077980ebb67e3e8978cfe30bae045f9e69af2c09

Authored by Goutte
1 parent 7284f4ff

Improve availability of the hit counter and clean up.

Showing 1 changed file with 39 additions and 21 deletions   Show diff stats
@@ -49,7 +49,7 @@ FILE_DATE_FMT = "%Y-%m-%dT%H:%M:%S"
49 49
50 log = logging.getLogger("HelioPropa") 50 log = logging.getLogger("HelioPropa")
51 log.setLevel(logging.DEBUG) 51 log.setLevel(logging.DEBUG)
52 -# log.setLevel(logging.WARN) # <-- set log level here ! 52 +# log.setLevel(logging.ERROR) # <-- set log level here !
53 logHandler = logging.FileHandler(get_path('run.log')) 53 logHandler = logging.FileHandler(get_path('run.log'))
54 logHandler.setFormatter(logging.Formatter( 54 logHandler.setFormatter(logging.Formatter(
55 "%(asctime)s - %(levelname)s - %(message)s" 55 "%(asctime)s - %(levelname)s - %(message)s"
@@ -161,6 +161,7 @@ def render_view(view, context=None):
161 # dict(tpl_global_vars.items() + context.items()) 161 # dict(tpl_global_vars.items() + context.items())
162 # ) 162 # )
163 163
  164 +
164 def datetime_from_list(time_list): 165 def datetime_from_list(time_list):
165 """ 166 """
166 Datetimes in retrieved CDFs are stored as lists of numbers, 167 Datetimes in retrieved CDFs are stored as lists of numbers,
@@ -212,7 +213,7 @@ def retrieve_data(orbiter, what, started_at, stopped_at):
212 raise Exception("API says there's no dataset at '%s'." % url) 213 raise Exception("API says there's no dataset at '%s'." % url)
213 if remote_gzip_files == 'ERROR': 214 if remote_gzip_files == 'ERROR':
214 raise Exception("API returned an error at '%s'." % url) 215 raise Exception("API returned an error at '%s'." % url)
215 - if remote_gzip_files == ['OUTOFTIME']: 216 + if remote_gzip_files == ['OUTOFTIME']: # it happens
216 raise Exception("API says it's out of time at '%s'." % url) 217 raise Exception("API says it's out of time at '%s'." % url)
217 success = True 218 success = True
218 except Exception as e: 219 except Exception as e:
@@ -229,16 +230,15 @@ def retrieve_data(orbiter, what, started_at, stopped_at):
229 230
230 log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) 231 log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files))
231 232
232 - # retriever = urllib.URLopener() # would we need to do this every time ?  
233 local_gzip_files = [] 233 local_gzip_files = []
234 for remote_gzip_file in remote_gzip_files: 234 for remote_gzip_file in remote_gzip_files:
235 - if remote_gzip_file == 'OUTOFTIME':  
236 - continue  
237 - # hotfix removeme @Myriam 235 + # hotfixes to remove when fixed upstream @Myriam
  236 + if remote_gzip_file in ['OUTOFTIME', 'ERROR']:
  237 + continue # sometimes half the response is okay, the other not
238 if remote_gzip_file.endswith('/.gz'): 238 if remote_gzip_file.endswith('/.gz'):
239 continue 239 continue
240 remote_gzip_file = remote_gzip_file.replace('cdpp1', 'cdpp', 1) 240 remote_gzip_file = remote_gzip_file.replace('cdpp1', 'cdpp', 1)
241 - ######################### 241 + ################################################
242 filename = "%s_%s" % (orbiter, str(remote_gzip_file).split('/')[-1]) 242 filename = "%s_%s" % (orbiter, str(remote_gzip_file).split('/')[-1])
243 local_gzip_file = get_path("../cache/%s" % filename) 243 local_gzip_file = get_path("../cache/%s" % filename)
244 local_gzip_files.append(local_gzip_file) 244 local_gzip_files.append(local_gzip_file)
@@ -272,8 +272,9 @@ def generate_csv_contents(source_config, started_at, stopped_at):
272 # @todo iterate on models when there are many 272 # @todo iterate on models when there are many
273 try: 273 try:
274 model_slug = source_config['models'][0]['slug'] 274 model_slug = source_config['models'][0]['slug']
275 - except:  
276 - abort(500, "Invalid model configuration for '%s'." % source_config['slug']) 275 + except Exception as e:
  276 + abort(500, "Invalid model configuration for '%s' : %s"
  277 + % (source_config['slug'], str(e)))
277 278
278 # Grab the list of netCDF files from Myriam's API 279 # Grab the list of netCDF files from Myriam's API
279 # http://cdpp.irap.omp.eu/BASE/DDService/getDataUrl.php?dataSet=jupiter_orb_all&StartTime=2014-02-23T10:00:10&StopTime=2017-02-24T23:59:00 280 # http://cdpp.irap.omp.eu/BASE/DDService/getDataUrl.php?dataSet=jupiter_orb_all&StartTime=2014-02-23T10:00:10&StopTime=2017-02-24T23:59:00
@@ -295,7 +296,8 @@ def generate_csv_contents(source_config, started_at, stopped_at):
295 precision = "%Y-%m-%dT%H" # model and orbits times are equal-ish 296 precision = "%Y-%m-%dT%H" # model and orbits times are equal-ish
296 orbits_data = {} # keys are datetime as str, values arrays of XY 297 orbits_data = {} # keys are datetime as str, values arrays of XY
297 for orbits_file in orbits_files: 298 for orbits_file in orbits_files:
298 - log.debug("%s: opening orbit NETCDF4 '%s'..." % (source_config['name'], orbits_file)) 299 + log.debug("%s: opening orbit NETCDF4 '%s'..." %
  300 + (source_config['name'], orbits_file))
299 cdf_handle = Dataset(orbits_file, "r", format="NETCDF4") 301 cdf_handle = Dataset(orbits_file, "r", format="NETCDF4")
300 times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms 302 times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms
301 data_hci = cdf_handle.variables['HCI'] 303 data_hci = cdf_handle.variables['HCI']
@@ -370,9 +372,11 @@ def generate_csv_file_if_needed(target_config, started_at, stopped_at):
370 372
371 def remove_files_created_before(date, in_directory): 373 def remove_files_created_before(date, in_directory):
372 """ 374 """
373 - Will throw if something horrible happens, like invalid parameters. 375 + Will throw if something horrible happens.
  376 + Does not remove recursively (could be done with os.walk if needed).
  377 + Does not remove directories either.
374 :param date: datetime object 378 :param date: datetime object
375 - :param in_directory: 379 + :param in_directory: absolute path to directory
376 :return: 380 :return:
377 """ 381 """
378 import os 382 import os
@@ -380,22 +384,32 @@ def remove_files_created_before(date, in_directory):
380 384
381 secs = time.mktime(date.timetuple()) 385 secs = time.mktime(date.timetuple())
382 386
383 - if not isdir(in_directory):  
384 - raise ValueError("Directory to clean '%s' does not exist.") 387 + if not os.path.isdir(in_directory):
  388 + raise ValueError("No directory to clean at '%s'.")
385 389
386 removed_files = [] 390 removed_files = []
387 for file_name in os.listdir(in_directory): 391 for file_name in os.listdir(in_directory):
388 file_path = os.path.join(in_directory, file_name) 392 file_path = os.path.join(in_directory, file_name)
389 - if not os.path.isfile(file_path):  
390 - continue  
391 - t = os.stat(file_path)  
392 - if t.st_ctime < secs:  
393 - removed_files.append(file_path)  
394 - os.remove(file_path) 393 + if os.path.isfile(file_path):
  394 + t = os.stat(file_path)
  395 + if t.st_ctime < secs:
  396 + os.remove(file_path)
  397 + removed_files.append(file_path)
395 398
396 return removed_files 399 return removed_files
397 400
398 401
  402 +def get_hit_counter():
  403 + hit_count_path = get_path("../VISITS")
  404 +
  405 + if isfile(hit_count_path):
  406 + hit_count = int(open(hit_count_path).read())
  407 + else:
  408 + hit_count = 1
  409 +
  410 + return hit_count
  411 +
  412 +
399 def increment_hit_counter(): 413 def increment_hit_counter():
400 hit_count_path = get_path("../VISITS") 414 hit_count_path = get_path("../VISITS")
401 415
@@ -412,6 +426,9 @@ def increment_hit_counter():
412 return hit_count 426 return hit_count
413 427
414 428
  429 +tpl_global_vars['visits'] = get_hit_counter()
  430 +
  431 +
415 # ROUTING ##################################################################### 432 # ROUTING #####################################################################
416 433
417 @app.route('/favicon.ico') 434 @app.route('/favicon.ico')
@@ -426,12 +443,13 @@ def favicon():
426 @app.route("/home.html") 443 @app.route("/home.html")
427 @app.route("/index.html") 444 @app.route("/index.html")
428 def home(): 445 def home():
  446 + increment_hit_counter()
429 return render_view('home.html.jinja2', { 447 return render_view('home.html.jinja2', {
430 'targets': config['targets'], 448 'targets': config['targets'],
431 'planets': [s for s in config['targets'] if s['type'] == 'planet'], 449 'planets': [s for s in config['targets'] if s['type'] == 'planet'],
432 'probes': [s for s in config['targets'] if s['type'] == 'probe'], 450 'probes': [s for s in config['targets'] if s['type'] == 'probe'],
433 'comets': [s for s in config['targets'] if s['type'] == 'comet'], 451 'comets': [s for s in config['targets'] if s['type'] == 'comet'],
434 - 'visits': increment_hit_counter(), 452 + 'visits': get_hit_counter(),
435 }) 453 })
436 454
437 455