From 077980ebb67e3e8978cfe30bae045f9e69af2c09 Mon Sep 17 00:00:00 2001 From: Goutte Date: Wed, 2 Aug 2017 07:28:55 +0200 Subject: [PATCH] Improve availability of the hit counter and clean up. --- web/run.py | 60 +++++++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 21 deletions(-) diff --git a/web/run.py b/web/run.py index b57466f..2a17f41 100755 --- a/web/run.py +++ b/web/run.py @@ -49,7 +49,7 @@ FILE_DATE_FMT = "%Y-%m-%dT%H:%M:%S" log = logging.getLogger("HelioPropa") log.setLevel(logging.DEBUG) -# log.setLevel(logging.WARN) # <-- set log level here ! +# log.setLevel(logging.ERROR) # <-- set log level here ! logHandler = logging.FileHandler(get_path('run.log')) logHandler.setFormatter(logging.Formatter( "%(asctime)s - %(levelname)s - %(message)s" @@ -161,6 +161,7 @@ def render_view(view, context=None): # dict(tpl_global_vars.items() + context.items()) # ) + def datetime_from_list(time_list): """ Datetimes in retrieved CDFs are stored as lists of numbers, @@ -212,7 +213,7 @@ def retrieve_data(orbiter, what, started_at, stopped_at): raise Exception("API says there's no dataset at '%s'." % url) if remote_gzip_files == 'ERROR': raise Exception("API returned an error at '%s'." % url) - if remote_gzip_files == ['OUTOFTIME']: + if remote_gzip_files == ['OUTOFTIME']: # it happens raise Exception("API says it's out of time at '%s'." % url) success = True except Exception as e: @@ -229,16 +230,15 @@ def retrieve_data(orbiter, what, started_at, stopped_at): log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) - # retriever = urllib.URLopener() # would we need to do this every time ? 
local_gzip_files = [] for remote_gzip_file in remote_gzip_files: - if remote_gzip_file == 'OUTOFTIME': - continue - # hotfix removeme @Myriam + # hotfixes to remove when fixed upstream @Myriam + if remote_gzip_file in ['OUTOFTIME', 'ERROR']: + continue # sometimes half the response is okay, the other not if remote_gzip_file.endswith('/.gz'): continue remote_gzip_file = remote_gzip_file.replace('cdpp1', 'cdpp', 1) - ######################### + ################################################ filename = "%s_%s" % (orbiter, str(remote_gzip_file).split('/')[-1]) local_gzip_file = get_path("../cache/%s" % filename) local_gzip_files.append(local_gzip_file) @@ -272,8 +272,9 @@ def generate_csv_contents(source_config, started_at, stopped_at): # @todo iterate on models when there are many try: model_slug = source_config['models'][0]['slug'] - except: - abort(500, "Invalid model configuration for '%s'." % source_config['slug']) + except Exception as e: + abort(500, "Invalid model configuration for '%s' : %s" + % (source_config['slug'], str(e))) # Grab the list of netCDF files from Myriam's API # http://cdpp.irap.omp.eu/BASE/DDService/getDataUrl.php?dataSet=jupiter_orb_all&StartTime=2014-02-23T10:00:10&StopTime=2017-02-24T23:59:00 @@ -295,7 +296,8 @@ def generate_csv_contents(source_config, started_at, stopped_at): precision = "%Y-%m-%dT%H" # model and orbits times are equal-ish orbits_data = {} # keys are datetime as str, values arrays of XY for orbits_file in orbits_files: - log.debug("%s: opening orbit NETCDF4 '%s'..." % (source_config['name'], orbits_file)) + log.debug("%s: opening orbit NETCDF4 '%s'..." 
% + (source_config['name'], orbits_file)) cdf_handle = Dataset(orbits_file, "r", format="NETCDF4") times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms data_hci = cdf_handle.variables['HCI'] @@ -370,9 +372,11 @@ def generate_csv_file_if_needed(target_config, started_at, stopped_at): def remove_files_created_before(date, in_directory): """ - Will throw if something horrible happens, like invalid parameters. + Will throw if something horrible happens. + Does not remove recursively (could be done with os.walk if needed). + Does not remove directories either. :param date: datetime object - :param in_directory: + :param in_directory: absolute path to directory :return: """ import os @@ -380,22 +384,32 @@ secs = time.mktime(date.timetuple()) - if not isdir(in_directory): - raise ValueError("Directory to clean '%s' does not exist.") + if not os.path.isdir(in_directory): + raise ValueError("No directory to clean at '%s'." % in_directory) removed_files = [] for file_name in os.listdir(in_directory): file_path = os.path.join(in_directory, file_name) - if not os.path.isfile(file_path): - continue - t = os.stat(file_path) - if t.st_ctime < secs: - removed_files.append(file_path) - os.remove(file_path) + if os.path.isfile(file_path): + t = os.stat(file_path) + if t.st_ctime < secs: + os.remove(file_path) + removed_files.append(file_path) return removed_files +def get_hit_counter(): + hit_count_path = get_path("../VISITS") + + if isfile(hit_count_path): + hit_count = int(open(hit_count_path).read()) + else: + hit_count = 1 + + return hit_count + + def increment_hit_counter(): hit_count_path = get_path("../VISITS") @@ -412,6 +426,9 @@ return hit_count +tpl_global_vars['visits'] = get_hit_counter() + + # ROUTING ##################################################################### @app.route('/favicon.ico') @@ -426,12 +443,13 @@ def favicon(): @app.route("/home.html") @app.route("/index.html") def home(): 
increment_hit_counter() return render_view('home.html.jinja2', { 'targets': config['targets'], 'planets': [s for s in config['targets'] if s['type'] == 'planet'], 'probes': [s for s in config['targets'] if s['type'] == 'probe'], 'comets': [s for s in config['targets'] if s['type'] == 'comet'], - 'visits': increment_hit_counter(), + 'visits': get_hit_counter(), }) -- libgit2 0.21.2