web/run.py

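# HelioPropa backend: a small Flask app that fetches model outputs and orbit
# data (netCDF files) from the CDPP/AMDA web service configured in config.yml,
# caches everything under ../cache/, and serves it rearranged as CSV files
# (one target) or as gzipped tarballs of CSV files (several targets at once).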
import random
import datetime
import StringIO
from math import sqrt

from os import listdir, environ, remove as removefile
from os.path import isfile, join, abspath, dirname

import csv
import json
import gzip
import tarfile
import urllib
import logging
from pprint import pprint
from csv import writer as csv_writer
from yaml import load as yaml_load
from flask import Flask
from flask import redirect, url_for, send_from_directory, abort as abort_flask
from flask import request
from jinja2 import Environment, FileSystemLoader
from netCDF4 import Dataset


# PATH RELATIVITY #############################################################

THIS_DIRECTORY = dirname(abspath(__file__))


def get_path(relative_path):
    """Get an absolute path from the relative path to this script directory."""
    return abspath(join(THIS_DIRECTORY, relative_path))


# COLLECT GLOBAL INFORMATION FROM SOURCES #####################################

# VERSION
with open(get_path('../VERSION'), 'r') as version_file:
    version = version_file.read().strip()

# CONFIG
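# config.yml provides the `amda` URL template used to query the remote data
# service, and the list of `targets` (planets, probes, comets) with the slugs
# of their model and orbit datasets.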
with open(get_path('../config.yml'), 'r') as config_file:
    config = yaml_load(config_file.read())

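# Timestamp format used in route parameters and in cached file names.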
FILE_DATE_FMT = "%Y-%m-%dT%H:%M:%S"


# LOGGING #####################################################################

log = logging.getLogger("HelioPropa")
log.setLevel(logging.DEBUG)
# log.setLevel(logging.WARN)                         # <-- set log level here !
logHandler = logging.FileHandler(get_path('run.log'))
logHandler.setFormatter(logging.Formatter(
    "%(asctime)s - %(levelname)s - %(message)s"
))
log.addHandler(logHandler)


# SETUP FLASK ENGINE ##########################################################

app = Flask(__name__, root_path=THIS_DIRECTORY)
app.debug = environ.get('DEBUG') == 'true'
if app.debug:
    log.info("Starting Flask app IN DEBUG MODE...")
else:
    log.info("Starting Flask app...")


# SETUP JINJA2 TEMPLATE ENGINE ################################################

def static_global(filename):
    return url_for('static', filename=filename)


def shuffle_filter(seq):
    """
    This shuffles the sequence it is applied to.
    Jinja2 _should_ provide this.
    """
    try:
        result = list(seq)
        random.shuffle(result)
        return result
    except:
        return seq


def markdown_filter(value, nl2br=False, p=True):
    """
    Converts markdown into html.
    nl2br: set to True to replace line breaks with <br> tags
    p: set to False to remove the enclosing <p></p> tags
    """
    from markdown import markdown
    from markdown.extensions.nl2br import Nl2BrExtension
    from markdown.extensions.abbr import AbbrExtension
    extensions = [AbbrExtension()]
    if nl2br is True:
        extensions.append(Nl2BrExtension())
    markdowned = markdown(value, output_format='html5', extensions=extensions)
    if p is False:
        markdowned = markdowned.replace(r"<p>", "").replace(r"</p>", "")
    return markdowned


tpl_engine = Environment(loader=FileSystemLoader([get_path('view')]),
                         trim_blocks=True,
                         lstrip_blocks=True)

tpl_engine.globals.update(
    url_for=url_for,
    static=static_global,
)

tpl_engine.filters['markdown'] = markdown_filter
tpl_engine.filters['md'] = markdown_filter
tpl_engine.filters['shuffle'] = shuffle_filter
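# Used in templates as e.g. {{ some_markdown | md }} or {{ items | shuffle }}.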

tpl_global_vars = {
    'request': request,
    'version': version,
    'config': config,
    'now': datetime.datetime.now(),
}


# HELPERS #####################################################################

def abort(code, message):
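    """Log the error, then abort the current HTTP request with the given code."""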
    log.error(message)
    abort_flask(code, message)


def render_view(view, context=None):
    """
    A simple helper to render [view] template with [context] vars.
    It automatically adds the global template vars defined above, too.
    It returns a string, usually the HTML contents to display.
    """
    context = {} if context is None else context
    return tpl_engine.get_template(view).render(
        dict(tpl_global_vars.items() + context.items())
    )


# def render_page(page, title="My Page", context=None):
#     """
#     A simple helper to render the md_page.html template with [context] vars &
#     the additional contents of `page/[page].md` in the `md_page` variable.
#     It automagically adds the global template vars defined above, too.
#     It returns a string, usually the HTML contents to display.
#     """
#     if context is None:
#         context = {}
#     context['title'] = title
#     context['md_page'] = ''
#     with file(get_path('page/%s.md' % page)) as f:
#         context['md_page'] = f.read()
#     return tpl_engine.get_template('md_page.html').render(
#         dict(tpl_global_vars.items() + context.items())
#     )

def datetime_from_list(time_list):
    """
    Datetimes in retrieved CDFs are stored as lists of numbers,
    with DayOfYear starting at 0. We want it starting at 1 for default parsers.
    """
    # Day Of Year starts at 0, but for our datetime parser it starts at 1
    doy = '{:03d}'.format(int(''.join(time_list[4:7])) + 1)
    return datetime.datetime.strptime(
        "%s%s%s" % (''.join(time_list[0:4]), doy, ''.join(time_list[7:])),
        "%Y%j%H%M%S%f"
    )


def get_source_config(slug):
    for s in config['targets']:  # dumb
        if s['slug'] == slug:
            return s
    raise Exception("No source found for slug '%s'." % slug)


def retrieve_data(orbiter, what, started_at, stopped_at):
    """
    Queries Myriam's remote API, then downloads, extracts and caches
    the netCDF files.
    :param orbiter: key of the source in the YAML config
    :param what: either 'model' or 'orbit', a key in the config of the source
    :param started_at:
    :param stopped_at:
    :return: a list of local file paths to netCDF (.nc) files
    """

    url = config['amda'].format(
        dataSet=what,
        startTime=started_at.isoformat(),
        stopTime=stopped_at.isoformat()
    )
    log.info("Fetching remote gzip files list at '%s'." % url)
    retries = 0
    success = False
    errors = []
    remote_gzip_files = []
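    # The list endpoint is not fully reliable: retry up to three times, and
    # treat the sentinel responses 'NODATASET', 'ERROR' and ['OUTOFTIME']
    # as failures.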
    while not success and retries < 3:
        try:
            response = urllib.urlopen(url)
            remote_gzip_files = json.loads(response.read())
            if not remote_gzip_files:
                raise Exception("Failed to fetch data at '%s'." % url)
            if remote_gzip_files == 'NODATASET':
                raise Exception("API says there's no dataset at '%s'." % url)
            if remote_gzip_files == 'ERROR':
                raise Exception("API returned an error at '%s'." % url)
            if remote_gzip_files == ['OUTOFTIME']:
                raise Exception("API says it's out of time at '%s'." % url)
            success = True
        except Exception as e:
            log.warn("Failed (%d/3) '%s' : %s" % (retries+1, url, e.message))
            remote_gzip_files = []
            errors.append(e)
        finally:
            retries += 1
    if not remote_gzip_files:
        abort(400, "Failed to fetch gzip files list for %s at '%s' : %s" %
                   (orbiter, url, errors))
    else:
        remote_gzip_files = list(set(remote_gzip_files))

    log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files))

    # retriever = urllib.URLopener()  # would we need to do this every time ?
    local_gzip_files = []
    for remote_gzip_file in remote_gzip_files:
        if remote_gzip_file == 'OUTOFTIME':
            continue
        # hotfix removeme @Myriam
        if remote_gzip_file.endswith('/.gz'):
            continue
        remote_gzip_file = remote_gzip_file.replace('cdpp1', 'cdpp', 1)
        #########################
        filename = "%s_%s" % (orbiter, str(remote_gzip_file).split('/')[-1])
        local_gzip_file = get_path("../cache/%s" % filename)
        local_gzip_files.append(local_gzip_file)
        if not isfile(local_gzip_file):
            log.debug("Retrieving '%s'..." % local_gzip_file)
            urllib.urlretrieve(remote_gzip_file, local_gzip_file)
            log.debug("Retrieved '%s'." % local_gzip_file)

    local_netc_files = []
    for local_gzip_file in local_gzip_files:
        local_netc_file = local_gzip_file[0:-3]
        local_netc_files.append(local_netc_file)
        log.debug("Unzipping '%s'..." % local_gzip_file)
        success = True
        try:
            with gzip.open(local_gzip_file, 'rb') as f:
                file_content = f.read()
                with open(local_netc_file, 'w+b') as g:
                    g.write(file_content)
        except Exception as e:
            success = False
            log.warning("Cannot process gz file '%s' from '%s' : %s" %
                        (local_gzip_file, url, e))
        if success:
            log.debug("Unzipped '%s'." % local_gzip_file)

    return local_netc_files


def generate_csv_contents(source_config, started_at, stopped_at):
    # @todo iterate on models when there are many
    try:
        model_slug = source_config['models'][0]['slug']
    except:
        abort(500, "Invalid model configuration for '%s'." % source_config['slug'])

    # Grab the list of netCDF files from Myriam's API
    # http://cdpp.irap.omp.eu/BASE/DDService/getDataUrl.php?dataSet=jupiter_orb_all&StartTime=2014-02-23T10:00:10&StopTime=2017-02-24T23:59:00
    # http://cdpp.irap.omp.eu/BASE/DATA/TAO/JUPITER/SW/sw_2014.nc.gz
    log.info("Generating CSV for '%s'..." % source_config['slug'])
    model_files = retrieve_data(source_config['slug'], model_slug, started_at, stopped_at)
    orbits_files = retrieve_data(source_config['slug'], source_config['orbit']['model'], started_at, stopped_at)

    log.debug("Crunching CSV contents for '%s'..." % source_config['name'])
    si = StringIO.StringIO()
    cw = csv_writer(si)
    cw.writerow((  # the order matters !
        'time',
        'vrad', 'vtan', 'vlen',
        'magn', 'temp', 'pdyn', 'dens', 'angl',
        'xhci', 'yhci'
    ))

    precision = "%Y-%m-%dT%H"  # model and orbits times are equal-ish
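    # Index orbit positions by timestamp truncated to the hour (`precision`),
    # so that each model row below can be matched to an orbit sample.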
    orbits_data = {}  # keys are datetime as str, values arrays of XY
    for orbits_file in orbits_files:
        log.debug("%s: opening orbit NETCDF4 '%s'..." % (source_config['name'], orbits_file))
        cdf_handle = Dataset(orbits_file, "r", format="NETCDF4")
        times = cdf_handle.variables['Time']  # YYYY DOY HH MM SS .ms
        data_hci = cdf_handle.variables['HCI']
        for time, datum_hci in zip(times, data_hci):
            dtime = datetime_from_list(time)
            if started_at <= dtime <= stopped_at:
                dkey = dtime.strftime(precision)
                orbits_data[dkey] = datum_hci
        cdf_handle.close()
    all_data = {}  # keys are datetime as str, values tuples of data
    for model_file in model_files:
        # Time, StartTime, StopTime, V, B, N, T, Delta_angle, P_dyn
        log.debug("%s: opening model NETCDF4 '%s'..." %
                  (source_config['name'], model_file))
        cdf_handle = Dataset(model_file, "r", format="NETCDF4")
        times = cdf_handle.variables['Time']  # YYYY DOY HH MM SS .ms
        data_v = cdf_handle.variables['V']
        data_b = cdf_handle.variables['B']
        data_t = cdf_handle.variables['T']
        data_n = cdf_handle.variables['N']
        data_p = cdf_handle.variables['P_dyn']
        data_d = cdf_handle.variables['Delta_angle']
        log.debug("%s: aggregating data from '%s'..." %
                  (source_config['name'], model_file))
        for time, datum_v, datum_b, datum_t, datum_n, datum_p, datum_d \
                in zip(times, data_v, data_b, data_t, data_n, data_p, data_d):
            vrad = datum_v[0]
            vtan = datum_v[1]
            dtime = datetime_from_list(time)
            if started_at <= dtime <= stopped_at:
                dkey = dtime.strftime(precision)
                x_hci = None
                y_hci = None
                if dkey in orbits_data:
                    x_hci = orbits_data[dkey][0]
                    y_hci = orbits_data[dkey][1]
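                # Same column order as the CSV header written above.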
                all_data[dkey] = (
                    dtime.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
                    vrad, vtan, sqrt(vrad * vrad + vtan * vtan),
                    datum_b, datum_t, datum_n, datum_p, datum_d,
                    x_hci, y_hci
                )
        cdf_handle.close()

    log.debug("Writing and sorting CSV for '%s'..." % source_config['slug'])
    for dkey in sorted(all_data):
        cw.writerow(all_data[dkey])

    log.info("Generated CSV contents for '%s'." % source_config['slug'])
    return si.getvalue()


def generate_csv_file_if_needed(target_config, started_at, stopped_at):
    filename = "%s_%s_%s.csv" % (target_config['slug'],
                                 started_at.strftime(FILE_DATE_FMT),
                                 stopped_at.strftime(FILE_DATE_FMT))
    local_csv_file = get_path("../cache/%s" % filename)
    if not isfile(local_csv_file):
        log.info("Generating CSV '%s'..." % local_csv_file)
        try:
            with open(local_csv_file, mode="w+") as f:
                f.write(generate_csv_contents(target_config,
                                              started_at=started_at,
                                              stopped_at=stopped_at))
            log.info("Generation of '%s' done." % filename)
        except Exception as e:
            if isfile(local_csv_file):
                log.warn("Removing failed CSV '%s'..." % local_csv_file)
                removefile(local_csv_file)
            abort(500, "Failed creating CSV '%s' : %s" % (filename, e))


def increment_hit_counter():
    hit_count_path = get_path("../VISITS")

    if isfile(hit_count_path):
        hit_count = int(open(hit_count_path).read())
        hit_count += 1
    else:
        hit_count = 1

    hit_counter_file = open(hit_count_path, 'w')
    hit_counter_file.write(str(hit_count))
    hit_counter_file.close()

    return hit_count


# ROUTING #####################################################################

@app.route('/favicon.ico')
def favicon():
    return send_from_directory(
        join(app.root_path, 'static', 'img'),
        'favicon.ico', mimetype='image/vnd.microsoft.icon'
    )


@app.route("/")
@app.route("/home.html")
@app.route("/index.html")
def home():
    return render_view('home.html.jinja2', {
        'targets': config['targets'],
        'planets': [s for s in config['targets'] if s['type'] == 'planet'],
        'probes':  [s for s in config['targets'] if s['type'] == 'probe'],
        'comets':  [s for s in config['targets'] if s['type'] == 'comet'],
        'visits':  increment_hit_counter(),
    })


@app.route("/<source>_<started_at>_<stopped_at>.csv")
def get_target_csv(source, started_at, stopped_at):
    """
    Grab data and orbit data for the specified `source`,
    rearrange it and return it as a CSV file.
    `started_at` and `stopped_at` should be UTC.
    """
    source_config = get_source_config(source)
    try:
        started_at = datetime.datetime.strptime(started_at, FILE_DATE_FMT)
    except:
        abort(400, "Invalid started_at parameter : '%s'." % started_at)
    try:
        stopped_at = datetime.datetime.strptime(stopped_at, FILE_DATE_FMT)
    except:
        abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at)

    filename = "%s_%s_%s.csv" % (source,
                                 started_at.strftime(FILE_DATE_FMT),
                                 stopped_at.strftime(FILE_DATE_FMT))
    local_csv_file = get_path("../cache/%s" % filename)
    generate_csv_file_if_needed(source_config, started_at, stopped_at)
    if not isfile(local_csv_file):
        abort(500, "Could not cache CSV file at '%s'." % local_csv_file)

    return send_from_directory(get_path("../cache/"), filename)


@app.route("/<targets>_<started_at>_<stopped_at>.zip")
def download_targets_zip(targets, started_at, stopped_at):
    """
    Grab data and orbit data for each of the specified `targets`,
    rearrange them and return them as a gzipped tarball of CSV files.
    `started_at` and `stopped_at` should be UTC.

    targets: string list of targets' slugs, separated by `-`.
    This will fail hard if targets' slugs start having `-` in them.

    toreview

    """
    separator = '-'
    targets = sorted(targets.split(separator))  # not .sort(), which returns None
    targets_configs = []
    for target in targets:
        if not target:
            abort(400, "Invalid targets format : `%s`." % targets)
        targets_configs.append(get_source_config(target))
    if 0 == len(targets_configs):
        abort(400, "No valid targets specified. What are you doing?")

    date_fmt = "%Y-%m-%dT%H:%M:%S"
    try:
        started_at = datetime.datetime.strptime(started_at, date_fmt)
    except:
        abort(400, "Invalid started_at parameter : '%s'." % started_at)
    try:
        stopped_at = datetime.datetime.strptime(stopped_at, date_fmt)
    except:
        abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at)

    gzip_filename = "%s_%s_%s.tar.gz" % (separator.join(targets),
                                         started_at.strftime(date_fmt),
                                         stopped_at.strftime(date_fmt))
    local_gzip_file = get_path("../cache/%s" % gzip_filename)

    if not isfile(local_gzip_file):
        log.debug("Creating tarball '%s'..." % local_gzip_file)
        # success = True
        # try:
        #     with gzip.open(local_gzip_file, 'rb') as f:
        #         file_content = f.read()
        #         with open(local_netc_file, 'w+b') as g:
        #             g.write(file_content)
        # except Exception as e:
        #     success = False
        #     log.warning("Cannot process gz file '%s' from '%s' : %s" %
        #                 (local_gzip_file, url, e))
        # if success:
        #     log.debug("Unzipped '%s'." % local_gzip_file)

        log.debug("Creating the CSV files themselves...")
        for target_config in targets_configs:
            # get_target_csv(target_config['slug'], started_at.strftime(date_fmt), stopped_at.strftime(date_fmt))

            filename = "%s_%s_%s.csv" % (target_config['slug'],
                                         started_at.strftime(date_fmt),
                                         stopped_at.strftime(date_fmt))
            local_csv_file = get_path("../cache/%s" % filename)
            if not isfile(local_csv_file):
                with open(local_csv_file, mode="w+") as f:
                    f.write(generate_csv_contents(target_config,
                                                  started_at=started_at,
                                                  stopped_at=stopped_at))

        # tar_filename = "%s_%s_%s.tar" % (separator.join(targets),
        #                                  started_at.strftime(date_fmt),
        #                                  stopped_at.strftime(date_fmt))
        # tar_file = get_path("../cache/%s" % tar_filename)

        log.debug("Make the tarball '%s'..." % local_gzip_file)
        with tarfile.open(local_gzip_file, "w:gz") as tar:
            for target_config in targets_configs:
                filename = "%s_%s_%s.csv" % (target_config['slug'],
                                             started_at.strftime(date_fmt),
                                             stopped_at.strftime(date_fmt))
                local_csv_file = get_path("../cache/%s" % filename)
                tar.add(local_csv_file, arcname=filename)

    if not isfile(local_gzip_file):
        abort(500, "Could not cache tarball at '%s'." % local_gzip_file)

    return send_from_directory(get_path("../cache/"), gzip_filename)

# DEV TOOLS ###################################################################

# @app.route("/inspect")
# def analyze_cdf():
#     """
#     For debug purposes.
#     """
#     cdf_to_inspect = get_path("../res/dummy.nc")
#     cdf_to_inspect = get_path("../res/dummy_jupiter_coordinates.nc")
#
#     si = StringIO.StringIO()
#     cw = csv.DictWriter(si, fieldnames=['Name', 'Shape', 'Length'])
#     cw.writeheader()
#
#     # Time, StartTime, StopTime, V, B, N, T, Delta_angle, P_dyn, QualityFlag
#     cdf_handle = Dataset(cdf_to_inspect, "r", format="NETCDF4")
#     for variable in cdf_handle.variables:
#         v = cdf_handle.variables[variable]
#         cw.writerow({
#             'Name': variable,
#             'Shape': v.shape,
#             'Length': v.size,
#         })
#     cdf_handle.close()
#
#     return si.getvalue()


# MAIN ########################################################################

if __name__ == "__main__":
    # Debug mode on, as the production server does not use this.
    extra_files = [get_path('../config.yml')]
    app.run(debug=True, extra_files=extra_files)