Commit b2837a08049c7e921ed5aa9bf041b2018f237e71

Authored by Goutte
1 parent a245c676

Add three retries when the request to Myriam's API fails for one reason or another…

… on the production server.
Showing 1 changed file with 79 additions and 5 deletions   Show diff stats
web/run.py
... ... @@ -46,13 +46,21 @@ with open(get_path('../config.yml'), 'r') as config_file:
46 46  
47 47 log = logging.getLogger("HelioPropa")
48 48 log.setLevel(logging.INFO)
49   -log.addHandler(logging.FileHandler(get_path('run.log')))
  49 +logHandler = logging.FileHandler(get_path('run.log'))
  50 +logHandler.setFormatter(logging.Formatter(
  51 + "%(asctime)s - %(levelname)s - %(message)s"
  52 +))
  53 +log.addHandler(logHandler)
50 54  
51 55  
52 56 # SETUP FLASK ENGINE ##########################################################
53 57  
54 58 app = Flask(__name__, root_path=THIS_DIRECTORY)
55 59 app.debug = environ.get('DEBUG') == 'true'
  60 +if app.debug:
  61 + log.info("Starting Flask app in debug mode...")
  62 +else:
  63 + log.info("Starting Flask app...")
56 64  
57 65  
58 66 # SETUP JINJA2 TEMPLATE ENGINE ################################################
... ... @@ -179,10 +187,26 @@ def retrieve_data(orbiter, what, started_at, stopped_at):
179 187 startTime=started_at.isoformat(),
180 188 stopTime=stopped_at.isoformat()
181 189 )
182   - response = urllib.urlopen(url)
183   - remote_gzip_files = json.loads(response.read())
184   - if not remote_gzip_files or remote_gzip_files == 'NODATASET':
  190 + retries = 0
  191 + success = False
  192 + remote_gzip_files = []
  193 + while not success and retries < 3:
  194 + try:
  195 + response = urllib.urlopen(url)
  196 + remote_gzip_files = json.loads(response.read())
  197 + if not remote_gzip_files:
  198 + raise Exception("Failed to fetch data at '%s'." % url)
  199 + if remote_gzip_files == 'NODATASET':
  200 + raise Exception("No dataset at '%s'." % url)
  201 + success = True
  202 + except Exception as e:
  203 + log.warn("Failed (%d/3) '%s' : %s" % (retries+1, url, e.message))
  204 + finally:
  205 + retries += 1
  206 + if not remote_gzip_files:
185 207 abort(400, "Failed to fetch data at '%s'." % url)
  208 + if remote_gzip_files == 'NODATASET':
  209 + abort(400, "No dataset at '%s'." % url)
186 210  
187 211 # retriever = urllib.URLopener() # would we need to do this every time ?
188 212 local_gzip_files = []
... ... @@ -211,7 +235,7 @@ def retrieve_data(orbiter, what, started_at, stopped_at):
211 235  
212 236  
213 237 def generate_csv_contents(source_config, started_at, stopped_at):
214   - # todo: iterate on models when there are many
  238 + # @todo iterate on models when there are many
215 239 try:
216 240 model_slug = source_config['models'][0]['slug']
217 241 except:
... ... @@ -356,6 +380,56 @@ def get_target_csv(source, started_at, stopped_at):
356 380 return send_from_directory(get_path("../cache/"), filename)
357 381  
358 382  
@app.route("/<targets>_<started_at>_<stopped_at>.zip")
def download_targets_zip(targets, started_at, stopped_at):
    """
    Grab data and orbit data for each of the specified `targets`,
    rearrange them and return them as a cached CSV file.
    `started_at` and `stopped_at` should be UTC datetime strings
    in the format %Y-%m-%dT%H:%M:%S.

    targets: string list of targets' slugs, separated by `:`.

    @todo despite the `.zip` route, this currently caches and serves a
          single concatenated CSV ; actual ZIP packaging is still to do.
    """
    # list.sort() sorts in place and returns None, so iterating on its
    # return value would crash with a TypeError ; use sorted() instead.
    targets_slugs = sorted(targets.split(':'))
    targets_confs = []
    for target in targets_slugs:
        if not target:
            # An empty slug means a malformed list, eg. `a::b` or `:a`.
            abort(400, "Invalid targets format : `%s`." % targets)
        targets_confs.append(get_source_config(target))
    if 0 == len(targets_confs):
        abort(400, "No valid targets specified. What are you doing?")

    date_fmt = "%Y-%m-%dT%H:%M:%S"
    try:
        started_at = datetime.datetime.strptime(started_at, date_fmt)
    except:
        abort(400, "Invalid started_at parameter : '%s'." % started_at)
    try:
        stopped_at = datetime.datetime.strptime(stopped_at, date_fmt)
    except:
        abort(400, "Invalid stopped_at parameter : '%s'." % stopped_at)

    # `source` and `source_config` were undefined here (NameError at
    # runtime) ; derive the cache filename from the sorted slugs, and
    # generate the contents from the collected configurations.
    filename = "%s_%s_%s.csv" % ('-'.join(targets_slugs),
                                 started_at.strftime(date_fmt),
                                 stopped_at.strftime(date_fmt))

    local_csv_file = get_path("../cache/%s" % filename)
    if not isfile(local_csv_file):
        with open(local_csv_file, mode="w+") as f:
            for target_conf in targets_confs:
                f.write(generate_csv_contents(target_conf,
                                              started_at=started_at,
                                              stopped_at=stopped_at))

    if not isfile(local_csv_file):
        abort(500, "Could not cache CSV file at '%s'." % local_csv_file)

    return send_from_directory(get_path("../cache/"), filename)
  432 +
359 433 # DEV TOOLS ###################################################################
360 434  
361 435 # @app.route("/inspect")
... ...