From 89c56528070ccccdc5ad1bd97373dda7b3f6daa9 Mon Sep 17 00:00:00 2001 From: Richard Hitier Date: Thu, 26 Oct 2023 14:50:32 +0200 Subject: [PATCH] Rename heliopropa logger --- notebooks/Understanding_Config_YAML.ipynb | 607 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ web/run.py | 134 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++------------------------------------------------------------------- 2 files changed, 674 insertions(+), 67 deletions(-) create mode 100644 notebooks/Understanding_Config_YAML.ipynb diff --git a/notebooks/Understanding_Config_YAML.ipynb b/notebooks/Understanding_Config_YAML.ipynb new file mode 100644 index 0000000..1e105e8 --- /dev/null +++ b/notebooks/Understanding_Config_YAML.ipynb @@ -0,0 +1,607 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "1f0ee132-a51a-4f2b-8f22-2eeea48b4e09", + "metadata": {}, + "source": [ + "#### How we load" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "b43839b2-f9b3-48f3-a4de-77464eb35af1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['meta', 'header', 'authors', 'amda', 'defaults', 'layers', 'inputs', 'targets', 'toots'])\n", + "[{'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'parameters': {'atse': 'mercury_swrt_da',\n", + " 'brad': 'mercury_swrt_bx',\n", + " 'btan': 'mercury_swrt_b',\n", + " 'dens': 'mercury_swrt_n',\n", + " 'pdyn': 'mercury_swrt_pdyn',\n", + " 'temp': 'mercury_swrt_t',\n", + " 'xy_v': 'mercury_swrt_v'},\n", + " 'slug': 'tao-mercury-swrt'}],\n", + " 'dsc': [{'parameters': {'atse': 'mercury_dsc_da',\n", + " 'brad': 'mercury_dsc_bx',\n", + " 'btan': 'mercury_dsc_b',\n", + " 'dens': 'mercury_dsc_n',\n", + " 'pdyn': 'mercury_dsc_pdyn',\n", + " 'temp': 'mercury_dsc_t',\n", + " 'xy_v': 'mercury_dsc_v'},\n", + " 'slug': 'tao-mercury-dsc'}],\n", + " 'om': [{'parameters': {'atse': 'mercury_sw_da',\n", + " 'brad': 'mercury_sw_bx',\n", + " 'btan': 'mercury_sw_b',\n", + " 'dens': 'mercury_sw_n',\n", + " 'pdyn': 'mercury_sw_pdyn',\n", + " 'temp': 'mercury_sw_t',\n", + " 'xy_v': 'mercury_sw_v'},\n", + " 'slug': 'tao-mercury-sw'}],\n", + " 'sa': [{'parameters': {'atse': 'mercury_sta_da',\n", + " 'brad': 'mercury_sta_bx',\n", + " 'btan': 'mercury_sta_b',\n", + " 'dens': 'mercury_sta_n',\n", + " 'pdyn': 'mercury_sta_pdyn',\n", + " 'temp': 'mercury_sta_t',\n", + " 'xy_v': 'mercury_sta_v'},\n", + " 'slug': 'tao-mercury-sta'}],\n", + " 'sb': [{'parameters': {'atse': 'mercury_stb_da',\n", + " 'brad': 'mercury_stb_bx',\n", + " 'btan': 'mercury_stb_b',\n", + " 'dens': 'mercury_stb_n',\n", + " 'pdyn': 'mercury_stb_pdyn',\n", + " 'temp': 'mercury_stb_t',\n", + " 'xy_v': 'mercury_stb_v'},\n", + " 'slug': 'tao-mercury-stb'}],\n", + " 'solo': [{'slug': 'tao-mercury-solo'}]},\n", + " 'name': 'Mercury',\n", + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_mercury_hee'},\n", + " 'slug': 'mercury-orb-all'}],\n", + " 'semimajor': 0,\n", + " 
'semiminor': 0},\n", + " 'slug': 'mercury',\n", + " 'title': 'Mercury',\n", + " 'type': 'planet'},\n", + " {'default': True,\n", + " 'locked': False,\n", + " 'models': {'art': [{'parameters': {'atse': 'venus_swrt_da',\n", + " 'brad': 'venus_swrt_bx',\n", + " 'btan': 'venus_swrt_b',\n", + " 'dens': 'venus_swrt_n',\n", + " 'pdyn': 'venus_swrt_pdyn',\n", + " 'temp': 'venus_swrt_t',\n", + " 'xy_v': 'venus_swrt_v'},\n", + " 'slug': 'tao-venus-swrt'}],\n", + " 'dsc': [{'parameters': {'atse': 'venus_dsc_da',\n", + " 'brad': 'venus_dsc_bx',\n", + " 'btan': 'venus_dsc_b',\n", + " 'dens': 'venus_dsc_n',\n", + " 'pdyn': 'venus_dsc_pdyn',\n", + " 'temp': 'venus_dsc_t',\n", + " 'xy_v': 'venus_dsc_v'},\n", + " 'slug': 'tao-venus-dsc'}],\n", + " 'om': [{'parameters': {'atse': 'venus_sw_da',\n", + " 'brad': 'venus_sw_bx',\n", + " 'btan': 'venus_sw_b',\n", + " 'dens': 'venus_sw_n',\n", + " 'pdyn': 'venus_sw_pdyn',\n", + " 'temp': 'venus_sw_t',\n", + " 'xy_v': 'venus_sw_v'},\n", + " 'slug': 'tao-venus-sw'}],\n", + " 'sa': [{'parameters': {'atse': 'venus_sta_da',\n", + " 'brad': 'venus_sta_bx',\n", + " 'btan': 'venus_sta_b',\n", + " 'dens': 'venus_sta_n',\n", + " 'pdyn': 'venus_sta_pdyn',\n", + " 'temp': 'venus_sta_t',\n", + " 'xy_v': 'venus_sta_v'},\n", + " 'slug': 'tao-venus-sta'}],\n", + " 'sb': [{'parameters': {'atse': 'venus_stb_da',\n", + " 'brad': 'venus_stb_bx',\n", + " 'btan': 'venus_stb_b',\n", + " 'dens': 'venus_stb_n',\n", + " 'pdyn': 'venus_stb_pdyn',\n", + " 'temp': 'venus_stb_t',\n", + " 'xy_v': 'venus_stb_v'},\n", + " 'slug': 'tao-venus-stb'}],\n", + " 'solo': [{'slug': 'tao-venus-solo'}]},\n", + " 'name': 'Venus',\n", + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_venus_hee'},\n", + " 'slug': 'venus-orb-all'}],\n", + " 'semimajor': 0.72333199,\n", + " 'semiminor': 0.7233154},\n", + " 'slug': 'venus',\n", + " 'title': 'Venus',\n", + " 'type': 'planet'},\n", + " {'default': True,\n", + " 'locked': False,\n", + " 'models': {'om': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", + " {'parameters': {'dens': 'Dens',\n", + " 'temp': 'Temp',\n", + " 'vtot': 'Vel'},\n", + " 'slug': 'ace_swepam_real_1h'}],\n", + " 'sa': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", + " {'parameters': {'dens': 'Dens',\n", + " 'temp': 'Temp',\n", + " 'vtot': 'Vel'},\n", + " 'slug': 'ace_swepam_real_1h'}],\n", + " 'sb': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", + " {'parameters': {'dens': 'Dens',\n", + " 'temp': 'Temp',\n", + " 'vtot': 'Vel'},\n", + " 'slug': 'ace_swepam_real_1h'}]},\n", + " 'name': 'Earth',\n", + " 'orbit': {'models': []},\n", + " 'slug': 'earth',\n", + " 'title': 'Earth',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'parameters': {'atse': 'mars_swrt_da',\n", + " 'brad': 'mars_swrt_bx',\n", + " 'btan': 'mars_swrt_b',\n", + " 'dens': 'mars_swrt_n',\n", + " 'pdyn': 'mars_swrt_pdyn',\n", + " 'temp': 'mars_swrt_t',\n", + " 'xy_v': 'mars_swrt_v'},\n", + " 'slug': 'tao-mars-swrt'}],\n", + " 'dsc': [{'parameters': {'atse': 'mars_dsc_da',\n", + " 'brad': 'mars_dsc_bx',\n", + " 'btan': 'mars_dsc_b',\n", + " 'dens': 'mars_dsc_n',\n", + " 'pdyn': 'mars_dsc_pdyn',\n", + " 'temp': 'mars_dsc_t',\n", + " 'xy_v': 'mars_dsc_v'},\n", + " 'slug': 'tao-mars-dsc'}],\n", + " 'om': [{'parameters': {'atse': 'mars_sw_da',\n", + " 'brad': 'mars_sw_bx',\n", + " 'btan': 'mars_sw_b',\n", + " 'dens': 'mars_sw_n',\n", + " 'pdyn': 'mars_sw_pdyn',\n", + " 'temp': 'mars_sw_t',\n", + " 'xy_v': 'mars_sw_v'},\n", + 
" 'slug': 'tao-mars-sw'}],\n", + " 'sa': [{'parameters': {'atse': 'mars_sta_da',\n", + " 'brad': 'mars_sta_bx',\n", + " 'btan': 'mars_sta_b',\n", + " 'dens': 'mars_sta_n',\n", + " 'pdyn': 'mars_sta_pdyn',\n", + " 'temp': 'mars_sta_t',\n", + " 'xy_v': 'mars_sta_v'},\n", + " 'slug': 'tao-mars-sta'}],\n", + " 'sb': [{'parameters': {'atse': 'mars_stb_da',\n", + " 'brad': 'mars_stb_bx',\n", + " 'btan': 'mars_stb_b',\n", + " 'dens': 'mars_stb_n',\n", + " 'pdyn': 'mars_stb_pdyn',\n", + " 'temp': 'mars_stb_t',\n", + " 'xy_v': 'mars_stb_v'},\n", + " 'slug': 'tao-mars-stb'}],\n", + " 'solo': [{'slug': 'tao-mars-solo'}]},\n", + " 'name': 'Mars',\n", + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_mars_hee'},\n", + " 'slug': 'mars-orb-all'}],\n", + " 'semimajor': 1.52366231,\n", + " 'semiminor': 1.51700011},\n", + " 'slug': 'mars',\n", + " 'title': 'Mars',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_jup_swrt'}],\n", + " 'dsc': [{'slug': 'tao_jup_dsc'}],\n", + " 'om': [{'slug': 'tao_jup_sw'}],\n", + " 'sa': [{'slug': 'tao_jup_sta'}],\n", + " 'sb': [{'slug': 'tao_jup_stb'}],\n", + " 'solo': [{'slug': 'tao_jup_solo'}]},\n", + " 'name': 'Jupiter',\n", + " 'orbit': {'models': [{'slug': 'jupiter_orb_all'}],\n", + " 'semimajor': 5.45516759,\n", + " 'semiminor': 4.95155843},\n", + " 'slug': 'jupiter',\n", + " 'tap': {'target_name': 'Jupiter'},\n", + " 'title': 'Jupiter',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_sat_swrt'}],\n", + " 'dsc': [{'slug': 'tao_sat_dsc'}],\n", + " 'om': [{'slug': 'tao_sat_sw'}],\n", + " 'sa': [{'slug': 'tao_sat_sta'}],\n", + " 'sb': [{'slug': 'tao_sat_stb'}],\n", + " 'solo': [{'slug': 'tao_sat_solo'}]},\n", + " 'name': 'Saturn',\n", + " 'orbit': {'models': [{'slug': 'saturn_orb_all'}],\n", + " 'semimajor': 9.53707032,\n", + " 'semiminor': 9.5230773},\n", + " 'slug': 'saturn',\n", + " 'tap': {'target_name': 'Saturn'},\n", + " 'title': 'Saturn',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_ura_swrt'}],\n", + " 'dsc': [{'slug': 'tao_ura_dsc'}],\n", + " 'om': [{'slug': 'tao_ura_sw'}],\n", + " 'sa': [{'slug': 'tao_ura_sta'}],\n", + " 'sb': [{'slug': 'tao_ura_stb'}],\n", + " 'solo': [{'slug': 'tao_ura_solo'}]},\n", + " 'name': 'Uranus',\n", + " 'orbit': {'models': [{'slug': 'uranus_orb_all'}],\n", + " 'semimajor': 19.19,\n", + " 'semiminor': 19.17},\n", + " 'slug': 'uranus',\n", + " 'title': 'Uranus',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_nep_swrt'}],\n", + " 'dsc': [{'slug': 'tao_nep_dsc'}],\n", + " 'om': [{'slug': 'tao_nep_sw'}],\n", + " 'sa': [{'slug': 'tao_nep_sta'}],\n", + " 'sb': [{'slug': 'tao_nep_stb'}],\n", + " 'solo': [{'slug': 'tao_nep_solo'}]},\n", + " 'name': 'Neptune',\n", + " 'orbit': {'models': [{'slug': 'neptune_orb_all'}],\n", + " 'semimajor': 30.06896348,\n", + " 'semiminor': 30.06785516},\n", + " 'slug': 'neptune',\n", + " 'title': 'Neptune',\n", + " 'type': 'planet'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", + " 'om': [{'slug': 'tao_mercury_sw'}],\n", + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", + " 'name': 'SoloColombo Source',\n", + " 'orbit': 
{'models': [{'slug': 'bepi_cruise_all'}, {'slug': 'earth_orb_all'}]},\n", + " 'slug': 'bepi_src',\n", + " 'title': 'SoloColombo Source',\n", + " 'type': 'source'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_bepi_swrt'}],\n", + " 'om': [{'slug': 'tao_bepi_sw'}],\n", + " 'sa': [{'slug': 'tao_bepi_sta'}]},\n", + " 'name': 'BepiColombo',\n", + " 'orbit': {'models': [{'slug': 'bepi_cruise_all'}]},\n", + " 'slug': 'bepi',\n", + " 'title': 'BepiColombo',\n", + " 'type': 'probe'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", + " 'om': [{'slug': 'tao_mercury_sw'}],\n", + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", + " 'name': 'Solo Source',\n", + " 'orbit': {'models': [{'slug': 'so_orb_all'}, {'slug': 'earth_orb_all'}]},\n", + " 'slug': 'solo_src',\n", + " 'title': 'Solo Source',\n", + " 'type': 'source'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_so_swrt'}], 'om': [{'slug': 'tao_so_sw'}]},\n", + " 'name': 'SolarOrbiter',\n", + " 'orbit': {'models': [{'slug': 'so_orb_all'}]},\n", + " 'slug': 'solo',\n", + " 'title': 'SolarOrbiter',\n", + " 'type': 'probe'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", + " 'om': [{'slug': 'tao_mercury_sw'}],\n", + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", + " 'name': 'Parker Source',\n", + " 'orbit': {'models': [{'slug': 'psp_orb_all'}, {'slug': 'earth_orb_all'}]},\n", + " 'slug': 'psp_src',\n", + " 'title': 'Parker Source',\n", + " 'type': 'source'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'art': [{'slug': 'tao_psp_swrt'}],\n", + " 'om': [{'slug': 'tao_psp_sw'}],\n", + " 'sa': [{'slug': 'tao_psp_sta'}]},\n", + " 'name': 'Parker Solar Probe',\n", + " 'orbit': {'models': [{'slug': 'psp_orb_all'}]},\n", + " 'slug': 'psp',\n", + " 'title': 'Parker Solar Probe',\n", + " 'type': 'probe'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'om': [{'slug': 'tao_ros_sw'}],\n", + " 'sa': [{'slug': 'tao_ros_sw'}],\n", + " 'sb': [{'slug': 'tao_ros_sw'}]},\n", + " 'name': 'Rosetta',\n", + " 'orbit': {'models': [{'slug': 'ros_orb_cruise',\n", + " 'stopped_at': '2014-08-02T00:00:00'},\n", + " {'slug': 'p67_orb_all',\n", + " 'started_at': '2014-08-02T00:00:00'}]},\n", + " 'slug': 'rosetta',\n", + " 'title': 'Rosetta',\n", + " 'type': 'probe'},\n", + " {'default': False,\n", + " 'locked': False,\n", + " 'models': {'om': [{'slug': 'tao_juno_sw'}],\n", + " 'sa': [{'slug': 'tao_juno_sw'}],\n", + " 'sb': [{'slug': 'tao_juno_sw'}]},\n", + " 'name': 'Juno',\n", + " 'orbit': {'models': [{'slug': 'juno_cruise_all',\n", + " 'stopped_at': '2016-07-05T03:53:00'},\n", + " {'slug': 'jupiter_orb_all',\n", + " 'started_at': '2016-07-05T03:53:00'}]},\n", + " 'slug': 'juno',\n", + " 'title': 'Juno',\n", + " 'type': 'probe'},\n", + " {'default': False,\n", + " 'locked': True,\n", + " 'models': {'om': [{'slug': 'tao_p67_sw'}],\n", + " 'sa': [{'slug': 'tao_p67_sw'}],\n", + " 'sb': [{'slug': 'tao_p67_sw'}]},\n", + " 'name': 'Churyumov-Gerasimenko',\n", + " 'orbit': {'models': [{'parameters': {'hee': 'XYZ_HEE'},\n", + " 'slug': 
'p67_orb_all'}]},\n", + " 'slug': 'p67',\n", + " 'title': 'Churyumov-Gerasimenko (coming soon)',\n", + " 'type': 'comet'}]\n" + ] + } + ], + "source": [ + "from yaml import load as yaml_load\n", + "from yaml import Loader\n", + "from pprint import pprint\n", + "\n", + "\n", + "config_file = '../my_cfg.yaml'\n", + "config_file = '../config.yml'\n", + "\n", + "with open(config_file, 'r', encoding='utf8') as config_file:\n", + " config = yaml_load(config_file.read(), Loader=Loader)\n", + "\n", + "\n", + "pprint(config.keys())\n", + "pprint(config['targets'])" + ] + }, + { + "cell_type": "markdown", + "id": "48c3efcb-3f20-4ccc-b2ec-21bed3a14b9f", + "metadata": {}, + "source": [ + "#### How we read" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b02f3e74-0d95-4b3f-b696-a610a90f6e16", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import os\n", + "\n", + "sys.path.insert(0, os.path.abspath('..'))\n", + "from web.run import get_target_config" + ] + }, + { + "cell_type": "markdown", + "id": "9aefa4ad-7600-4034-b90e-26be168f0850", + "metadata": {}, + "source": [ + "##### The 'targets' section of the config structure, is a list of dictionnaries" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "eb88fb52-705c-45e4-87ef-79fb37590991", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " planet Mercury\n", + " planet Venus\n", + " planet Earth\n", + " planet Mars\n", + " planet Jupiter\n", + " planet Saturn\n", + " planet Uranus\n", + " planet Neptune\n", + " source SoloColombo Source\n", + " probe BepiColombo\n", + " source Solo Source\n", + " probe SolarOrbiter\n", + " source Parker Source\n", + " probe Parker Solar Probe\n", + " probe Rosetta\n", + " probe Juno\n", + " comet Churyumov-Gerasimenko\n" + ] + } + ], + "source": [ + "for _t in config['targets']:\n", + " print(f\" {_t['type']:10} {_t['name']}\")" + ] + }, + { + "cell_type": "markdown", + "id": "075f56b5-17f0-42b3-a982-a169c747345f", + "metadata": {}, + "source": [ + "##### Each planet or probe section contains a dict " + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "49e2025d-cb8a-49a1-bb05-ecfef42b32ed", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['type', 'slug', 'name', 'title', 'orbit', 'models', 'locked', 'default'])\n" + ] + } + ], + "source": [ + "\n", + "this_target_name = 'mars'\n", + "this_target_cfg = get_target_config(this_target_name)\n", + "pprint(this_target_cfg.keys())" + ] + }, + { + "cell_type": "markdown", + "id": "58a14aa1-ba29-4b19-894b-99a1ef88654e", + "metadata": {}, + "source": [ + "##### in which we are mainly interested by 'orbit' and 'models' keys" + ] + }, + { + "cell_type": "markdown", + "id": "1c1a6438-4f2c-4cbe-846f-3a36dd4922b7", + "metadata": {}, + "source": [ + "In the models section there is a dict indexed by the sources , each value being a list of dataset id with the amda ids of parameters." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "7c80f0cd-0a64-4b62-803f-7d813dd4b0cd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "models\n", + "------\n", + "{'art': [{'parameters': {'atse': 'mars_swrt_da',\n", + " 'brad': 'mars_swrt_bx',\n", + " 'btan': 'mars_swrt_b',\n", + " 'dens': 'mars_swrt_n',\n", + " 'pdyn': 'mars_swrt_pdyn',\n", + " 'temp': 'mars_swrt_t',\n", + " 'xy_v': 'mars_swrt_v'},\n", + " 'slug': 'tao-mars-swrt'}],\n", + " 'dsc': [{'parameters': {'atse': 'mars_dsc_da',\n", + " 'brad': 'mars_dsc_bx',\n", + " 'btan': 'mars_dsc_b',\n", + " 'dens': 'mars_dsc_n',\n", + " 'pdyn': 'mars_dsc_pdyn',\n", + " 'temp': 'mars_dsc_t',\n", + " 'xy_v': 'mars_dsc_v'},\n", + " 'slug': 'tao-mars-dsc'}],\n", + " 'om': [{'parameters': {'atse': 'mars_sw_da',\n", + " 'brad': 'mars_sw_bx',\n", + " 'btan': 'mars_sw_b',\n", + " 'dens': 'mars_sw_n',\n", + " 'pdyn': 'mars_sw_pdyn',\n", + " 'temp': 'mars_sw_t',\n", + " 'xy_v': 'mars_sw_v'},\n", + " 'slug': 'tao-mars-sw'}],\n", + " 'sa': [{'parameters': {'atse': 'mars_sta_da',\n", + " 'brad': 'mars_sta_bx',\n", + " 'btan': 'mars_sta_b',\n", + " 'dens': 'mars_sta_n',\n", + " 'pdyn': 'mars_sta_pdyn',\n", + " 'temp': 'mars_sta_t',\n", + " 'xy_v': 'mars_sta_v'},\n", + " 'slug': 'tao-mars-sta'}],\n", + " 'sb': [{'parameters': {'atse': 'mars_stb_da',\n", + " 'brad': 'mars_stb_bx',\n", + " 'btan': 'mars_stb_b',\n", + " 'dens': 'mars_stb_n',\n", + " 'pdyn': 'mars_stb_pdyn',\n", + " 'temp': 'mars_stb_t',\n", + " 'xy_v': 'mars_stb_v'},\n", + " 'slug': 'tao-mars-stb'}],\n", + " 'solo': [{'slug': 'tao-mars-solo'}]}\n" + ] + } + ], + "source": [ + "print(\"models\\n------\")\n", + "pprint(this_target_cfg['models'])" + ] + }, + { + "cell_type": "markdown", + "id": "ce2ff32d-8dde-45cf-8b47-11f0773da0f0", + "metadata": {}, + "source": [ + "In the orbit section there is a dict indexed by the sources , each value being a list of dataset id with the amda ids of parameters." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "2c8ca3c6-ef82-482b-b159-486b2177e35a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "orbit\n", + "-----\n", + "{'models': [{'parameters': {'xy_hee': 'xyz_mars_hee'}, 'slug': 'mars-orb-all'}],\n", + " 'semimajor': 1.52366231,\n", + " 'semiminor': 1.51700011}\n" + ] + } + ], + "source": [ + "print(\"orbit\\n-----\")\n", + "pprint(this_target_cfg['orbit'])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/web/run.py b/web/run.py index 93a7dca..8edfa4a 100755 --- a/web/run.py +++ b/web/run.py @@ -72,16 +72,16 @@ DEBUG = environ.get('DEBUG') == 'true' LOG_FILE = get_path('run.log') -log = logging.getLogger("HelioPropa") +hp_logger = logging.getLogger("HelioPropa") if DEBUG: - log.setLevel(logging.DEBUG) + hp_logger.setLevel(logging.DEBUG) else: - log.setLevel(logging.ERROR) + hp_logger.setLevel(logging.ERROR) logHandler = logging.FileHandler(LOG_FILE) logHandler.setFormatter(logging.Formatter( "%(asctime)s - %(levelname)s - %(message)s" )) -log.addHandler(logHandler) +hp_logger.addHandler(logHandler) # HARDCODED CONFIGURATION ##################################################### @@ -176,13 +176,13 @@ environ['CDF_LIB'] = CDF_LIB app = Flask(__name__, root_path=THIS_DIRECTORY) app.debug = DEBUG if app.debug: - log.info("Starting Flask app IN DEBUG MODE...") + hp_logger.info("Starting Flask app IN DEBUG MODE...") else: - log.info("Starting Flask app...") + hp_logger.info("Starting Flask app...") def handle_error(e): - log.error(e) + hp_logger.error(e) return str(e) # wish we could use the default error renderer here @@ -280,7 +280,7 @@ tpl_global_vars = { # HELPERS ##################################################################### def abort(code, message): - log.error("Abort: " + message) + hp_logger.error("Abort: " + message) abort_flask(code, message) @@ -343,7 +343,7 @@ def datetime_from_list(time_list): try: time_list = [str(i, 'UTF8') for i in time_list] except Exception as e: - log.error(e) + hp_logger.error(e) # Day Of Year starts at 0, but for our datetime parser it starts at 1 doy = '{:03d}'.format(int(''.join(time_list[4:7])) + 1) return datetime.datetime.strptime( @@ -561,7 +561,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): startTime=started_at.isoformat(), stopTime=stopped_at.isoformat() ) - log.info("Fetching remote gzip files list at '%s'." % url) + hp_logger.info("Fetching remote gzip files list at '%s'." % url) retries = 0 success = False errors = [] @@ -581,21 +581,21 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): # raise Exception("API says it's out of time at '%s'." 
% url) success = True except Exception as e: - log.warning("Failed (%d/3) '%s' : %s" % (retries + 1, url, e.message)) + hp_logger.warning("Failed (%d/3) '%s' : %s" % (retries + 1, url, e.message)) remote_gzip_files = [] errors.append(e) finally: retries += 1 if not remote_gzip_files: - log.error("Failed to retrieve data from AMDA.") - log.error("Failed to fetch gzip files list for %s at '%s' : %s" % - (orbiter, url, errors)) + hp_logger.error("Failed to retrieve data from AMDA.") + hp_logger.error("Failed to fetch gzip files list for %s at '%s' : %s" % + (orbiter, url, errors)) abort(400, "Failed to fetch gzip files list for %s at '%s' : %s" % (orbiter, url, errors)) else: remote_gzip_files = list(set(remote_gzip_files)) - log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) + hp_logger.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) local_gzip_files = [] for remote_gzip_file in remote_gzip_files: @@ -609,16 +609,16 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): local_gzip_file = join(CACHE_DIR, get_local_filename(remote_gzip_file)) local_gzip_files.append(local_gzip_file) if not isfile(local_gzip_file): - log.debug("Retrieving '%s'..." % local_gzip_file) + hp_logger.debug("Retrieving '%s'..." % local_gzip_file) urllib_request.urlretrieve(remote_gzip_file, local_gzip_file) - log.debug("Retrieved '%s'." % local_gzip_file) + hp_logger.debug("Retrieved '%s'." % local_gzip_file) else: - log.debug("Found '%s' in the cache." % local_gzip_file) + hp_logger.debug("Found '%s' in the cache." % local_gzip_file) local_netc_files = [] for local_gzip_file in local_gzip_files: local_netc_file = local_gzip_file[0:-3] - log.debug("Unzipping '%s'..." % local_gzip_file) + hp_logger.debug("Unzipping '%s'..." % local_gzip_file) success = True try: with gzip.open(local_gzip_file) as f: @@ -627,14 +627,14 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): g.write(file_content) except Exception as e: success = False - log.error("Cannot process gz file '%s' from '%s' : %s" % - (local_gzip_file, url, e)) + hp_logger.error("Cannot process gz file '%s' from '%s' : %s" % + (local_gzip_file, url, e)) # Sometimes, the downloaded gz is corrupted, and CRC checks fail. # We want to delete the local gz file and try again next time. removefile(local_gzip_file) if success: local_netc_files.append(local_netc_file) - log.debug("Unzipped '%s'." % local_gzip_file) + hp_logger.debug("Unzipped '%s'." % local_gzip_file) return list(set(local_netc_files)) # remove possible dupes @@ -689,7 +689,7 @@ def get_data_for_target(target_config, input_slug, """ :return: dict whose keys are datetime as str, values tuples of data """ - log.debug("Grabbing data for '%s'..." % target_config['slug']) + hp_logger.debug("Grabbing data for '%s'..." % target_config['slug']) try: models = target_config['models'][input_slug] @@ -717,19 +717,19 @@ def get_data_for_target(target_config, input_slug, target_config['slug'], orbit['slug'], s0, s1 ) for orbit_file in orbit_files: - log.debug("%s: opening orbit NETCDF4 '%s'..." % - (target_config['name'], orbit_file)) + hp_logger.debug("%s: opening orbit NETCDF4 '%s'..." % + (target_config['name'], orbit_file)) cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms data_hee = _read_var(cdf_handle, nc_keys, 'hee', mandatory=True) - log.debug("%s: aggregating data from '%s'..." % - (target_config['name'], orbit_file)) + hp_logger.debug("%s: aggregating data from '%s'..." 
% + (target_config['name'], orbit_file)) for ltime, datum_hee in zip(times, data_hee): try: dtime = datetime_from_list(ltime) except Exception: - log.error("Failed to parse time from get__data_for_target %s." % ltime) + hp_logger.error("Failed to parse time from get__data_for_target %s." % ltime) raise # Keep only what's in the interval if s0 <= dtime <= s1: @@ -749,12 +749,12 @@ def get_data_for_target(target_config, input_slug, nc_keys.update(model['parameters']) if len(model_files) == 0: - log.warning("No model data for '%s' '%s'." - % (target_config['slug'], model['slug'])) + hp_logger.warning("No model data for '%s' '%s'." + % (target_config['slug'], model['slug'])) for model_file in model_files: - log.debug("%s: opening model NETCDF4 '%s'..." % - (target_config['name'], model_file)) + hp_logger.debug("%s: opening model NETCDF4 '%s'..." % + (target_config['name'], model_file)) cdf_handle = Dataset(model_file, "r", format="NETCDF4") # log.debug(cdf_handle.variables.keys()) @@ -782,15 +782,15 @@ def get_data_for_target(target_config, input_slug, # FIXME ignored_count = 0 - log.debug("%s: aggregating data from '%s'..." % - (target_config['name'], model_file)) + hp_logger.debug("%s: aggregating data from '%s'..." % + (target_config['name'], model_file)) for ltime, datum_v, datum_b, datum_t, datum_n, datum_p, datum_a \ in zip(times, data_v, data_b, data_t, data_n, data_p, data_a): try: dtime = datetime_from_list(ltime) except Exception: - log.error("Failed to parse time from %s." % ltime) + hp_logger.error("Failed to parse time from %s." % ltime) raise if not (s0 <= dtime <= s1): @@ -857,8 +857,8 @@ def get_data_for_target(target_config, input_slug, # Improve this loop so as to remove this stinky debug log if ignored_count > 0: - log.debug(" Ignored %d datum(s) during ~\"drizzling\"." - % ignored_count) + hp_logger.debug(" Ignored %d datum(s) during ~\"drizzling\"." + % ignored_count) cdf_handle.close() @@ -867,7 +867,7 @@ def get_data_for_target(target_config, input_slug, def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): target_config = get_target_config(target_slug) - log.debug("Crunching CSV contents for '%s'..." % target_config['name']) + hp_logger.debug("Crunching CSV contents for '%s'..." % target_config['name']) si = StringIO() cw = csv_writer(si) cw.writerow(PROPERTIES) @@ -877,11 +877,11 @@ def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): started_at=started_at, stopped_at=stopped_at ) - log.debug("Writing and sorting CSV for '%s'..." % target_config['slug']) + hp_logger.debug("Writing and sorting CSV for '%s'..." % target_config['slug']) for dkey in sorted(all_data): cw.writerow(all_data[dkey]) - log.info("Generated CSV contents for '%s'." % target_config['slug']) + hp_logger.info("Generated CSV contents for '%s'." 
% target_config['slug']) return si.getvalue() @@ -891,10 +891,10 @@ def generate_csv_contents_spz(target_slug, input_slug, started_at, stopped_at): orbit_dict = target_config['orbit']['models'][0]['parameters'] parameters_dict = {**plasma_dict, **orbit_dict} - log.info(f"Aggregating dataframes speazy parameters for '{input_slug}' to '{target_slug}'" ) + hp_logger.info(f"Aggregating dataframes speazy parameters for '{input_slug}' to '{target_slug}'") list_df = [] for _name, _id in parameters_dict.items(): - log.debug(f"Getting parameter id '{_id}' for '{_name}'") + hp_logger.debug(f"Getting parameter id '{_id}' for '{_name}'") _df = amda.get_data(_id, started_at, stopped_at).to_dataframe() if _name == 'xy_v': _df = _df.rename(columns={_df.columns[0]: 'vrad', _df.columns[1]: 'vtan'}) @@ -952,23 +952,23 @@ def generate_csv_file_if_needed(target_slug, input_slug, csv_generator = generate_csv_contents if generate: - log.info("Generating CSV '%s'..." % local_csv_file) + hp_logger.info("Generating CSV '%s'..." % local_csv_file) try: with open(local_csv_file, mode="w+") as f: f.write(csv_generator( target_slug=target_slug, input_slug=input_slug, started_at=started_at, stopped_at=stopped_at )) - log.info("Generation of '%s' done." % filename) + hp_logger.info("Generation of '%s' done." % filename) except Exception as e: from sys import exc_info from traceback import extract_tb exc_type, exc_value, exc_traceback = exc_info() - log.error(e) + hp_logger.error(e) for trace in extract_tb(exc_traceback): - log.error(trace) + hp_logger.error(trace) if isfile(local_csv_file): - log.warning("Removing failed CSV '%s'..." % local_csv_file) + hp_logger.warning("Removing failed CSV '%s'..." % local_csv_file) removefile(local_csv_file) abort(500, "Failed creating CSV '%s' : %s" % (filename, e)) @@ -1074,7 +1074,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): try: index = _data['columns'].index(_key) except ValueError: - log.error("Key %s not found in columns of %s" % (_key, _data)) + hp_logger.error("Key %s not found in columns of %s" % (_key, _data)) raise return index @@ -1106,7 +1106,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): with open(get_path("../data/catalog/%s" % cl_datum['file'])) as f: json_data = json.load(f) if 'start' not in cl_datum: - log.error("Invalid configuration: 'start' is missing.") + hp_logger.error("Invalid configuration: 'start' is missing.") continue # skip this if 'format' not in cl_datum: cl_datum['format'] = CME_DATE_FMT @@ -1189,12 +1189,12 @@ def update_spacepy(): Importing pycdf will fail if the toolbox is not up to date. """ try: - log.info("Updating spacepy's toolbox…") + hp_logger.info("Updating spacepy's toolbox…") import spacepy.toolbox spacepy.toolbox.update() except Exception as e: - log.error("Failed to update spacepy : %s." % e) + hp_logger.error("Failed to update spacepy : %s." % e) tpl_global_vars['visits'] = get_hit_counter() @@ -1330,7 +1330,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): local_gzip_file = join(CACHE_DIR, gzip_filename) if not isfile(local_gzip_file): - log.debug("Creating the CSV files for the tarball...") + hp_logger.debug("Creating the CSV files for the tarball...") for target_config in targets_configs: filename = "%s_%s_%s_%s.csv" % ( target_config['slug'], input_slug, sta, sto @@ -1345,7 +1345,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): input_slug=input_slug )) - log.debug("Creating the tarball '%s'..." 
% local_gzip_file) + hp_logger.debug("Creating the tarball '%s'..." % local_gzip_file) with tarfile.open(local_gzip_file, "w:gz") as tar: for target_config in targets_configs: filename = "%s_%s_%s_%s.csv" % ( @@ -1410,7 +1410,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): nc_path = join(CACHE_DIR, nc_filename) if not isfile(nc_path): - log.debug("Creating the NetCDF file '%s'..." % nc_filename) + hp_logger.debug("Creating the NetCDF file '%s'..." % nc_filename) nc_handle = Dataset(nc_path, "w", format="NETCDF4") try: nc_handle.description = "Model and orbit data for targets" # todo @@ -1419,7 +1419,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): available_params = list(PROPERTIES) for target in targets_configs: target_slug = target['slug'] - log.debug("Adding group '%s' to the NetCDF..." % target_slug) + hp_logger.debug("Adding group '%s' to the NetCDF..." % target_slug) nc_group = nc_handle.createGroup(target_slug) data = get_data_for_target( target_config=target, input_slug=input_slug, @@ -1473,10 +1473,10 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): values_y.append(dval[index_y]) nc_x[:] = values_x nc_y[:] = values_y - log.debug("Writing NetCDF '%s'..." % nc_filename) + hp_logger.debug("Writing NetCDF '%s'..." % nc_filename) except Exception: - log.error("Failed to generate NetCDF '%s'." % nc_filename) + hp_logger.error("Failed to generate NetCDF '%s'." % nc_filename) raise finally: nc_handle.close() @@ -1529,7 +1529,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): cdf_path = join(CACHE_DIR, cdf_filename) if not isfile(cdf_path): - log.debug("Creating the CDF file '%s'..." % cdf_filename) + hp_logger.debug("Creating the CDF file '%s'..." % cdf_filename) try: from spacepy import pycdf except ImportError: @@ -1539,10 +1539,10 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): try: from spacepy import pycdf except ImportError as e: - log.error("Failed to import pycdf from spacepy : %s" % e) + hp_logger.error("Failed to import pycdf from spacepy : %s" % e) raise except Exception as e: - log.error("Failed to import pycdf from spacepy : %s" % e) + hp_logger.error("Failed to import pycdf from spacepy : %s" % e) raise try: @@ -1618,18 +1618,18 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): else: values_xhee.append(0) values_yhee.append(0) - log.warning("Orbit data for %s has NaNs." % target_slug) + hp_logger.warning("Orbit data for %s has NaNs." % target_slug) cdf_handle[kx] = values_xhee cdf_handle[ky] = values_yhee cdf_handle[kx].attrs['UNITS'] = 'Au' cdf_handle[ky].attrs['UNITS'] = 'Au' - log.debug("Writing CDF '%s'..." % cdf_filename) + hp_logger.debug("Writing CDF '%s'..." % cdf_filename) cdf_handle.close() - log.debug("Wrote CDF '%s'." % cdf_filename) + hp_logger.debug("Wrote CDF '%s'." % cdf_filename) except Exception as e: - log.error("Failed to generate CDF '%s'." % cdf_filename) + hp_logger.error("Failed to generate CDF '%s'." % cdf_filename) if isfile(cdf_path): removefile(cdf_path) raise @@ -1643,7 +1643,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): @app.route("/_auroral_catalog.csv") def download_auroral_catalog_csv(target): tc = validate_tap_target_config(target) - log.debug("Requesting auroral emissions CSV for %s..." % tc['name']) + hp_logger.debug("Requesting auroral emissions CSV for %s..." 
% tc['name']) filename = "%s_auroral_catalog.csv" % (target) local_csv_file = join(CACHE_DIR, filename) @@ -1670,10 +1670,10 @@ def download_auroral_catalog_csv(target): # 'time_min', 'time_max', 'thumbnail_url', 'external_link' # cw.writerow(head) - log.debug("Writing auroral emissions CSV for %s..." % tc['name']) + hp_logger.debug("Writing auroral emissions CSV for %s..." % tc['name']) cw.writerows(emissions) - log.info("Generated auroral emissions CSV contents for %s." % tc['name']) + hp_logger.info("Generated auroral emissions CSV contents for %s." % tc['name']) return si.getvalue() # if not isfile(local_csv_file): -- libgit2 0.21.2
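
For reference, the renamed logger setup from web/run.py, condensed into a standalone snippet. Everything here is taken from the hunks above; only the LOG_FILE path is simplified (run.py resolves it with get_path('run.log')).

import logging
from os import environ

DEBUG = environ.get('DEBUG') == 'true'
LOG_FILE = 'run.log'  # web/run.py resolves this via get_path('run.log')

# The module logger formerly bound to `log` is now `hp_logger`; the
# configuration (level from DEBUG, file handler, formatter) is unchanged.
hp_logger = logging.getLogger("HelioPropa")
hp_logger.setLevel(logging.DEBUG if DEBUG else logging.ERROR)

logHandler = logging.FileHandler(LOG_FILE)
logHandler.setFormatter(logging.Formatter(
    "%(asctime)s - %(levelname)s - %(message)s"
))
hp_logger.addHandler(logHandler)

hp_logger.error("Example message, formatted as in web/run.py")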
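
The notebook imports get_target_config from web.run without showing its body. Below is a minimal sketch of that lookup, not the actual implementation: it assumes config.yml is reachable from the working directory and that targets are matched on their 'slug' key (both consistent with the notebook output); the names load_config and get_target_config_sketch are illustrative only.

from yaml import load as yaml_load, Loader


def load_config(path="config.yml"):
    # Same loading pattern as the notebook's first code cell.
    with open(path, "r", encoding="utf8") as f:
        return yaml_load(f.read(), Loader=Loader)


def get_target_config_sketch(slug, path="config.yml"):
    """Return the entry of config['targets'] whose 'slug' matches, else raise."""
    config = load_config(path)
    for target in config["targets"]:
        if target["slug"] == slug:
            return target
    raise KeyError("No target with slug '%s' in %s" % (slug, path))


if __name__ == "__main__":
    mars = get_target_config_sketch("mars")
    # Expect: default, locked, models, name, orbit, slug, title, type
    print(sorted(mars.keys()))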
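
And a short sketch of walking the 'models' and 'orbit' sections of one target entry (e.g. the dict the notebook gets back for 'mars'). It only assumes the keys visible in the notebook output, including that dataset entries may omit 'parameters' (as the 'solo' ones do) and that planets carry 'semimajor'/'semiminor' axes.

def describe_target(target_cfg):
    # 'models' is keyed by input source (om, sa, sb, dsc, art, solo, ...);
    # each value is a list of dataset entries: a 'slug' plus, optionally,
    # a 'parameters' mapping of property names to AMDA parameter ids.
    for source, datasets in target_cfg.get("models", {}).items():
        for dataset in datasets:
            params = dataset.get("parameters", {})
            print("%s -> %s (%d AMDA parameters)" % (source, dataset["slug"], len(params)))

    # 'orbit' holds its own 'models' list (orbit datasets) and, for planets,
    # the semi-major/semi-minor axes used for the orbit ellipse.
    orbit = target_cfg.get("orbit", {})
    for dataset in orbit.get("models", []):
        print("orbit -> %s" % dataset["slug"])
    print("axes:", orbit.get("semimajor"), orbit.get("semiminor"))

# Usage, following the notebook: describe_target(get_target_config('mars'))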