Commit 89c56528070ccccdc5ad1bd97373dda7b3f6daa9
1 parent: e4a36ad2
Exists in: rhitier-dev

Rename heliopropa logger

Showing 2 changed files with 674 additions and 67 deletions
@@ -0,0 +1,607 @@ | @@ -0,0 +1,607 @@ | ||
1 | +{ | ||
2 | + "cells": [ | ||
3 | + { | ||
4 | + "cell_type": "markdown", | ||
5 | + "id": "1f0ee132-a51a-4f2b-8f22-2eeea48b4e09", | ||
6 | + "metadata": {}, | ||
7 | + "source": [ | ||
8 | + "#### How we load" | ||
9 | + ] | ||
10 | + }, | ||
11 | + { | ||
12 | + "cell_type": "code", | ||
13 | + "execution_count": 28, | ||
14 | + "id": "b43839b2-f9b3-48f3-a4de-77464eb35af1", | ||
15 | + "metadata": {}, | ||
16 | + "outputs": [ | ||
17 | + { | ||
18 | + "name": "stdout", | ||
19 | + "output_type": "stream", | ||
20 | + "text": [ | ||
21 | + "dict_keys(['meta', 'header', 'authors', 'amda', 'defaults', 'layers', 'inputs', 'targets', 'toots'])\n", | ||
22 | + "[{'default': False,\n", | ||
23 | + " 'locked': False,\n", | ||
24 | + " 'models': {'art': [{'parameters': {'atse': 'mercury_swrt_da',\n", | ||
25 | + " 'brad': 'mercury_swrt_bx',\n", | ||
26 | + " 'btan': 'mercury_swrt_b',\n", | ||
27 | + " 'dens': 'mercury_swrt_n',\n", | ||
28 | + " 'pdyn': 'mercury_swrt_pdyn',\n", | ||
29 | + " 'temp': 'mercury_swrt_t',\n", | ||
30 | + " 'xy_v': 'mercury_swrt_v'},\n", | ||
31 | + " 'slug': 'tao-mercury-swrt'}],\n", | ||
32 | + " 'dsc': [{'parameters': {'atse': 'mercury_dsc_da',\n", | ||
33 | + " 'brad': 'mercury_dsc_bx',\n", | ||
34 | + " 'btan': 'mercury_dsc_b',\n", | ||
35 | + " 'dens': 'mercury_dsc_n',\n", | ||
36 | + " 'pdyn': 'mercury_dsc_pdyn',\n", | ||
37 | + " 'temp': 'mercury_dsc_t',\n", | ||
38 | + " 'xy_v': 'mercury_dsc_v'},\n", | ||
39 | + " 'slug': 'tao-mercury-dsc'}],\n", | ||
40 | + " 'om': [{'parameters': {'atse': 'mercury_sw_da',\n", | ||
41 | + " 'brad': 'mercury_sw_bx',\n", | ||
42 | + " 'btan': 'mercury_sw_b',\n", | ||
43 | + " 'dens': 'mercury_sw_n',\n", | ||
44 | + " 'pdyn': 'mercury_sw_pdyn',\n", | ||
45 | + " 'temp': 'mercury_sw_t',\n", | ||
46 | + " 'xy_v': 'mercury_sw_v'},\n", | ||
47 | + " 'slug': 'tao-mercury-sw'}],\n", | ||
48 | + " 'sa': [{'parameters': {'atse': 'mercury_sta_da',\n", | ||
49 | + " 'brad': 'mercury_sta_bx',\n", | ||
50 | + " 'btan': 'mercury_sta_b',\n", | ||
51 | + " 'dens': 'mercury_sta_n',\n", | ||
52 | + " 'pdyn': 'mercury_sta_pdyn',\n", | ||
53 | + " 'temp': 'mercury_sta_t',\n", | ||
54 | + " 'xy_v': 'mercury_sta_v'},\n", | ||
55 | + " 'slug': 'tao-mercury-sta'}],\n", | ||
56 | + " 'sb': [{'parameters': {'atse': 'mercury_stb_da',\n", | ||
57 | + " 'brad': 'mercury_stb_bx',\n", | ||
58 | + " 'btan': 'mercury_stb_b',\n", | ||
59 | + " 'dens': 'mercury_stb_n',\n", | ||
60 | + " 'pdyn': 'mercury_stb_pdyn',\n", | ||
61 | + " 'temp': 'mercury_stb_t',\n", | ||
62 | + " 'xy_v': 'mercury_stb_v'},\n", | ||
63 | + " 'slug': 'tao-mercury-stb'}],\n", | ||
64 | + " 'solo': [{'slug': 'tao-mercury-solo'}]},\n", | ||
65 | + " 'name': 'Mercury',\n", | ||
66 | + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_mercury_hee'},\n", | ||
67 | + " 'slug': 'mercury-orb-all'}],\n", | ||
68 | + " 'semimajor': 0,\n", | ||
69 | + " 'semiminor': 0},\n", | ||
70 | + " 'slug': 'mercury',\n", | ||
71 | + " 'title': 'Mercury',\n", | ||
72 | + " 'type': 'planet'},\n", | ||
73 | + " {'default': True,\n", | ||
74 | + " 'locked': False,\n", | ||
75 | + " 'models': {'art': [{'parameters': {'atse': 'venus_swrt_da',\n", | ||
76 | + " 'brad': 'venus_swrt_bx',\n", | ||
77 | + " 'btan': 'venus_swrt_b',\n", | ||
78 | + " 'dens': 'venus_swrt_n',\n", | ||
79 | + " 'pdyn': 'venus_swrt_pdyn',\n", | ||
80 | + " 'temp': 'venus_swrt_t',\n", | ||
81 | + " 'xy_v': 'venus_swrt_v'},\n", | ||
82 | + " 'slug': 'tao-venus-swrt'}],\n", | ||
83 | + " 'dsc': [{'parameters': {'atse': 'venus_dsc_da',\n", | ||
84 | + " 'brad': 'venus_dsc_bx',\n", | ||
85 | + " 'btan': 'venus_dsc_b',\n", | ||
86 | + " 'dens': 'venus_dsc_n',\n", | ||
87 | + " 'pdyn': 'venus_dsc_pdyn',\n", | ||
88 | + " 'temp': 'venus_dsc_t',\n", | ||
89 | + " 'xy_v': 'venus_dsc_v'},\n", | ||
90 | + " 'slug': 'tao-venus-dsc'}],\n", | ||
91 | + " 'om': [{'parameters': {'atse': 'venus_sw_da',\n", | ||
92 | + " 'brad': 'venus_sw_bx',\n", | ||
93 | + " 'btan': 'venus_sw_b',\n", | ||
94 | + " 'dens': 'venus_sw_n',\n", | ||
95 | + " 'pdyn': 'venus_sw_pdyn',\n", | ||
96 | + " 'temp': 'venus_sw_t',\n", | ||
97 | + " 'xy_v': 'venus_sw_v'},\n", | ||
98 | + " 'slug': 'tao-venus-sw'}],\n", | ||
99 | + " 'sa': [{'parameters': {'atse': 'venus_sta_da',\n", | ||
100 | + " 'brad': 'venus_sta_bx',\n", | ||
101 | + " 'btan': 'venus_sta_b',\n", | ||
102 | + " 'dens': 'venus_sta_n',\n", | ||
103 | + " 'pdyn': 'venus_sta_pdyn',\n", | ||
104 | + " 'temp': 'venus_sta_t',\n", | ||
105 | + " 'xy_v': 'venus_sta_v'},\n", | ||
106 | + " 'slug': 'tao-venus-sta'}],\n", | ||
107 | + " 'sb': [{'parameters': {'atse': 'venus_stb_da',\n", | ||
108 | + " 'brad': 'venus_stb_bx',\n", | ||
109 | + " 'btan': 'venus_stb_b',\n", | ||
110 | + " 'dens': 'venus_stb_n',\n", | ||
111 | + " 'pdyn': 'venus_stb_pdyn',\n", | ||
112 | + " 'temp': 'venus_stb_t',\n", | ||
113 | + " 'xy_v': 'venus_stb_v'},\n", | ||
114 | + " 'slug': 'tao-venus-stb'}],\n", | ||
115 | + " 'solo': [{'slug': 'tao-venus-solo'}]},\n", | ||
116 | + " 'name': 'Venus',\n", | ||
117 | + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_venus_hee'},\n", | ||
118 | + " 'slug': 'venus-orb-all'}],\n", | ||
119 | + " 'semimajor': 0.72333199,\n", | ||
120 | + " 'semiminor': 0.7233154},\n", | ||
121 | + " 'slug': 'venus',\n", | ||
122 | + " 'title': 'Venus',\n", | ||
123 | + " 'type': 'planet'},\n", | ||
124 | + " {'default': True,\n", | ||
125 | + " 'locked': False,\n", | ||
126 | + " 'models': {'om': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", | ||
127 | + " {'parameters': {'dens': 'Dens',\n", | ||
128 | + " 'temp': 'Temp',\n", | ||
129 | + " 'vtot': 'Vel'},\n", | ||
130 | + " 'slug': 'ace_swepam_real_1h'}],\n", | ||
131 | + " 'sa': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", | ||
132 | + " {'parameters': {'dens': 'Dens',\n", | ||
133 | + " 'temp': 'Temp',\n", | ||
134 | + " 'vtot': 'Vel'},\n", | ||
135 | + " 'slug': 'ace_swepam_real_1h'}],\n", | ||
136 | + " 'sb': [{'parameters': {'pdyn': 'RamP'}, 'slug': 'omni_hour_all'},\n", | ||
137 | + " {'parameters': {'dens': 'Dens',\n", | ||
138 | + " 'temp': 'Temp',\n", | ||
139 | + " 'vtot': 'Vel'},\n", | ||
140 | + " 'slug': 'ace_swepam_real_1h'}]},\n", | ||
141 | + " 'name': 'Earth',\n", | ||
142 | + " 'orbit': {'models': []},\n", | ||
143 | + " 'slug': 'earth',\n", | ||
144 | + " 'title': 'Earth',\n", | ||
145 | + " 'type': 'planet'},\n", | ||
146 | + " {'default': False,\n", | ||
147 | + " 'locked': False,\n", | ||
148 | + " 'models': {'art': [{'parameters': {'atse': 'mars_swrt_da',\n", | ||
149 | + " 'brad': 'mars_swrt_bx',\n", | ||
150 | + " 'btan': 'mars_swrt_b',\n", | ||
151 | + " 'dens': 'mars_swrt_n',\n", | ||
152 | + " 'pdyn': 'mars_swrt_pdyn',\n", | ||
153 | + " 'temp': 'mars_swrt_t',\n", | ||
154 | + " 'xy_v': 'mars_swrt_v'},\n", | ||
155 | + " 'slug': 'tao-mars-swrt'}],\n", | ||
156 | + " 'dsc': [{'parameters': {'atse': 'mars_dsc_da',\n", | ||
157 | + " 'brad': 'mars_dsc_bx',\n", | ||
158 | + " 'btan': 'mars_dsc_b',\n", | ||
159 | + " 'dens': 'mars_dsc_n',\n", | ||
160 | + " 'pdyn': 'mars_dsc_pdyn',\n", | ||
161 | + " 'temp': 'mars_dsc_t',\n", | ||
162 | + " 'xy_v': 'mars_dsc_v'},\n", | ||
163 | + " 'slug': 'tao-mars-dsc'}],\n", | ||
164 | + " 'om': [{'parameters': {'atse': 'mars_sw_da',\n", | ||
165 | + " 'brad': 'mars_sw_bx',\n", | ||
166 | + " 'btan': 'mars_sw_b',\n", | ||
167 | + " 'dens': 'mars_sw_n',\n", | ||
168 | + " 'pdyn': 'mars_sw_pdyn',\n", | ||
169 | + " 'temp': 'mars_sw_t',\n", | ||
170 | + " 'xy_v': 'mars_sw_v'},\n", | ||
171 | + " 'slug': 'tao-mars-sw'}],\n", | ||
172 | + " 'sa': [{'parameters': {'atse': 'mars_sta_da',\n", | ||
173 | + " 'brad': 'mars_sta_bx',\n", | ||
174 | + " 'btan': 'mars_sta_b',\n", | ||
175 | + " 'dens': 'mars_sta_n',\n", | ||
176 | + " 'pdyn': 'mars_sta_pdyn',\n", | ||
177 | + " 'temp': 'mars_sta_t',\n", | ||
178 | + " 'xy_v': 'mars_sta_v'},\n", | ||
179 | + " 'slug': 'tao-mars-sta'}],\n", | ||
180 | + " 'sb': [{'parameters': {'atse': 'mars_stb_da',\n", | ||
181 | + " 'brad': 'mars_stb_bx',\n", | ||
182 | + " 'btan': 'mars_stb_b',\n", | ||
183 | + " 'dens': 'mars_stb_n',\n", | ||
184 | + " 'pdyn': 'mars_stb_pdyn',\n", | ||
185 | + " 'temp': 'mars_stb_t',\n", | ||
186 | + " 'xy_v': 'mars_stb_v'},\n", | ||
187 | + " 'slug': 'tao-mars-stb'}],\n", | ||
188 | + " 'solo': [{'slug': 'tao-mars-solo'}]},\n", | ||
189 | + " 'name': 'Mars',\n", | ||
190 | + " 'orbit': {'models': [{'parameters': {'xy_hee': 'xyz_mars_hee'},\n", | ||
191 | + " 'slug': 'mars-orb-all'}],\n", | ||
192 | + " 'semimajor': 1.52366231,\n", | ||
193 | + " 'semiminor': 1.51700011},\n", | ||
194 | + " 'slug': 'mars',\n", | ||
195 | + " 'title': 'Mars',\n", | ||
196 | + " 'type': 'planet'},\n", | ||
197 | + " {'default': False,\n", | ||
198 | + " 'locked': False,\n", | ||
199 | + " 'models': {'art': [{'slug': 'tao_jup_swrt'}],\n", | ||
200 | + " 'dsc': [{'slug': 'tao_jup_dsc'}],\n", | ||
201 | + " 'om': [{'slug': 'tao_jup_sw'}],\n", | ||
202 | + " 'sa': [{'slug': 'tao_jup_sta'}],\n", | ||
203 | + " 'sb': [{'slug': 'tao_jup_stb'}],\n", | ||
204 | + " 'solo': [{'slug': 'tao_jup_solo'}]},\n", | ||
205 | + " 'name': 'Jupiter',\n", | ||
206 | + " 'orbit': {'models': [{'slug': 'jupiter_orb_all'}],\n", | ||
207 | + " 'semimajor': 5.45516759,\n", | ||
208 | + " 'semiminor': 4.95155843},\n", | ||
209 | + " 'slug': 'jupiter',\n", | ||
210 | + " 'tap': {'target_name': 'Jupiter'},\n", | ||
211 | + " 'title': 'Jupiter',\n", | ||
212 | + " 'type': 'planet'},\n", | ||
213 | + " {'default': False,\n", | ||
214 | + " 'locked': False,\n", | ||
215 | + " 'models': {'art': [{'slug': 'tao_sat_swrt'}],\n", | ||
216 | + " 'dsc': [{'slug': 'tao_sat_dsc'}],\n", | ||
217 | + " 'om': [{'slug': 'tao_sat_sw'}],\n", | ||
218 | + " 'sa': [{'slug': 'tao_sat_sta'}],\n", | ||
219 | + " 'sb': [{'slug': 'tao_sat_stb'}],\n", | ||
220 | + " 'solo': [{'slug': 'tao_sat_solo'}]},\n", | ||
221 | + " 'name': 'Saturn',\n", | ||
222 | + " 'orbit': {'models': [{'slug': 'saturn_orb_all'}],\n", | ||
223 | + " 'semimajor': 9.53707032,\n", | ||
224 | + " 'semiminor': 9.5230773},\n", | ||
225 | + " 'slug': 'saturn',\n", | ||
226 | + " 'tap': {'target_name': 'Saturn'},\n", | ||
227 | + " 'title': 'Saturn',\n", | ||
228 | + " 'type': 'planet'},\n", | ||
229 | + " {'default': False,\n", | ||
230 | + " 'locked': False,\n", | ||
231 | + " 'models': {'art': [{'slug': 'tao_ura_swrt'}],\n", | ||
232 | + " 'dsc': [{'slug': 'tao_ura_dsc'}],\n", | ||
233 | + " 'om': [{'slug': 'tao_ura_sw'}],\n", | ||
234 | + " 'sa': [{'slug': 'tao_ura_sta'}],\n", | ||
235 | + " 'sb': [{'slug': 'tao_ura_stb'}],\n", | ||
236 | + " 'solo': [{'slug': 'tao_ura_solo'}]},\n", | ||
237 | + " 'name': 'Uranus',\n", | ||
238 | + " 'orbit': {'models': [{'slug': 'uranus_orb_all'}],\n", | ||
239 | + " 'semimajor': 19.19,\n", | ||
240 | + " 'semiminor': 19.17},\n", | ||
241 | + " 'slug': 'uranus',\n", | ||
242 | + " 'title': 'Uranus',\n", | ||
243 | + " 'type': 'planet'},\n", | ||
244 | + " {'default': False,\n", | ||
245 | + " 'locked': False,\n", | ||
246 | + " 'models': {'art': [{'slug': 'tao_nep_swrt'}],\n", | ||
247 | + " 'dsc': [{'slug': 'tao_nep_dsc'}],\n", | ||
248 | + " 'om': [{'slug': 'tao_nep_sw'}],\n", | ||
249 | + " 'sa': [{'slug': 'tao_nep_sta'}],\n", | ||
250 | + " 'sb': [{'slug': 'tao_nep_stb'}],\n", | ||
251 | + " 'solo': [{'slug': 'tao_nep_solo'}]},\n", | ||
252 | + " 'name': 'Neptune',\n", | ||
253 | + " 'orbit': {'models': [{'slug': 'neptune_orb_all'}],\n", | ||
254 | + " 'semimajor': 30.06896348,\n", | ||
255 | + " 'semiminor': 30.06785516},\n", | ||
256 | + " 'slug': 'neptune',\n", | ||
257 | + " 'title': 'Neptune',\n", | ||
258 | + " 'type': 'planet'},\n", | ||
259 | + " {'default': False,\n", | ||
260 | + " 'locked': False,\n", | ||
261 | + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", | ||
262 | + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", | ||
263 | + " 'om': [{'slug': 'tao_mercury_sw'}],\n", | ||
264 | + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", | ||
265 | + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", | ||
266 | + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", | ||
267 | + " 'name': 'SoloColombo Source',\n", | ||
268 | + " 'orbit': {'models': [{'slug': 'bepi_cruise_all'}, {'slug': 'earth_orb_all'}]},\n", | ||
269 | + " 'slug': 'bepi_src',\n", | ||
270 | + " 'title': 'SoloColombo Source',\n", | ||
271 | + " 'type': 'source'},\n", | ||
272 | + " {'default': False,\n", | ||
273 | + " 'locked': False,\n", | ||
274 | + " 'models': {'art': [{'slug': 'tao_bepi_swrt'}],\n", | ||
275 | + " 'om': [{'slug': 'tao_bepi_sw'}],\n", | ||
276 | + " 'sa': [{'slug': 'tao_bepi_sta'}]},\n", | ||
277 | + " 'name': 'BepiColombo',\n", | ||
278 | + " 'orbit': {'models': [{'slug': 'bepi_cruise_all'}]},\n", | ||
279 | + " 'slug': 'bepi',\n", | ||
280 | + " 'title': 'BepiColombo',\n", | ||
281 | + " 'type': 'probe'},\n", | ||
282 | + " {'default': False,\n", | ||
283 | + " 'locked': False,\n", | ||
284 | + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", | ||
285 | + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", | ||
286 | + " 'om': [{'slug': 'tao_mercury_sw'}],\n", | ||
287 | + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", | ||
288 | + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", | ||
289 | + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", | ||
290 | + " 'name': 'Solo Source',\n", | ||
291 | + " 'orbit': {'models': [{'slug': 'so_orb_all'}, {'slug': 'earth_orb_all'}]},\n", | ||
292 | + " 'slug': 'solo_src',\n", | ||
293 | + " 'title': 'Solo Source',\n", | ||
294 | + " 'type': 'source'},\n", | ||
295 | + " {'default': False,\n", | ||
296 | + " 'locked': False,\n", | ||
297 | + " 'models': {'art': [{'slug': 'tao_so_swrt'}], 'om': [{'slug': 'tao_so_sw'}]},\n", | ||
298 | + " 'name': 'SolarOrbiter',\n", | ||
299 | + " 'orbit': {'models': [{'slug': 'so_orb_all'}]},\n", | ||
300 | + " 'slug': 'solo',\n", | ||
301 | + " 'title': 'SolarOrbiter',\n", | ||
302 | + " 'type': 'probe'},\n", | ||
303 | + " {'default': False,\n", | ||
304 | + " 'locked': False,\n", | ||
305 | + " 'models': {'art': [{'slug': 'tao_mercury_swrt'}],\n", | ||
306 | + " 'dsc': [{'slug': 'tao_mercury_dsc'}],\n", | ||
307 | + " 'om': [{'slug': 'tao_mercury_sw'}],\n", | ||
308 | + " 'sa': [{'slug': 'tao_mercury_sta'}],\n", | ||
309 | + " 'sb': [{'slug': 'tao_mercury_stb'}],\n", | ||
310 | + " 'solo': [{'slug': 'tao_mercury_solo'}]},\n", | ||
311 | + " 'name': 'Parker Source',\n", | ||
312 | + " 'orbit': {'models': [{'slug': 'psp_orb_all'}, {'slug': 'earth_orb_all'}]},\n", | ||
313 | + " 'slug': 'psp_src',\n", | ||
314 | + " 'title': 'Parker Source',\n", | ||
315 | + " 'type': 'source'},\n", | ||
316 | + " {'default': False,\n", | ||
317 | + " 'locked': False,\n", | ||
318 | + " 'models': {'art': [{'slug': 'tao_psp_swrt'}],\n", | ||
319 | + " 'om': [{'slug': 'tao_psp_sw'}],\n", | ||
320 | + " 'sa': [{'slug': 'tao_psp_sta'}]},\n", | ||
321 | + " 'name': 'Parker Solar Probe',\n", | ||
322 | + " 'orbit': {'models': [{'slug': 'psp_orb_all'}]},\n", | ||
323 | + " 'slug': 'psp',\n", | ||
324 | + " 'title': 'Parker Solar Probe',\n", | ||
325 | + " 'type': 'probe'},\n", | ||
326 | + " {'default': False,\n", | ||
327 | + " 'locked': False,\n", | ||
328 | + " 'models': {'om': [{'slug': 'tao_ros_sw'}],\n", | ||
329 | + " 'sa': [{'slug': 'tao_ros_sw'}],\n", | ||
330 | + " 'sb': [{'slug': 'tao_ros_sw'}]},\n", | ||
331 | + " 'name': 'Rosetta',\n", | ||
332 | + " 'orbit': {'models': [{'slug': 'ros_orb_cruise',\n", | ||
333 | + " 'stopped_at': '2014-08-02T00:00:00'},\n", | ||
334 | + " {'slug': 'p67_orb_all',\n", | ||
335 | + " 'started_at': '2014-08-02T00:00:00'}]},\n", | ||
336 | + " 'slug': 'rosetta',\n", | ||
337 | + " 'title': 'Rosetta',\n", | ||
338 | + " 'type': 'probe'},\n", | ||
339 | + " {'default': False,\n", | ||
340 | + " 'locked': False,\n", | ||
341 | + " 'models': {'om': [{'slug': 'tao_juno_sw'}],\n", | ||
342 | + " 'sa': [{'slug': 'tao_juno_sw'}],\n", | ||
343 | + " 'sb': [{'slug': 'tao_juno_sw'}]},\n", | ||
344 | + " 'name': 'Juno',\n", | ||
345 | + " 'orbit': {'models': [{'slug': 'juno_cruise_all',\n", | ||
346 | + " 'stopped_at': '2016-07-05T03:53:00'},\n", | ||
347 | + " {'slug': 'jupiter_orb_all',\n", | ||
348 | + " 'started_at': '2016-07-05T03:53:00'}]},\n", | ||
349 | + " 'slug': 'juno',\n", | ||
350 | + " 'title': 'Juno',\n", | ||
351 | + " 'type': 'probe'},\n", | ||
352 | + " {'default': False,\n", | ||
353 | + " 'locked': True,\n", | ||
354 | + " 'models': {'om': [{'slug': 'tao_p67_sw'}],\n", | ||
355 | + " 'sa': [{'slug': 'tao_p67_sw'}],\n", | ||
356 | + " 'sb': [{'slug': 'tao_p67_sw'}]},\n", | ||
357 | + " 'name': 'Churyumov-Gerasimenko',\n", | ||
358 | + " 'orbit': {'models': [{'parameters': {'hee': 'XYZ_HEE'},\n", | ||
359 | + " 'slug': 'p67_orb_all'}]},\n", | ||
360 | + " 'slug': 'p67',\n", | ||
361 | + " 'title': 'Churyumov-Gerasimenko (coming soon)',\n", | ||
362 | + " 'type': 'comet'}]\n" | ||
363 | + ] | ||
364 | + } | ||
365 | + ], | ||
366 | + "source": [ | ||
367 | + "from yaml import load as yaml_load\n", | ||
368 | + "from yaml import Loader\n", | ||
369 | + "from pprint import pprint\n", | ||
370 | + "\n", | ||
371 | + "\n", | ||
372 | + "config_file = '../my_cfg.yaml'\n", | ||
373 | + "config_file = '../config.yml'\n", | ||
374 | + "\n", | ||
375 | + "with open(config_file, 'r', encoding='utf8') as config_file:\n", | ||
376 | + " config = yaml_load(config_file.read(), Loader=Loader)\n", | ||
377 | + "\n", | ||
378 | + "\n", | ||
379 | + "pprint(config.keys())\n", | ||
380 | + "pprint(config['targets'])" | ||
381 | + ] | ||
382 | + }, | ||
383 | + { | ||
384 | + "cell_type": "markdown", | ||
385 | + "id": "48c3efcb-3f20-4ccc-b2ec-21bed3a14b9f", | ||
386 | + "metadata": {}, | ||
387 | + "source": [ | ||
388 | + "#### How we read" | ||
389 | + ] | ||
390 | + }, | ||
391 | + { | ||
392 | + "cell_type": "code", | ||
393 | + "execution_count": null, | ||
394 | + "id": "b02f3e74-0d95-4b3f-b696-a610a90f6e16", | ||
395 | + "metadata": {}, | ||
396 | + "outputs": [], | ||
397 | + "source": [ | ||
398 | + "import sys\n", | ||
399 | + "import os\n", | ||
400 | + "\n", | ||
401 | + "sys.path.insert(0, os.path.abspath('..'))\n", | ||
402 | + "from web.run import get_target_config" | ||
403 | + ] | ||
404 | + }, | ||
405 | + { | ||
406 | + "cell_type": "markdown", | ||
407 | + "id": "9aefa4ad-7600-4034-b90e-26be168f0850", | ||
408 | + "metadata": {}, | ||
409 | + "source": [ | ||
 410 | + "##### The 'targets' section of the config structure is a list of dictionaries" | ||
411 | + ] | ||
412 | + }, | ||
413 | + { | ||
414 | + "cell_type": "code", | ||
415 | + "execution_count": 36, | ||
416 | + "id": "eb88fb52-705c-45e4-87ef-79fb37590991", | ||
417 | + "metadata": {}, | ||
418 | + "outputs": [ | ||
419 | + { | ||
420 | + "name": "stdout", | ||
421 | + "output_type": "stream", | ||
422 | + "text": [ | ||
423 | + " planet Mercury\n", | ||
424 | + " planet Venus\n", | ||
425 | + " planet Earth\n", | ||
426 | + " planet Mars\n", | ||
427 | + " planet Jupiter\n", | ||
428 | + " planet Saturn\n", | ||
429 | + " planet Uranus\n", | ||
430 | + " planet Neptune\n", | ||
431 | + " source SoloColombo Source\n", | ||
432 | + " probe BepiColombo\n", | ||
433 | + " source Solo Source\n", | ||
434 | + " probe SolarOrbiter\n", | ||
435 | + " source Parker Source\n", | ||
436 | + " probe Parker Solar Probe\n", | ||
437 | + " probe Rosetta\n", | ||
438 | + " probe Juno\n", | ||
439 | + " comet Churyumov-Gerasimenko\n" | ||
440 | + ] | ||
441 | + } | ||
442 | + ], | ||
443 | + "source": [ | ||
444 | + "for _t in config['targets']:\n", | ||
445 | + " print(f\" {_t['type']:10} {_t['name']}\")" | ||
446 | + ] | ||
447 | + }, | ||
448 | + { | ||
449 | + "cell_type": "markdown", | ||
450 | + "id": "075f56b5-17f0-42b3-a982-a169c747345f", | ||
451 | + "metadata": {}, | ||
452 | + "source": [ | ||
 453 | + "##### Each planet or probe section contains a dict" | ||
454 | + ] | ||
455 | + }, | ||
456 | + { | ||
457 | + "cell_type": "code", | ||
458 | + "execution_count": 38, | ||
459 | + "id": "49e2025d-cb8a-49a1-bb05-ecfef42b32ed", | ||
460 | + "metadata": {}, | ||
461 | + "outputs": [ | ||
462 | + { | ||
463 | + "name": "stdout", | ||
464 | + "output_type": "stream", | ||
465 | + "text": [ | ||
466 | + "dict_keys(['type', 'slug', 'name', 'title', 'orbit', 'models', 'locked', 'default'])\n" | ||
467 | + ] | ||
468 | + } | ||
469 | + ], | ||
470 | + "source": [ | ||
471 | + "\n", | ||
472 | + "this_target_name = 'mars'\n", | ||
473 | + "this_target_cfg = get_target_config(this_target_name)\n", | ||
474 | + "pprint(this_target_cfg.keys())" | ||
475 | + ] | ||
476 | + }, | ||
477 | + { | ||
478 | + "cell_type": "markdown", | ||
479 | + "id": "58a14aa1-ba29-4b19-894b-99a1ef88654e", | ||
480 | + "metadata": {}, | ||
481 | + "source": [ | ||
 482 | + "##### in which we are mainly interested in the 'orbit' and 'models' keys" | ||
483 | + ] | ||
484 | + }, | ||
485 | + { | ||
486 | + "cell_type": "markdown", | ||
487 | + "id": "1c1a6438-4f2c-4cbe-846f-3a36dd4922b7", | ||
488 | + "metadata": {}, | ||
489 | + "source": [ | ||
 490 | + "In the 'models' section there is a dict indexed by source, each value being a list of datasets (by slug) with the AMDA ids of their parameters." | ||
491 | + ] | ||
492 | + }, | ||
493 | + { | ||
494 | + "cell_type": "code", | ||
495 | + "execution_count": 41, | ||
496 | + "id": "7c80f0cd-0a64-4b62-803f-7d813dd4b0cd", | ||
497 | + "metadata": {}, | ||
498 | + "outputs": [ | ||
499 | + { | ||
500 | + "name": "stdout", | ||
501 | + "output_type": "stream", | ||
502 | + "text": [ | ||
503 | + "models\n", | ||
504 | + "------\n", | ||
505 | + "{'art': [{'parameters': {'atse': 'mars_swrt_da',\n", | ||
506 | + " 'brad': 'mars_swrt_bx',\n", | ||
507 | + " 'btan': 'mars_swrt_b',\n", | ||
508 | + " 'dens': 'mars_swrt_n',\n", | ||
509 | + " 'pdyn': 'mars_swrt_pdyn',\n", | ||
510 | + " 'temp': 'mars_swrt_t',\n", | ||
511 | + " 'xy_v': 'mars_swrt_v'},\n", | ||
512 | + " 'slug': 'tao-mars-swrt'}],\n", | ||
513 | + " 'dsc': [{'parameters': {'atse': 'mars_dsc_da',\n", | ||
514 | + " 'brad': 'mars_dsc_bx',\n", | ||
515 | + " 'btan': 'mars_dsc_b',\n", | ||
516 | + " 'dens': 'mars_dsc_n',\n", | ||
517 | + " 'pdyn': 'mars_dsc_pdyn',\n", | ||
518 | + " 'temp': 'mars_dsc_t',\n", | ||
519 | + " 'xy_v': 'mars_dsc_v'},\n", | ||
520 | + " 'slug': 'tao-mars-dsc'}],\n", | ||
521 | + " 'om': [{'parameters': {'atse': 'mars_sw_da',\n", | ||
522 | + " 'brad': 'mars_sw_bx',\n", | ||
523 | + " 'btan': 'mars_sw_b',\n", | ||
524 | + " 'dens': 'mars_sw_n',\n", | ||
525 | + " 'pdyn': 'mars_sw_pdyn',\n", | ||
526 | + " 'temp': 'mars_sw_t',\n", | ||
527 | + " 'xy_v': 'mars_sw_v'},\n", | ||
528 | + " 'slug': 'tao-mars-sw'}],\n", | ||
529 | + " 'sa': [{'parameters': {'atse': 'mars_sta_da',\n", | ||
530 | + " 'brad': 'mars_sta_bx',\n", | ||
531 | + " 'btan': 'mars_sta_b',\n", | ||
532 | + " 'dens': 'mars_sta_n',\n", | ||
533 | + " 'pdyn': 'mars_sta_pdyn',\n", | ||
534 | + " 'temp': 'mars_sta_t',\n", | ||
535 | + " 'xy_v': 'mars_sta_v'},\n", | ||
536 | + " 'slug': 'tao-mars-sta'}],\n", | ||
537 | + " 'sb': [{'parameters': {'atse': 'mars_stb_da',\n", | ||
538 | + " 'brad': 'mars_stb_bx',\n", | ||
539 | + " 'btan': 'mars_stb_b',\n", | ||
540 | + " 'dens': 'mars_stb_n',\n", | ||
541 | + " 'pdyn': 'mars_stb_pdyn',\n", | ||
542 | + " 'temp': 'mars_stb_t',\n", | ||
543 | + " 'xy_v': 'mars_stb_v'},\n", | ||
544 | + " 'slug': 'tao-mars-stb'}],\n", | ||
545 | + " 'solo': [{'slug': 'tao-mars-solo'}]}\n" | ||
546 | + ] | ||
547 | + } | ||
548 | + ], | ||
549 | + "source": [ | ||
550 | + "print(\"models\\n------\")\n", | ||
551 | + "pprint(this_target_cfg['models'])" | ||
552 | + ] | ||
553 | + }, | ||
554 | + { | ||
555 | + "cell_type": "markdown", | ||
556 | + "id": "ce2ff32d-8dde-45cf-8b47-11f0773da0f0", | ||
557 | + "metadata": {}, | ||
558 | + "source": [ | ||
 559 | + "The 'orbit' section is a dict holding the list of orbit datasets (by slug) with the AMDA ids of their position parameters, plus the semi-major and semi-minor axes of the orbit." | ||
560 | + ] | ||
561 | + }, | ||
562 | + { | ||
563 | + "cell_type": "code", | ||
564 | + "execution_count": 42, | ||
565 | + "id": "2c8ca3c6-ef82-482b-b159-486b2177e35a", | ||
566 | + "metadata": {}, | ||
567 | + "outputs": [ | ||
568 | + { | ||
569 | + "name": "stdout", | ||
570 | + "output_type": "stream", | ||
571 | + "text": [ | ||
572 | + "orbit\n", | ||
573 | + "-----\n", | ||
574 | + "{'models': [{'parameters': {'xy_hee': 'xyz_mars_hee'}, 'slug': 'mars-orb-all'}],\n", | ||
575 | + " 'semimajor': 1.52366231,\n", | ||
576 | + " 'semiminor': 1.51700011}\n" | ||
577 | + ] | ||
578 | + } | ||
579 | + ], | ||
580 | + "source": [ | ||
581 | + "print(\"orbit\\n-----\")\n", | ||
582 | + "pprint(this_target_cfg['orbit'])" | ||
583 | + ] | ||
584 | + } | ||
585 | + ], | ||
586 | + "metadata": { | ||
587 | + "kernelspec": { | ||
588 | + "display_name": "Python 3 (ipykernel)", | ||
589 | + "language": "python", | ||
590 | + "name": "python3" | ||
591 | + }, | ||
592 | + "language_info": { | ||
593 | + "codemirror_mode": { | ||
594 | + "name": "ipython", | ||
595 | + "version": 3 | ||
596 | + }, | ||
597 | + "file_extension": ".py", | ||
598 | + "mimetype": "text/x-python", | ||
599 | + "name": "python", | ||
600 | + "nbconvert_exporter": "python", | ||
601 | + "pygments_lexer": "ipython3", | ||
602 | + "version": "3.10.12" | ||
603 | + } | ||
604 | + }, | ||
605 | + "nbformat": 4, | ||
606 | + "nbformat_minor": 5 | ||
607 | +} |
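The notebook added above documents how the Heliopropa configuration is loaded and traversed. For quick reference, the access pattern it demonstrates condenses to a few lines of Python; this is only a sketch, assuming PyYAML is available and that ../config.yml has the 'targets' layout shown in the output above.

    from yaml import load as yaml_load, Loader

    # Load the same configuration file the notebook reads.
    with open('../config.yml', 'r', encoding='utf8') as f:
        config = yaml_load(f.read(), Loader=Loader)

    # Each target is a dict; its 'models' key maps a source slug (om, sa, sb,
    # dsc, art, solo) to a list of datasets, each with a 'slug' and, for the
    # inner planets, the AMDA ids of the plasma parameters.
    for target in config['targets']:
        model_slugs = {source: [m['slug'] for m in models]
                       for source, models in target.get('models', {}).items()}
        print(f"{target['type']:7} {target['slug']:12} {model_slugs}")
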
web/run.py
@@ -72,16 +72,16 @@ DEBUG = environ.get('DEBUG') == 'true' | @@ -72,16 +72,16 @@ DEBUG = environ.get('DEBUG') == 'true' | ||
72 | 72 | ||
73 | LOG_FILE = get_path('run.log') | 73 | LOG_FILE = get_path('run.log') |
74 | 74 | ||
75 | -log = logging.getLogger("HelioPropa") | 75 | +hp_logger = logging.getLogger("HelioPropa") |
76 | if DEBUG: | 76 | if DEBUG: |
77 | - log.setLevel(logging.DEBUG) | 77 | + hp_logger.setLevel(logging.DEBUG) |
78 | else: | 78 | else: |
79 | - log.setLevel(logging.ERROR) | 79 | + hp_logger.setLevel(logging.ERROR) |
80 | logHandler = logging.FileHandler(LOG_FILE) | 80 | logHandler = logging.FileHandler(LOG_FILE) |
81 | logHandler.setFormatter(logging.Formatter( | 81 | logHandler.setFormatter(logging.Formatter( |
82 | "%(asctime)s - %(levelname)s - %(message)s" | 82 | "%(asctime)s - %(levelname)s - %(message)s" |
83 | )) | 83 | )) |
84 | -log.addHandler(logHandler) | 84 | +hp_logger.addHandler(logHandler) |
85 | 85 | ||
86 | # HARDCODED CONFIGURATION ##################################################### | 86 | # HARDCODED CONFIGURATION ##################################################### |
87 | 87 | ||
@@ -176,13 +176,13 @@ environ['CDF_LIB'] = CDF_LIB | @@ -176,13 +176,13 @@ environ['CDF_LIB'] = CDF_LIB | ||
176 | app = Flask(__name__, root_path=THIS_DIRECTORY) | 176 | app = Flask(__name__, root_path=THIS_DIRECTORY) |
177 | app.debug = DEBUG | 177 | app.debug = DEBUG |
178 | if app.debug: | 178 | if app.debug: |
179 | - log.info("Starting Flask app IN DEBUG MODE...") | 179 | + hp_logger.info("Starting Flask app IN DEBUG MODE...") |
180 | else: | 180 | else: |
181 | - log.info("Starting Flask app...") | 181 | + hp_logger.info("Starting Flask app...") |
182 | 182 | ||
183 | 183 | ||
184 | def handle_error(e): | 184 | def handle_error(e): |
185 | - log.error(e) | 185 | + hp_logger.error(e) |
186 | return str(e) # wish we could use the default error renderer here | 186 | return str(e) # wish we could use the default error renderer here |
187 | 187 | ||
188 | 188 | ||
@@ -280,7 +280,7 @@ tpl_global_vars = { | @@ -280,7 +280,7 @@ tpl_global_vars = { | ||
280 | # HELPERS ##################################################################### | 280 | # HELPERS ##################################################################### |
281 | 281 | ||
282 | def abort(code, message): | 282 | def abort(code, message): |
283 | - log.error("Abort: " + message) | 283 | + hp_logger.error("Abort: " + message) |
284 | abort_flask(code, message) | 284 | abort_flask(code, message) |
285 | 285 | ||
286 | 286 | ||
@@ -343,7 +343,7 @@ def datetime_from_list(time_list): | @@ -343,7 +343,7 @@ def datetime_from_list(time_list): | ||
343 | try: | 343 | try: |
344 | time_list = [str(i, 'UTF8') for i in time_list] | 344 | time_list = [str(i, 'UTF8') for i in time_list] |
345 | except Exception as e: | 345 | except Exception as e: |
346 | - log.error(e) | 346 | + hp_logger.error(e) |
347 | # Day Of Year starts at 0, but for our datetime parser it starts at 1 | 347 | # Day Of Year starts at 0, but for our datetime parser it starts at 1 |
348 | doy = '{:03d}'.format(int(''.join(time_list[4:7])) + 1) | 348 | doy = '{:03d}'.format(int(''.join(time_list[4:7])) + 1) |
349 | return datetime.datetime.strptime( | 349 | return datetime.datetime.strptime( |
@@ -561,7 +561,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | @@ -561,7 +561,7 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | ||
561 | startTime=started_at.isoformat(), | 561 | startTime=started_at.isoformat(), |
562 | stopTime=stopped_at.isoformat() | 562 | stopTime=stopped_at.isoformat() |
563 | ) | 563 | ) |
564 | - log.info("Fetching remote gzip files list at '%s'." % url) | 564 | + hp_logger.info("Fetching remote gzip files list at '%s'." % url) |
565 | retries = 0 | 565 | retries = 0 |
566 | success = False | 566 | success = False |
567 | errors = [] | 567 | errors = [] |
@@ -581,21 +581,21 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | @@ -581,21 +581,21 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | ||
581 | # raise Exception("API says it's out of time at '%s'." % url) | 581 | # raise Exception("API says it's out of time at '%s'." % url) |
582 | success = True | 582 | success = True |
583 | except Exception as e: | 583 | except Exception as e: |
584 | - log.warning("Failed (%d/3) '%s' : %s" % (retries + 1, url, e.message)) | 584 | + hp_logger.warning("Failed (%d/3) '%s' : %s" % (retries + 1, url, e.message)) |
585 | remote_gzip_files = [] | 585 | remote_gzip_files = [] |
586 | errors.append(e) | 586 | errors.append(e) |
587 | finally: | 587 | finally: |
588 | retries += 1 | 588 | retries += 1 |
589 | if not remote_gzip_files: | 589 | if not remote_gzip_files: |
590 | - log.error("Failed to retrieve data from AMDA.") | ||
591 | - log.error("Failed to fetch gzip files list for %s at '%s' : %s" % | ||
592 | - (orbiter, url, errors)) | 590 | + hp_logger.error("Failed to retrieve data from AMDA.") |
591 | + hp_logger.error("Failed to fetch gzip files list for %s at '%s' : %s" % | ||
592 | + (orbiter, url, errors)) | ||
593 | abort(400, "Failed to fetch gzip files list for %s at '%s' : %s" % | 593 | abort(400, "Failed to fetch gzip files list for %s at '%s' : %s" % |
594 | (orbiter, url, errors)) | 594 | (orbiter, url, errors)) |
595 | else: | 595 | else: |
596 | remote_gzip_files = list(set(remote_gzip_files)) | 596 | remote_gzip_files = list(set(remote_gzip_files)) |
597 | 597 | ||
598 | - log.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) | 598 | + hp_logger.debug("Fetched remote gzip files list : %s." % str(remote_gzip_files)) |
599 | 599 | ||
600 | local_gzip_files = [] | 600 | local_gzip_files = [] |
601 | for remote_gzip_file in remote_gzip_files: | 601 | for remote_gzip_file in remote_gzip_files: |
@@ -609,16 +609,16 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | @@ -609,16 +609,16 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | ||
609 | local_gzip_file = join(CACHE_DIR, get_local_filename(remote_gzip_file)) | 609 | local_gzip_file = join(CACHE_DIR, get_local_filename(remote_gzip_file)) |
610 | local_gzip_files.append(local_gzip_file) | 610 | local_gzip_files.append(local_gzip_file) |
611 | if not isfile(local_gzip_file): | 611 | if not isfile(local_gzip_file): |
612 | - log.debug("Retrieving '%s'..." % local_gzip_file) | 612 | + hp_logger.debug("Retrieving '%s'..." % local_gzip_file) |
613 | urllib_request.urlretrieve(remote_gzip_file, local_gzip_file) | 613 | urllib_request.urlretrieve(remote_gzip_file, local_gzip_file) |
614 | - log.debug("Retrieved '%s'." % local_gzip_file) | 614 | + hp_logger.debug("Retrieved '%s'." % local_gzip_file) |
615 | else: | 615 | else: |
616 | - log.debug("Found '%s' in the cache." % local_gzip_file) | 616 | + hp_logger.debug("Found '%s' in the cache." % local_gzip_file) |
617 | 617 | ||
618 | local_netc_files = [] | 618 | local_netc_files = [] |
619 | for local_gzip_file in local_gzip_files: | 619 | for local_gzip_file in local_gzip_files: |
620 | local_netc_file = local_gzip_file[0:-3] | 620 | local_netc_file = local_gzip_file[0:-3] |
621 | - log.debug("Unzipping '%s'..." % local_gzip_file) | 621 | + hp_logger.debug("Unzipping '%s'..." % local_gzip_file) |
622 | success = True | 622 | success = True |
623 | try: | 623 | try: |
624 | with gzip.open(local_gzip_file) as f: | 624 | with gzip.open(local_gzip_file) as f: |
@@ -627,14 +627,14 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | @@ -627,14 +627,14 @@ def retrieve_amda_netcdf(orbiter, what, started_at, stopped_at): | ||
627 | g.write(file_content) | 627 | g.write(file_content) |
628 | except Exception as e: | 628 | except Exception as e: |
629 | success = False | 629 | success = False |
630 | - log.error("Cannot process gz file '%s' from '%s' : %s" % | ||
631 | - (local_gzip_file, url, e)) | 630 | + hp_logger.error("Cannot process gz file '%s' from '%s' : %s" % |
631 | + (local_gzip_file, url, e)) | ||
632 | # Sometimes, the downloaded gz is corrupted, and CRC checks fail. | 632 | # Sometimes, the downloaded gz is corrupted, and CRC checks fail. |
633 | # We want to delete the local gz file and try again next time. | 633 | # We want to delete the local gz file and try again next time. |
634 | removefile(local_gzip_file) | 634 | removefile(local_gzip_file) |
635 | if success: | 635 | if success: |
636 | local_netc_files.append(local_netc_file) | 636 | local_netc_files.append(local_netc_file) |
637 | - log.debug("Unzipped '%s'." % local_gzip_file) | 637 | + hp_logger.debug("Unzipped '%s'." % local_gzip_file) |
638 | 638 | ||
639 | return list(set(local_netc_files)) # remove possible dupes | 639 | return list(set(local_netc_files)) # remove possible dupes |
640 | 640 | ||
@@ -689,7 +689,7 @@ def get_data_for_target(target_config, input_slug, | @@ -689,7 +689,7 @@ def get_data_for_target(target_config, input_slug, | ||
689 | """ | 689 | """ |
690 | :return: dict whose keys are datetime as str, values tuples of data | 690 | :return: dict whose keys are datetime as str, values tuples of data |
691 | """ | 691 | """ |
692 | - log.debug("Grabbing data for '%s'..." % target_config['slug']) | 692 | + hp_logger.debug("Grabbing data for '%s'..." % target_config['slug']) |
693 | 693 | ||
694 | try: | 694 | try: |
695 | models = target_config['models'][input_slug] | 695 | models = target_config['models'][input_slug] |
@@ -717,19 +717,19 @@ def get_data_for_target(target_config, input_slug, | @@ -717,19 +717,19 @@ def get_data_for_target(target_config, input_slug, | ||
717 | target_config['slug'], orbit['slug'], s0, s1 | 717 | target_config['slug'], orbit['slug'], s0, s1 |
718 | ) | 718 | ) |
719 | for orbit_file in orbit_files: | 719 | for orbit_file in orbit_files: |
720 | - log.debug("%s: opening orbit NETCDF4 '%s'..." % | ||
721 | - (target_config['name'], orbit_file)) | 720 | + hp_logger.debug("%s: opening orbit NETCDF4 '%s'..." % |
721 | + (target_config['name'], orbit_file)) | ||
722 | cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") | 722 | cdf_handle = Dataset(orbit_file, "r", format="NETCDF4") |
723 | times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms | 723 | times = cdf_handle.variables['Time'] # YYYY DOY HH MM SS .ms |
724 | data_hee = _read_var(cdf_handle, nc_keys, 'hee', mandatory=True) | 724 | data_hee = _read_var(cdf_handle, nc_keys, 'hee', mandatory=True) |
725 | 725 | ||
726 | - log.debug("%s: aggregating data from '%s'..." % | ||
727 | - (target_config['name'], orbit_file)) | 726 | + hp_logger.debug("%s: aggregating data from '%s'..." % |
727 | + (target_config['name'], orbit_file)) | ||
728 | for ltime, datum_hee in zip(times, data_hee): | 728 | for ltime, datum_hee in zip(times, data_hee): |
729 | try: | 729 | try: |
730 | dtime = datetime_from_list(ltime) | 730 | dtime = datetime_from_list(ltime) |
731 | except Exception: | 731 | except Exception: |
732 | - log.error("Failed to parse time from get__data_for_target %s." % ltime) | 732 | + hp_logger.error("Failed to parse time from get__data_for_target %s." % ltime) |
733 | raise | 733 | raise |
734 | # Keep only what's in the interval | 734 | # Keep only what's in the interval |
735 | if s0 <= dtime <= s1: | 735 | if s0 <= dtime <= s1: |
@@ -749,12 +749,12 @@ def get_data_for_target(target_config, input_slug, | @@ -749,12 +749,12 @@ def get_data_for_target(target_config, input_slug, | ||
749 | nc_keys.update(model['parameters']) | 749 | nc_keys.update(model['parameters']) |
750 | 750 | ||
751 | if len(model_files) == 0: | 751 | if len(model_files) == 0: |
752 | - log.warning("No model data for '%s' '%s'." | ||
753 | - % (target_config['slug'], model['slug'])) | 752 | + hp_logger.warning("No model data for '%s' '%s'." |
753 | + % (target_config['slug'], model['slug'])) | ||
754 | 754 | ||
755 | for model_file in model_files: | 755 | for model_file in model_files: |
756 | - log.debug("%s: opening model NETCDF4 '%s'..." % | ||
757 | - (target_config['name'], model_file)) | 756 | + hp_logger.debug("%s: opening model NETCDF4 '%s'..." % |
757 | + (target_config['name'], model_file)) | ||
758 | cdf_handle = Dataset(model_file, "r", format="NETCDF4") | 758 | cdf_handle = Dataset(model_file, "r", format="NETCDF4") |
759 | 759 | ||
760 | # log.debug(cdf_handle.variables.keys()) | 760 | # log.debug(cdf_handle.variables.keys()) |
@@ -782,15 +782,15 @@ def get_data_for_target(target_config, input_slug, | @@ -782,15 +782,15 @@ def get_data_for_target(target_config, input_slug, | ||
782 | # FIXME | 782 | # FIXME |
783 | ignored_count = 0 | 783 | ignored_count = 0 |
784 | 784 | ||
785 | - log.debug("%s: aggregating data from '%s'..." % | ||
786 | - (target_config['name'], model_file)) | 785 | + hp_logger.debug("%s: aggregating data from '%s'..." % |
786 | + (target_config['name'], model_file)) | ||
787 | for ltime, datum_v, datum_b, datum_t, datum_n, datum_p, datum_a \ | 787 | for ltime, datum_v, datum_b, datum_t, datum_n, datum_p, datum_a \ |
788 | in zip(times, data_v, data_b, data_t, data_n, data_p, data_a): | 788 | in zip(times, data_v, data_b, data_t, data_n, data_p, data_a): |
789 | 789 | ||
790 | try: | 790 | try: |
791 | dtime = datetime_from_list(ltime) | 791 | dtime = datetime_from_list(ltime) |
792 | except Exception: | 792 | except Exception: |
793 | - log.error("Failed to parse time from %s." % ltime) | 793 | + hp_logger.error("Failed to parse time from %s." % ltime) |
794 | raise | 794 | raise |
795 | 795 | ||
796 | if not (s0 <= dtime <= s1): | 796 | if not (s0 <= dtime <= s1): |
@@ -857,8 +857,8 @@ def get_data_for_target(target_config, input_slug, | @@ -857,8 +857,8 @@ def get_data_for_target(target_config, input_slug, | ||
857 | 857 | ||
858 | # Improve this loop so as to remove this stinky debug log | 858 | # Improve this loop so as to remove this stinky debug log |
859 | if ignored_count > 0: | 859 | if ignored_count > 0: |
860 | - log.debug(" Ignored %d datum(s) during ~\"drizzling\"." | ||
861 | - % ignored_count) | 860 | + hp_logger.debug(" Ignored %d datum(s) during ~\"drizzling\"." |
861 | + % ignored_count) | ||
862 | 862 | ||
863 | cdf_handle.close() | 863 | cdf_handle.close() |
864 | 864 | ||
@@ -867,7 +867,7 @@ def get_data_for_target(target_config, input_slug, | @@ -867,7 +867,7 @@ def get_data_for_target(target_config, input_slug, | ||
867 | 867 | ||
868 | def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): | 868 | def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): |
869 | target_config = get_target_config(target_slug) | 869 | target_config = get_target_config(target_slug) |
870 | - log.debug("Crunching CSV contents for '%s'..." % target_config['name']) | 870 | + hp_logger.debug("Crunching CSV contents for '%s'..." % target_config['name']) |
871 | si = StringIO() | 871 | si = StringIO() |
872 | cw = csv_writer(si) | 872 | cw = csv_writer(si) |
873 | cw.writerow(PROPERTIES) | 873 | cw.writerow(PROPERTIES) |
@@ -877,11 +877,11 @@ def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): | @@ -877,11 +877,11 @@ def generate_csv_contents(target_slug, input_slug, started_at, stopped_at): | ||
877 | started_at=started_at, stopped_at=stopped_at | 877 | started_at=started_at, stopped_at=stopped_at |
878 | ) | 878 | ) |
879 | 879 | ||
880 | - log.debug("Writing and sorting CSV for '%s'..." % target_config['slug']) | 880 | + hp_logger.debug("Writing and sorting CSV for '%s'..." % target_config['slug']) |
881 | for dkey in sorted(all_data): | 881 | for dkey in sorted(all_data): |
882 | cw.writerow(all_data[dkey]) | 882 | cw.writerow(all_data[dkey]) |
883 | 883 | ||
884 | - log.info("Generated CSV contents for '%s'." % target_config['slug']) | 884 | + hp_logger.info("Generated CSV contents for '%s'." % target_config['slug']) |
885 | return si.getvalue() | 885 | return si.getvalue() |
886 | 886 | ||
887 | 887 | ||
@@ -891,10 +891,10 @@ def generate_csv_contents_spz(target_slug, input_slug, started_at, stopped_at): | @@ -891,10 +891,10 @@ def generate_csv_contents_spz(target_slug, input_slug, started_at, stopped_at): | ||
891 | orbit_dict = target_config['orbit']['models'][0]['parameters'] | 891 | orbit_dict = target_config['orbit']['models'][0]['parameters'] |
892 | parameters_dict = {**plasma_dict, **orbit_dict} | 892 | parameters_dict = {**plasma_dict, **orbit_dict} |
893 | 893 | ||
894 | - log.info(f"Aggregating dataframes speazy parameters for '{input_slug}' to '{target_slug}'" ) | 894 | + hp_logger.info(f"Aggregating dataframes speazy parameters for '{input_slug}' to '{target_slug}'") |
895 | list_df = [] | 895 | list_df = [] |
896 | for _name, _id in parameters_dict.items(): | 896 | for _name, _id in parameters_dict.items(): |
897 | - log.debug(f"Getting parameter id '{_id}' for '{_name}'") | 897 | + hp_logger.debug(f"Getting parameter id '{_id}' for '{_name}'") |
898 | _df = amda.get_data(_id, started_at, stopped_at).to_dataframe() | 898 | _df = amda.get_data(_id, started_at, stopped_at).to_dataframe() |
899 | if _name == 'xy_v': | 899 | if _name == 'xy_v': |
900 | _df = _df.rename(columns={_df.columns[0]: 'vrad', _df.columns[1]: 'vtan'}) | 900 | _df = _df.rename(columns={_df.columns[0]: 'vrad', _df.columns[1]: 'vtan'}) |
@@ -952,23 +952,23 @@ def generate_csv_file_if_needed(target_slug, input_slug, | @@ -952,23 +952,23 @@ def generate_csv_file_if_needed(target_slug, input_slug, | ||
952 | csv_generator = generate_csv_contents | 952 | csv_generator = generate_csv_contents |
953 | 953 | ||
954 | if generate: | 954 | if generate: |
955 | - log.info("Generating CSV '%s'..." % local_csv_file) | 955 | + hp_logger.info("Generating CSV '%s'..." % local_csv_file) |
956 | try: | 956 | try: |
957 | with open(local_csv_file, mode="w+") as f: | 957 | with open(local_csv_file, mode="w+") as f: |
958 | f.write(csv_generator( | 958 | f.write(csv_generator( |
959 | target_slug=target_slug, input_slug=input_slug, | 959 | target_slug=target_slug, input_slug=input_slug, |
960 | started_at=started_at, stopped_at=stopped_at | 960 | started_at=started_at, stopped_at=stopped_at |
961 | )) | 961 | )) |
962 | - log.info("Generation of '%s' done." % filename) | 962 | + hp_logger.info("Generation of '%s' done." % filename) |
963 | except Exception as e: | 963 | except Exception as e: |
964 | from sys import exc_info | 964 | from sys import exc_info |
965 | from traceback import extract_tb | 965 | from traceback import extract_tb |
966 | exc_type, exc_value, exc_traceback = exc_info() | 966 | exc_type, exc_value, exc_traceback = exc_info() |
967 | - log.error(e) | 967 | + hp_logger.error(e) |
968 | for trace in extract_tb(exc_traceback): | 968 | for trace in extract_tb(exc_traceback): |
969 | - log.error(trace) | 969 | + hp_logger.error(trace) |
970 | if isfile(local_csv_file): | 970 | if isfile(local_csv_file): |
971 | - log.warning("Removing failed CSV '%s'..." % local_csv_file) | 971 | + hp_logger.warning("Removing failed CSV '%s'..." % local_csv_file) |
972 | removefile(local_csv_file) | 972 | removefile(local_csv_file) |
973 | abort(500, "Failed creating CSV '%s' : %s" % (filename, e)) | 973 | abort(500, "Failed creating CSV '%s' : %s" % (filename, e)) |
974 | 974 | ||
@@ -1074,7 +1074,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): | @@ -1074,7 +1074,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): | ||
1074 | try: | 1074 | try: |
1075 | index = _data['columns'].index(_key) | 1075 | index = _data['columns'].index(_key) |
1076 | except ValueError: | 1076 | except ValueError: |
1077 | - log.error("Key %s not found in columns of %s" % (_key, _data)) | 1077 | + hp_logger.error("Key %s not found in columns of %s" % (_key, _data)) |
1078 | raise | 1078 | raise |
1079 | return index | 1079 | return index |
1080 | 1080 | ||
@@ -1106,7 +1106,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): | @@ -1106,7 +1106,7 @@ def get_catalog_layers(input_slug, target_slug, started_at, stopped_at): | ||
1106 | with open(get_path("../data/catalog/%s" % cl_datum['file'])) as f: | 1106 | with open(get_path("../data/catalog/%s" % cl_datum['file'])) as f: |
1107 | json_data = json.load(f) | 1107 | json_data = json.load(f) |
1108 | if 'start' not in cl_datum: | 1108 | if 'start' not in cl_datum: |
1109 | - log.error("Invalid configuration: 'start' is missing.") | 1109 | + hp_logger.error("Invalid configuration: 'start' is missing.") |
1110 | continue # skip this | 1110 | continue # skip this |
1111 | if 'format' not in cl_datum: | 1111 | if 'format' not in cl_datum: |
1112 | cl_datum['format'] = CME_DATE_FMT | 1112 | cl_datum['format'] = CME_DATE_FMT |
@@ -1189,12 +1189,12 @@ def update_spacepy(): | @@ -1189,12 +1189,12 @@ def update_spacepy(): | ||
1189 | Importing pycdf will fail if the toolbox is not up to date. | 1189 | Importing pycdf will fail if the toolbox is not up to date. |
1190 | """ | 1190 | """ |
1191 | try: | 1191 | try: |
1192 | - log.info("Updating spacepy's toolbox…") | 1192 | + hp_logger.info("Updating spacepy's toolbox…") |
1193 | import spacepy.toolbox | 1193 | import spacepy.toolbox |
1194 | 1194 | ||
1195 | spacepy.toolbox.update() | 1195 | spacepy.toolbox.update() |
1196 | except Exception as e: | 1196 | except Exception as e: |
1197 | - log.error("Failed to update spacepy : %s." % e) | 1197 | + hp_logger.error("Failed to update spacepy : %s." % e) |
1198 | 1198 | ||
1199 | 1199 | ||
1200 | tpl_global_vars['visits'] = get_hit_counter() | 1200 | tpl_global_vars['visits'] = get_hit_counter() |
@@ -1330,7 +1330,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): | @@ -1330,7 +1330,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): | ||
1330 | local_gzip_file = join(CACHE_DIR, gzip_filename) | 1330 | local_gzip_file = join(CACHE_DIR, gzip_filename) |
1331 | 1331 | ||
1332 | if not isfile(local_gzip_file): | 1332 | if not isfile(local_gzip_file): |
1333 | - log.debug("Creating the CSV files for the tarball...") | 1333 | + hp_logger.debug("Creating the CSV files for the tarball...") |
1334 | for target_config in targets_configs: | 1334 | for target_config in targets_configs: |
1335 | filename = "%s_%s_%s_%s.csv" % ( | 1335 | filename = "%s_%s_%s_%s.csv" % ( |
1336 | target_config['slug'], input_slug, sta, sto | 1336 | target_config['slug'], input_slug, sta, sto |
@@ -1345,7 +1345,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): | @@ -1345,7 +1345,7 @@ def download_targets_tarball(targets, inp, started_at, stopped_at): | ||
1345 | input_slug=input_slug | 1345 | input_slug=input_slug |
1346 | )) | 1346 | )) |
1347 | 1347 | ||
1348 | - log.debug("Creating the tarball '%s'..." % local_gzip_file) | 1348 | + hp_logger.debug("Creating the tarball '%s'..." % local_gzip_file) |
1349 | with tarfile.open(local_gzip_file, "w:gz") as tar: | 1349 | with tarfile.open(local_gzip_file, "w:gz") as tar: |
1350 | for target_config in targets_configs: | 1350 | for target_config in targets_configs: |
1351 | filename = "%s_%s_%s_%s.csv" % ( | 1351 | filename = "%s_%s_%s_%s.csv" % ( |
@@ -1410,7 +1410,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | @@ -1410,7 +1410,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | ||
1410 | nc_path = join(CACHE_DIR, nc_filename) | 1410 | nc_path = join(CACHE_DIR, nc_filename) |
1411 | 1411 | ||
1412 | if not isfile(nc_path): | 1412 | if not isfile(nc_path): |
1413 | - log.debug("Creating the NetCDF file '%s'..." % nc_filename) | 1413 | + hp_logger.debug("Creating the NetCDF file '%s'..." % nc_filename) |
1414 | nc_handle = Dataset(nc_path, "w", format="NETCDF4") | 1414 | nc_handle = Dataset(nc_path, "w", format="NETCDF4") |
1415 | try: | 1415 | try: |
1416 | nc_handle.description = "Model and orbit data for targets" # todo | 1416 | nc_handle.description = "Model and orbit data for targets" # todo |
@@ -1419,7 +1419,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | @@ -1419,7 +1419,7 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | ||
1419 | available_params = list(PROPERTIES) | 1419 | available_params = list(PROPERTIES) |
1420 | for target in targets_configs: | 1420 | for target in targets_configs: |
1421 | target_slug = target['slug'] | 1421 | target_slug = target['slug'] |
1422 | - log.debug("Adding group '%s' to the NetCDF..." % target_slug) | 1422 | + hp_logger.debug("Adding group '%s' to the NetCDF..." % target_slug) |
1423 | nc_group = nc_handle.createGroup(target_slug) | 1423 | nc_group = nc_handle.createGroup(target_slug) |
1424 | data = get_data_for_target( | 1424 | data = get_data_for_target( |
1425 | target_config=target, input_slug=input_slug, | 1425 | target_config=target, input_slug=input_slug, |
@@ -1473,10 +1473,10 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | @@ -1473,10 +1473,10 @@ def download_targets_netcdf(targets, inp, params, started_at, stopped_at): | ||
1473 | values_y.append(dval[index_y]) | 1473 | values_y.append(dval[index_y]) |
1474 | nc_x[:] = values_x | 1474 | nc_x[:] = values_x |
1475 | nc_y[:] = values_y | 1475 | nc_y[:] = values_y |
1476 | - log.debug("Writing NetCDF '%s'..." % nc_filename) | 1476 | + hp_logger.debug("Writing NetCDF '%s'..." % nc_filename) |
1477 | 1477 | ||
1478 | except Exception: | 1478 | except Exception: |
1479 | - log.error("Failed to generate NetCDF '%s'." % nc_filename) | 1479 | + hp_logger.error("Failed to generate NetCDF '%s'." % nc_filename) |
1480 | raise | 1480 | raise |
1481 | finally: | 1481 | finally: |
1482 | nc_handle.close() | 1482 | nc_handle.close() |
@@ -1529,7 +1529,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | @@ -1529,7 +1529,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | ||
1529 | cdf_path = join(CACHE_DIR, cdf_filename) | 1529 | cdf_path = join(CACHE_DIR, cdf_filename) |
1530 | 1530 | ||
1531 | if not isfile(cdf_path): | 1531 | if not isfile(cdf_path): |
1532 | - log.debug("Creating the CDF file '%s'..." % cdf_filename) | 1532 | + hp_logger.debug("Creating the CDF file '%s'..." % cdf_filename) |
1533 | try: | 1533 | try: |
1534 | from spacepy import pycdf | 1534 | from spacepy import pycdf |
1535 | except ImportError: | 1535 | except ImportError: |
@@ -1539,10 +1539,10 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | @@ -1539,10 +1539,10 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | ||
1539 | try: | 1539 | try: |
1540 | from spacepy import pycdf | 1540 | from spacepy import pycdf |
1541 | except ImportError as e: | 1541 | except ImportError as e: |
1542 | - log.error("Failed to import pycdf from spacepy : %s" % e) | 1542 | + hp_logger.error("Failed to import pycdf from spacepy : %s" % e) |
1543 | raise | 1543 | raise |
1544 | except Exception as e: | 1544 | except Exception as e: |
1545 | - log.error("Failed to import pycdf from spacepy : %s" % e) | 1545 | + hp_logger.error("Failed to import pycdf from spacepy : %s" % e) |
1546 | raise | 1546 | raise |
1547 | 1547 | ||
1548 | try: | 1548 | try: |
@@ -1618,18 +1618,18 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | @@ -1618,18 +1618,18 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | ||
1618 | else: | 1618 | else: |
1619 | values_xhee.append(0) | 1619 | values_xhee.append(0) |
1620 | values_yhee.append(0) | 1620 | values_yhee.append(0) |
1621 | - log.warning("Orbit data for %s has NaNs." % target_slug) | 1621 | + hp_logger.warning("Orbit data for %s has NaNs." % target_slug) |
1622 | cdf_handle[kx] = values_xhee | 1622 | cdf_handle[kx] = values_xhee |
1623 | cdf_handle[ky] = values_yhee | 1623 | cdf_handle[ky] = values_yhee |
1624 | cdf_handle[kx].attrs['UNITS'] = 'Au' | 1624 | cdf_handle[kx].attrs['UNITS'] = 'Au' |
1625 | cdf_handle[ky].attrs['UNITS'] = 'Au' | 1625 | cdf_handle[ky].attrs['UNITS'] = 'Au' |
1626 | 1626 | ||
1627 | - log.debug("Writing CDF '%s'..." % cdf_filename) | 1627 | + hp_logger.debug("Writing CDF '%s'..." % cdf_filename) |
1628 | cdf_handle.close() | 1628 | cdf_handle.close() |
1629 | - log.debug("Wrote CDF '%s'." % cdf_filename) | 1629 | + hp_logger.debug("Wrote CDF '%s'." % cdf_filename) |
1630 | 1630 | ||
1631 | except Exception as e: | 1631 | except Exception as e: |
1632 | - log.error("Failed to generate CDF '%s'." % cdf_filename) | 1632 | + hp_logger.error("Failed to generate CDF '%s'." % cdf_filename) |
1633 | if isfile(cdf_path): | 1633 | if isfile(cdf_path): |
1634 | removefile(cdf_path) | 1634 | removefile(cdf_path) |
1635 | raise | 1635 | raise |
@@ -1643,7 +1643,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | @@ -1643,7 +1643,7 @@ def download_targets_cdf(targets, inp, started_at, stopped_at): | ||
1643 | @app.route("/<target>_auroral_catalog.csv") | 1643 | @app.route("/<target>_auroral_catalog.csv") |
1644 | def download_auroral_catalog_csv(target): | 1644 | def download_auroral_catalog_csv(target): |
1645 | tc = validate_tap_target_config(target) | 1645 | tc = validate_tap_target_config(target) |
1646 | - log.debug("Requesting auroral emissions CSV for %s..." % tc['name']) | 1646 | + hp_logger.debug("Requesting auroral emissions CSV for %s..." % tc['name']) |
1647 | 1647 | ||
1648 | filename = "%s_auroral_catalog.csv" % (target) | 1648 | filename = "%s_auroral_catalog.csv" % (target) |
1649 | local_csv_file = join(CACHE_DIR, filename) | 1649 | local_csv_file = join(CACHE_DIR, filename) |
@@ -1670,10 +1670,10 @@ def download_auroral_catalog_csv(target): | @@ -1670,10 +1670,10 @@ def download_auroral_catalog_csv(target): | ||
1670 | # 'time_min', 'time_max', 'thumbnail_url', 'external_link' | 1670 | # 'time_min', 'time_max', 'thumbnail_url', 'external_link' |
1671 | # cw.writerow(head) | 1671 | # cw.writerow(head) |
1672 | 1672 | ||
1673 | - log.debug("Writing auroral emissions CSV for %s..." % tc['name']) | 1673 | + hp_logger.debug("Writing auroral emissions CSV for %s..." % tc['name']) |
1674 | cw.writerows(emissions) | 1674 | cw.writerows(emissions) |
1675 | 1675 | ||
1676 | - log.info("Generated auroral emissions CSV contents for %s." % tc['name']) | 1676 | + hp_logger.info("Generated auroral emissions CSV contents for %s." % tc['name']) |
1677 | return si.getvalue() | 1677 | return si.getvalue() |
1678 | 1678 | ||
1679 | # if not isfile(local_csv_file): | 1679 | # if not isfile(local_csv_file): |
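The web/run.py change is a mechanical rename: the module-level logger previously bound to the name log is now hp_logger, while the underlying logging channel ("HelioPropa"), the level selection and the file handler are untouched, so existing log files and formats stay the same. Condensed, the resulting setup reads roughly as below; this is a sketch with paths simplified, since the real module resolves LOG_FILE through get_path() as shown in the diff.

    import logging
    from os import environ

    DEBUG = environ.get('DEBUG') == 'true'
    LOG_FILE = 'run.log'  # simplified; run.py uses get_path('run.log')

    # Same "HelioPropa" channel as before, only the module-level name changed.
    hp_logger = logging.getLogger("HelioPropa")
    hp_logger.setLevel(logging.DEBUG if DEBUG else logging.ERROR)

    logHandler = logging.FileHandler(LOG_FILE)
    logHandler.setFormatter(logging.Formatter(
        "%(asctime)s - %(levelname)s - %(message)s"
    ))
    hp_logger.addHandler(logHandler)

    hp_logger.info("Starting Flask app...")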