Commit ef60c3eccfb751a028a3ea6207bb81b64350903d
1 parent 4cb0ff36
Exists in master and in 1 other branch

Majordome and monitoring ok

Showing 5 changed files with 71 additions and 34 deletions
src/majordome/tasks.py
1 | 1 | from __future__ import absolute_import |
2 | +from django.core.exceptions import ObjectDoesNotExist | |
2 | 3 | import scheduler |
3 | 4 | import scheduler.tasks |
4 | 5 | from celery.task import Task |
... | ... | @@ -31,6 +32,9 @@ class Majordome(Task): |
31 | 32 | status_tel = "" |
32 | 33 | status_nir = "" |
33 | 34 | status_vis = "" |
35 | + timers = {} | |
36 | + functions = {} | |
37 | + schedule = None | |
34 | 38 | majordome_status = "STARTING" |
35 | 39 | |
36 | 40 | |
... | ... | @@ -93,6 +97,8 @@ class Majordome(Task): |
93 | 97 | self.night_end = getNightEnd() |
94 | 98 | self.night_start_jd = secondsToJulianDate(getNightStart()) |
95 | 99 | self.night_end_jd = secondsToJulianDate(getNightEnd()) |
100 | + self.timer_night_start = self.night_start - getCurrentTime() | |
101 | + self.timer_night_end = self.night_end - getCurrentTime() | |
96 | 102 | self.timer_status = 5 |
97 | 103 | self.timer_plc = 2 |
98 | 104 | self.timer_schedule = 1 |
... | ... | @@ -148,8 +154,13 @@ class Majordome(Task): |
148 | 154 | |
149 | 155 | def handleEnvironmentTimer(self): |
150 | 156 | self.timers["environment"] = self.timer_plc |
151 | - site_status = SiteWatch.objects.latest('updated') | |
152 | - weather_status = WeatherWatch.objects.latest('updated') | |
157 | + try: | |
158 | + site_status = SiteWatch.objects.latest('updated') | |
159 | + weather_status = WeatherWatch.objects.latest('updated') | |
160 | + except ObjectDoesNotExist: | |
161 | + if (settings.DEBUG): | |
162 | + log.info("No site_status or weather_status found in database") | |
163 | + return (1) | |
153 | 164 | self.handlePLC(site_status, weather_status) |
154 | 165 | return (0) |
155 | 166 | |
... | ... | @@ -172,12 +183,22 @@ class Majordome(Task): |
172 | 183 | return (0) |
173 | 184 | |
174 | 185 | def handleScheduleTimer(self): |
175 | - self.timers["scheduler"] = self.timer_schedule | |
186 | + self.timers["schedule"] = self.timer_schedule | |
176 | 187 | if (self.isValidStatus(self.status_tel)): |
177 | 188 | if (self.schedule == None): |
178 | - self.schedule = Schedule.objects.latest('created') | |
189 | + try: | |
190 | + self.schedule = Schedule.objects.latest('created') | |
191 | + except ObjectDoesNotExist: | |
192 | + if (settings.DEBUG): | |
193 | + log.info("No schedule found in database") | |
194 | + return (1) | |
179 | 195 | else: |
180 | - schedule = Schedule.objects.latest('created') | |
196 | + try: | |
197 | + schedule = Schedule.objects.latest('created') | |
198 | + except ObjectDoesNotExist: | |
199 | + if (settings.DEBUG): | |
200 | + log.info("No schedule found in database") | |
201 | + return (1) | |
181 | 202 | if (schedule.created != self.schedule.created): |
182 | 203 | self.next_sequence = None |
183 | 204 | self.schedule = schedule |
... | ... | @@ -204,10 +225,12 @@ class Majordome(Task): |
204 | 225 | self.notifyTelescopeStatus("night_start") |
205 | 226 | return (0) |
206 | 227 | |
228 | + def notifyTelescopeStatus(self, timer_name): | |
229 | + return (self.notifyDeviceStatus("telescope", timer_name, self.status_tel)) | |
230 | + | |
207 | 231 | def notifyDeviceStatus(self, device_name, timer_name, status): |
208 | - Log.objects.create(agent=device_name, create=datetime.datetime.now(), | |
209 | - message="The action : " + str(timer_name) + " has been canceled : Telescope status : " | |
210 | - + status) | |
232 | + Log.objects.create(agent=device_name, created=datetime.datetime.now(), | |
233 | + message="The action : " + str(timer_name) + " has been canceled : Telescope status : " + str(status)) | |
211 | 234 | # maybe reset some variables and do a scheduling |
212 | 235 | return (0) |
213 | 236 | |
... | ... | @@ -344,8 +367,9 @@ class Majordome(Task): |
344 | 367 | Check if the instrument status is valid |
345 | 368 | ''' |
346 | 369 | def isValidStatus(self, status): |
347 | - if (status == "" or status == "ERROR"): | |
348 | - return (False) | |
370 | + # TODO REMOVE COMMENT AND CHANGE WHEN DEFINED | |
371 | + # if (status == "" or status == "ERROR"): | |
372 | + # return (False) | |
349 | 373 | return (True) |
350 | 374 | |
351 | 375 | ''' |
... | ... | @@ -380,3 +404,7 @@ class Majordome(Task): |
380 | 404 | |
381 | 405 | print("change_obs_conditions") |
382 | 406 | pass |
407 | + | |
408 | +if (__name__ == "__main__"): | |
409 | + m = Majordome() | |
410 | + m.run() | |
383 | 411 | \ No newline at end of file
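The recurring change in majordome/tasks.py is the guard around Model.objects.latest(...): when the table is still empty, Django raises ObjectDoesNotExist, and the handlers now log the miss (in DEBUG) and return 1 instead of crashing the task. A minimal sketch of that pattern as a standalone helper (the fetch_latest name and logger name are illustrative, not part of the project):

    import logging

    from django.conf import settings
    from django.core.exceptions import ObjectDoesNotExist

    log = logging.getLogger("MajordomeTaskLogger")  # illustrative logger name

    def fetch_latest(model, field):
        """Return the newest row ordered by `field`, or None when the table is empty."""
        try:
            return model.objects.latest(field)
        except ObjectDoesNotExist:
            if settings.DEBUG:
                log.info("No %s found in database", model.__name__)
            return None

With such a helper, handleEnvironmentTimer() would simply return 1 whenever fetch_latest(SiteWatch, 'updated') or fetch_latest(WeatherWatch, 'updated') comes back None.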
src/monitoring/tasks.py
... | ... | @@ -11,7 +11,9 @@ log = L.setupLogger("MonitoringTaskLogger", "Monitoring") |
11 | 11 | Infinite task created at the program's start. |
12 | 12 | Checks the plc status, parse it, analyse it, store it in db |
13 | 13 | ''' |
14 | -class monitoring(Task): | |
14 | +class Monitoring(Task): | |
15 | + timers = {} | |
16 | + functions = {} | |
15 | 17 | |
16 | 18 | def run(self): |
17 | 19 | self.setContext() |
... | ... | @@ -24,14 +26,15 @@ class monitoring(Task): |
24 | 26 | return (0) |
25 | 27 | |
26 | 28 | def setTime(self): |
27 | - self.timer_status = 10 | |
28 | - self.timers = {"timer_status", self.timer_status} | |
29 | - self.functions = {"timer_status", self.handleTimerStatus} | |
29 | + self.timer_status = 2 | |
30 | + self.timers = {"timer_status": self.timer_status} | |
31 | + self.functions = {"timer_status": self.handleTimerStatus} | |
30 | 32 | return (0) |
31 | 33 | |
32 | 34 | def handleTimerStatus(self): |
33 | 35 | self.timers["timer_status"] = self.timer_status |
34 | 36 | self.status_plc = self.plc.get("STATUS") |
37 | + print("handle Timer") | |
35 | 38 | # TODO: parse, analyse, store |
36 | 39 | return (0) |
37 | 40 | |
... | ... | @@ -41,12 +44,17 @@ class monitoring(Task): |
41 | 44 | time.sleep(self.timers[minimal_timer]) |
42 | 45 | self.timers = {key: value - self.timers[minimal_timer] for key, value in self.timers.items()} |
43 | 46 | for timer_name, timer_value in self.timers.items(): |
47 | + print("Loop") | |
44 | 48 | if (timer_value <= 0): |
45 | - if (timer_name in self.function): | |
49 | + if (timer_name in self.functions): | |
46 | 50 | self.functions[timer_name]() |
47 | 51 | else: |
48 | 52 | if (settings.DEBUG): |
49 | 53 | log.info("Timer : " + str(timer_name) + "is not known by the monitoring") |
50 | 54 | if (settings.DEBUG): |
51 | 55 | log.info("Timer : " + str(timer_name) + " executed by monitoring") |
52 | - return (0) | |
53 | 56 | \ No newline at end of file |
57 | + return (0) | |
58 | + | |
59 | +if (__name__ == "__main__"): | |
60 | + m = Monitoring() | |
61 | + m.run() | |
54 | 62 | \ No newline at end of file
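The key fix in monitoring/tasks.py is that {"timer_status", self.timer_status} is a Python set literal, not a dict, so the later self.timers["timer_status"] lookups could never succeed; the commit switches both timers and functions to dict literals and corrects the self.function typo. The loop sleeps for the smallest remaining timer, subtracts that delay from every entry, then calls the registered handler for each timer that has reached zero. A standalone sketch of that dispatch loop, with a dummy handler in place of the real PLC status check (all names here are illustrative):

    import time

    TIMER_STATUS = 2                        # seconds between status checks
    timers = {"timer_status": TIMER_STATUS}

    def handle_timer_status():
        # Placeholder for the real parse/analyse/store of the PLC status.
        print("handle Timer")
        timers["timer_status"] = TIMER_STATUS   # re-arm the timer

    functions = {"timer_status": handle_timer_status}

    def loop_once():
        minimal = min(timers, key=timers.get)   # timer that expires next
        elapsed = timers[minimal]
        time.sleep(elapsed)
        for name in timers:                     # charge the elapsed time to every timer
            timers[name] -= elapsed
        for name, value in list(timers.items()):
            if value <= 0 and name in functions:
                functions[name]()               # handler re-arms its own timer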
src/pyros/__init__.py
... | ... | @@ -9,6 +9,12 @@ from django.conf import settings |
9 | 9 | @worker_ready.connect |
10 | 10 | def start_permanent_tasks(signal, sender): |
11 | 11 | import monitoring.tasks |
12 | + import alert_manager.tasks | |
13 | + import majordome.tasks | |
12 | 14 | |
13 | 15 | if sender.hostname == "pyros@monitoring": |
14 | - monitoring.tasks.monitoring.delay() | |
16 | + monitoring.tasks.Monitoring.delay() | |
17 | + elif sender.hostname == "pyros@majordome": | |
18 | + majordome.tasks.Majordome.delay() | |
19 | + elif sender.hostname == "pyros@alert_manager": | |
20 | + alert_manager.tasks.alert_listener.delay() | |
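start_permanent_tasks() runs once per worker: Celery's worker_ready signal passes the worker as sender, and sender.hostname matches the -n pyros@... names given in start_celery_workers.sh, so each dedicated worker launches exactly one permanent task with .delay(). An equivalent, table-driven sketch of the same dispatch (a variant for illustration, not what the commit ships):

    from celery.signals import worker_ready

    @worker_ready.connect
    def start_permanent_tasks(signal, sender, **kwargs):
        # Import inside the handler so the Django apps are fully loaded first.
        import monitoring.tasks
        import majordome.tasks
        import alert_manager.tasks

        # Hostnames come from `celery worker ... -n pyros@<name>` in the start script.
        permanent_tasks = {
            "pyros@monitoring": monitoring.tasks.Monitoring,
            "pyros@majordome": majordome.tasks.Majordome,
            "pyros@alert_manager": alert_manager.tasks.alert_listener,
        }
        task = permanent_tasks.get(sender.hostname)
        if task is not None:
            task.delay()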
src/pyros/settings.py
... | ... | @@ -257,12 +257,9 @@ CELERY_QUEUES = { |
257 | 257 | "alert_listener_q": {"exchange": "alert_listener_q", "routing_key": "alert_listener_q"}, |
258 | 258 | "analysis_q": {"exchange": "analysis_q", "routing_key": "analysis_q"}, |
259 | 259 | "system_status_q": {"exchange": "system_status_q", "routing_key": "system_status_q"}, |
260 | - "change_obs_conditions_q": {"exchange": "change_obs_conditions_q", "routing_key": "change_obs_conditions_q"}, | |
261 | 260 | "monitoring_q": {"exchange": "monitoring_q", "routing_key": "monitoring_q"}, |
261 | + "majordome_q": {"exchange": "majordome_q", "routing_key": "majordome_q"}, | |
262 | 262 | "scheduling_q": {"exchange": "scheduling_q", "routing_key": "scheduling_q"}, |
263 | - "execute_sequence_q": {"exchange": "execute_sequence_q", "routing_key": "execute_sequence_q"}, | |
264 | - "execute_plan_vis_q": {"exchange": "execute_plan_vis_q", "routing_key": "execute_plan_vis_q"}, | |
265 | - "execute_plan_nir_q": {"exchange": "execute_plan_nir_q", "routing_key": "execute_plan_nir_q"}, | |
266 | 263 | "create_calibrations_q": {"exchange": "create_calibrations_q", "routing_key": "create_calibrations_q"}, |
267 | 264 | } |
268 | 265 | # "simulator_q": {"exchange": "simulator_q", "routing_key": "simulator_q"}, |
... | ... | @@ -270,11 +267,8 @@ CELERY_QUEUES = { |
270 | 267 | CELERY_ROUTES = { |
271 | 268 | "alert_manager.tasks.alert_listener": {"queue": "alert_listener_q"}, |
272 | 269 | "analyzer.tasks.analysis": {"queue": "analysis_q"}, |
273 | - "majordome.tasks.execute_sequence": {"queue": "execute_sequence_q"}, | |
274 | - "majordome.tasks.system_pause": {"queue": "system_status_q"}, | |
275 | - "majordome.tasks.system_restart": {"queue": "system_status_q"}, | |
276 | - "majordome.tasks.change_obs_conditions": {"queue": "change_obs_conditions_q"}, | |
277 | - "monitoring.tasks.monitoring": {"queue": "monitoring_q"}, | |
270 | + "majordome.tasks.Majordome": {"queue": "majordome_q"}, | |
271 | + "monitoring.tasks.Monitoring": {"queue": "monitoring_q"}, | |
278 | 272 | "observation_manager.tasks.execute_plan_vis": {"queue": "execute_plan_vis_q"}, |
279 | 273 | "observation_manager.tasks.execute_plan_nir": {"queue": "execute_plan_nir_q"}, |
280 | 274 | "observation_manager.tasks.create_calibrations": {"queue": "create_calibrations_q"},
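Each permanent task needs two pieces of configuration that must stay in sync: a queue declaration in CELERY_QUEUES and a route in CELERY_ROUTES binding the task to that queue. The commit adds the majordome pair and points the monitoring route at the renamed Monitoring task. A minimal excerpt showing how one queue/route pair lines up (trimmed to two entries for illustration):

    # settings.py (excerpt): one queue per permanent task, plus the matching route.
    CELERY_QUEUES = {
        "majordome_q": {"exchange": "majordome_q", "routing_key": "majordome_q"},
        "monitoring_q": {"exchange": "monitoring_q", "routing_key": "monitoring_q"},
    }

    CELERY_ROUTES = {
        "majordome.tasks.Majordome": {"queue": "majordome_q"},
        "monitoring.tasks.Monitoring": {"queue": "monitoring_q"},
    }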
src/scripts/start_celery_workers.sh
... | ... | @@ -9,12 +9,13 @@ source ../private/venv_py3_pyros/bin/activate |
9 | 9 | |
10 | 10 | ./scripts/kill_celery_workers.sh |
11 | 11 | celery worker -A pyros -Q alert_listener_q -n pyros@alert_listener -c 1 & |
12 | -celery worker -A pyros -Q scheduling_q --purge -n pyros@scheduling -c 1 & | |
13 | -celery worker -A pyros -Q execute_sequence_q --purge -n pyros@execute_sequence -c 1 & | |
14 | -celery worker -A pyros -Q execute_plan_vis_q --purge -n pyros@execute_plan_vis -c 1 & | |
15 | -celery worker -A pyros -Q execute_plan_nir_q --purge -n pyros@execute_plan_nir -c 1 & | |
16 | -celery worker -A pyros -Q create_calibrations_q --purge -n pyros@create_calibrations -c 1 & | |
17 | -celery worker -A pyros -Q analysis_q --purge -n pyros@analysis -c 5 & | |
18 | -celery worker -A pyros -Q system_status_q --purge -n pyros@system_status -c 1 & | |
19 | -celery worker -A pyros -Q change_obs_conditions_q --purge -n pyros@change_obs_conditions -c 1 & | |
20 | 12 | celery worker -A pyros -Q monitoring_q -n pyros@monitoring -c 1 & |
13 | +celery worker -A pyros -Q majordome_q -n pyros@majordome -c 1 & | |
14 | +# celery worker -A pyros -Q scheduling_q --purge -n pyros@scheduling -c 1 & | |
15 | +# celery worker -A pyros -Q execute_sequence_q --purge -n pyros@execute_sequence -c 1 & | |
16 | +# celery worker -A pyros -Q execute_plan_vis_q --purge -n pyros@execute_plan_vis -c 1 & | |
17 | +# celery worker -A pyros -Q execute_plan_nir_q --purge -n pyros@execute_plan_nir -c 1 & | |
18 | +# celery worker -A pyros -Q create_calibrations_q --purge -n pyros@create_calibrations -c 1 & | |
19 | +# celery worker -A pyros -Q analysis_q --purge -n pyros@analysis -c 5 & | |
20 | +# celery worker -A pyros -Q system_status_q --purge -n pyros@system_status -c 1 & | |
21 | +# celery worker -A pyros -Q change_obs_conditions_q --purge -n pyros@change_obs_conditions -c 1 & | |