# Standard imports
import ast
import datetime
import os
import yaml
import pickle
# Django imports
from django.forms.models import model_to_dict
from user_mgmt.models import PyrosUser, ScientificProgram, Period
from seq_submit.models import Sequence, Album, Plan
from .forms import SequenceForm, PlanForm  # , AlbumForm
from django.db import IntegrityError, transaction
# Project imports
from src.core.pyros_django.obs_config.obsconfig_class import OBSConfig
from django.http import HttpRequest
from dashboard.config_pyros import ConfigPyros
# guitastro import
import vendor.guitastro.src.guitastro as guitastro
#from silk.profiling.profiler import silk_profile
import numpy
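
# A minimal sketch of a sequence file in simplified mode, as this module reads it
# (illustrative assumption only: the exact required fields come from SequenceForm,
# PlanForm and the observatory configuration, and names such as "exposuretime"
# below are placeholders, not guaranteed field names):
#
#   simplified: true
#   sequence:
#     scientific_program: 0                       # index into the user's observable programs
#     start_date: "2024-01-01T22:00:00.000000"    # ISO format expected by process_sequence()
#     layout: 0                                   # index into the unit's layouts (or a layout name)
#     ALBUMS:
#       - Album:
#           name: "Album1"
#           Plans:
#             - Plan:
#                 nb_images: 10
#                 exposuretime: 30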


#@silk_profile(name="check_sequence_file")
@transaction.atomic
def check_sequence_file_validity_and_save(yaml_content: dict, request: HttpRequest):
    '''
    Create a sequence in DB from the uploaded sequence (yaml_content)
    '''
    # Create a sequence seq object (from yaml_content) to be saved in DB
    seq = Sequence.objects.create()
    seq.pyros_user = PyrosUser.objects.get(id=request.user.id)

    # Get the unit config
    unit_name = os.environ["unit_name"]
    #with silk_profile(name="init obsconfig"):
    config = OBSConfig(os.environ["PATH_TO_OBSCONF_FILE"], unit_name)

    result = {
        "succeed": True,
        "errors": [],
    }

    # Get the "simplified" boolean to know whether the file is written in simplified mode
    # (i.e. each form field is directly associated with its value)
    is_simplified = yaml_content.get("simplified", False)
    # Get scientific programs for the user who is submitting the sequence file
    user_sp = request.user.get_scientific_programs()

    process_sequence(yaml_content, seq, config, is_simplified, result, user_sp)
    process_albums(yaml_content, result, config, seq, is_simplified)

    # optim possible ?
    #[ process_plans(a["Album"].get("Plans")) for a in albums_from_file ]
    # Then write the process_plans() function
    # For each Album a (in file)
    # We iterate twice over albums_from_file; couldn't we iterate over it only once?
    #for album in yaml_content["sequence"]["ALBUMS"]:
    """ # Old
    for album in albums_from_file:
        album = album["Album"]
        plans = album.get("Plans")
        # If no plan has been defined
        if plans == None:
            result["errors"].append(f"Album {album['name']} has no plans. Please add at least one plan")
            # to avoid the else (more readable)
            continue
        plans = [a["Album"].get("Plans") for a in albums_from_file]
        process_plans(plans, result, is_simplified, config, album, seq)
        # For each plan p (in album a)
        for plan in plans:
            new_plan_object = Plan.objects.create(album=Album.objects.get(name=album["name"], sequence=seq), complete=True)
            new_plan_object.config_attributes = {}
            plan = plan["Plan"]
            config_attributes = {}
            plan_form = PlanForm(data_from_config=config.getEditableChannelAttributes(config.unit_name, list(config.get_channels(config.unit_name).keys())[0]), edited_plan=None)
            # Process each plan field
            for field in plan_form.fields:
                plan_field = plan[field]
                '''
                min_value = None
                max_value = None
                value_type = None
                '''
                min_value = max_value = value_type = None
                if field not in plan.keys():
                    result["errors"].append(f"Missing field : '{field}' for plan {plans.index(plan)}")
                    continue
                # TODO: add max_value, min_value, delete the plan and album if invalid
                if not is_simplified:
                    if plan_field.get("value_type"):
                        value_type = plan_field["value_type"]
                        if type(plan_field["value"]) == str and ast.literal_eval(plan_field["value"]) != value_type:
                            result["errors"].append(f"Field {field} value doesn't correspond to the assigned type (type required : {value_type})")
                    if plan_field.get("min_value"):
                        min_value = plan_field["min_value"]
                        if type(min_value) == str:
                            min_value = ast.literal_eval(min_value)
                        '''
                        if type(plan_field["min_value"]) == str:
                            min_value = ast.literal_eval(plan_field["min_value"])
                        else:
                            min_value = plan_field["min_value"]
                        '''
                    if plan_field.get("max_value"):
                        max_value = plan_field["max_value"]
                        if type(max_value) == str:
                            max_value = ast.literal_eval(max_value)
                        '''
                        if type(plan_field.get("max_value")) == str:
                            max_value = ast.literal_eval(plan_field["max_value"])
                        else:
                            max_value = plan_field["max_value"]
                        '''
                if field == "nb_images":
                    new_plan_object.__dict__[field] = plan_field if is_simplified else plan_field["value"]
                    '''
                    if is_simplified:
                        new_plan_object.__dict__[field] = plan_field
                    else:
                        new_plan_object.__dict__[field] = plan_field["value"]
                    '''
                else:
                    # shortcut possible ?
                    #new_plan_object_field = new_plan_object.config_attributes[field]
                    if is_simplified:
                        new_plan_object.config_attributes[field] = plan_field
                    else:
                        if plan_field.get("values"):
                            index_value = plan_field["value"]
                            values = plan_field["values"]
                            if index_value < 0 or index_value > len(plan_field["values"]):
                                result["errors"].append(f"Value of Plan field '{field}' isn't valid, index out of bounds ({index_value} > {len(values)})")
                                index_value = 0
                            value = plan_field["values"][index_value]
                            try:
                                # linked values
                                splitted_values = value.split(";")
                                config_attributes[field] = {}
                                for splitted_value in splitted_values:
                                    subkey, subvalue = splitted_value.split(":")
                                    config_attributes[field][subkey] = ast.literal_eval(subvalue)
                            # it would be better to specify the exception here
                            except:
                                # Do nothing, normal string
                                config_attributes[field] = ast.literal_eval(value)
                            new_plan_object.config_attributes[field] = config_attributes[field]
                        else:
                            if max_value and min_value:
                                if plan_field["value"] > max_value:
                                    result["errors"].append(f"Plan field {field} doesn't respect max value")
                                if plan_field["value"] < min_value:
                                    result["errors"].append(f"Plan field {field} doesn't respect min value")
                            new_plan_object.config_attributes[field] = plan_field["value"]
            # end foreach plan field
            new_plan_object.save()
        # end foreach plan
    # end foreach album
    """

    seq.status = Sequence.TOBEPLANNED
    seq.complete = True
    period = Period.objects.exploitation_period()
    if Period.objects.next_period() != None and Period.objects.next_period().start_date < seq.start_date.date():
        period = Period.objects.next_period()
    seq.period = period

    # Compute the sequence duration from its plans (the longest plan duration)
    duration = 0
    max_duration = 0
    for album in seq.albums.all():
        for plan in album.plans.all():
            duration = plan.nb_images * (plan.config_attributes.get("exposuretime", 0) + plan.config_attributes.get("readouttime", 0))
            plan.duration = duration
            plan.save()
            if duration >= max_duration:
                max_duration = duration
    seq.duration = max_duration

    fn = guitastro.FileNames()
    home = config.getHome()
    guitastro_home = guitastro.Home(home)
    fn.longitude = guitastro_home.longitude
    seq.night_id = fn.get_night(seq.start_date.isoformat()[:19])

    try:
        seq.save()
    except IntegrityError as e:
        result["errors"].append(str(e))

    if len(result["errors"]) > 0:
        result["succeed"] = False
        seq.delete()
    else:
        result["sequence_id"] = seq.id
    return result
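
# Illustrative call site (a sketch, not the actual view code): the uploaded YAML file is
# parsed and handed to check_sequence_file_validity_and_save(), which returns a dict with
# "succeed", "errors" and, on success, "sequence_id".
#
#   yaml_content = yaml.safe_load(uploaded_file.read())   # uploaded_file is hypothetical
#   result = check_sequence_file_validity_and_save(yaml_content, request)
#   if not result["succeed"]:
#       ...  # display result["errors"] to the user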


def process_albums(yaml_content, result, config, seq, is_simplified):
    # Create ALBUMS
    albums_from_file = yaml_content["sequence"]["ALBUMS"]
    chosen_layout = seq.config_attributes["layout"]
    if type(chosen_layout) == int:
        #with silk_profile(name="Get layout from config"):
        layouts = config.get_layouts(config.unit_name)["layouts"]
        chosen_layout = list(layouts)[chosen_layout]
    # Get the albums of the selected layout
    #with silk_profile(name="Get album of layout from config"):
    layout_albums = config.getLayoutByName(unit_name=config.unit_name, name_of_layout=chosen_layout)["ALBUMS"]
    # Check that all the albums of that layout are described in the sequence file
    #with silk_profile(name="Iterate on each album & plan (create)"):
    if len(layout_albums) == len(albums_from_file):
        for album in albums_from_file:
            album = album["Album"]
            if album["name"] not in layout_albums:
                result["errors"].append(f"Album {album['name']} is not in the chosen layout. The available albums on this layout are : {layout_albums}")
            else:
                # Create album
                Album.objects.create(name=album["name"], sequence=seq, complete=True)
                # Create the plans for that album
                plans = album.get("Plans")
                process_plans(plans, result, is_simplified, config, album, seq)
    else:
        result["errors"].append(f"The number of albums doesn't correspond to the chosen layout")


#@silk_profile(name="process_plans")
def process_plans(plans: dict, result: dict, is_simplified: bool, config: OBSConfig, album: dict, seq: dict):
    if plans == None:
        result["errors"].append(f"Album {album['name']} has no plans. Please add at least one plan")
        # exit function
        return None
    for plan in plans:
        #new_plan_object = Plan.objects.create(album=Album.objects.get(name=album["name"], sequence=seq), complete=True)
        #new_plan_object.config_attributes = {}
        plan = plan["Plan"]
        nb_images = 0
        config_attributes = {}
        #with silk_profile(name="Create plan form"):
        plan_form = PlanForm(data_from_config=config.getEditableChannelAttributes(config.unit_name, list(config.get_channels(config.unit_name).keys())[0]), edited_plan=None)
        # Process each plan field
        #with silk_profile(name="iterate on plan fields"):
        for field in plan_form.fields:
            # use .get() so that a missing field is reported by process_plan_field() instead of raising a KeyError
            plan_field = plan.get(field)
            '''
            min_value = None
            max_value = None
            value_type = None
            '''
            if field == "nb_images":
                nb_images = plan_field if is_simplified else plan_field["value"]
            else:
                process_plan_field(result, config_attributes, plan_field, field, plans, plan, is_simplified)
        # end foreach plan field
        try:
            Plan.objects.create(album=Album.objects.get(name=album["name"], sequence=seq), complete=True, nb_images=nb_images, config_attributes=config_attributes)
        except Album.DoesNotExist:
            result["errors"].append(f"Album {album['name']} not appearing in obsconfig. Please refer to the observatory configuration to set a valid album name.")


def process_plan_field(result, config_attributes, plan_field, field, plans, plan, is_simplified):
    if field not in plan.keys():
        result["errors"].append(f"Missing field : '{field}' for plan {plans.index(plan)}")
        # exit function
        return None
    if is_simplified:
        #new_plan_object.config_attributes[field] = plan_field
        config_attributes[field] = plan_field
    else:
        value_type, min_value, max_value = prepare_check_plan_field_value(plan_field, field, result)
        check_and_set_plan_field_value(config_attributes, plan_field, field, result, value_type, min_value, max_value)


def check_and_set_plan_field_value(config_attributes, plan_field, field, result, value_type, min_value, max_value):
    # if the value is an index of a list, get the value from this index
    if plan_field.get("values"):
        index_value = plan_field["value"]
        values = plan_field["values"]
        if index_value < 0 or index_value >= len(values):
            result["errors"].append(f"Value of Plan field '{field}' isn't valid, index out of bounds ({index_value} >= {len(values)})")
            index_value = 0
        value = plan_field["values"][index_value]
        try:
            # linked values
            splitted_values = value.split(";")
            config_attributes[field] = {}
            for splitted_value in splitted_values:
                subkey, subvalue = splitted_value.split(":")
                config_attributes[field][subkey] = ast.literal_eval(subvalue)
        # it would be better to specify the exception here
        except ValueError:
            # Do nothing, normal string
            config_attributes[field] = ast.literal_eval(value)
        #new_plan_object.config_attributes[field] = config_attributes[field]
    else:
        # check the min and max values if they exist
        if max_value and min_value:
            if plan_field["value"] > max_value:
                result["errors"].append(f"Plan field {field} doesn't respect max value")
            if plan_field["value"] < min_value:
                result["errors"].append(f"Plan field {field} doesn't respect min value")
        #new_plan_object.config_attributes[field] = plan_field["value"]
        config_attributes[field] = plan_field["value"]


def prepare_check_plan_field_value(plan_field, field, result):
    min_value = max_value = value_type = None
    # get value_type, min_value and max_value if they're in the plan form
    if plan_field.get("value_type"):
        value_type = plan_field["value_type"]
        # If the value type doesn't match the value from the form, add an error to result
        if type(plan_field["value"]) == str and ast.literal_eval(plan_field["value"]) != value_type:
            result["errors"].append(f"Field {field} value doesn't correspond to the assigned type (type required : {value_type})")
    if plan_field.get("min_value"):
        min_value = plan_field["min_value"]
        if type(min_value) == str:
            min_value = ast.literal_eval(min_value)
    if plan_field.get("max_value"):
        max_value = plan_field["max_value"]
        if type(max_value) == str:
            max_value = ast.literal_eval(max_value)
    return value_type, min_value, max_value
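
# In non-simplified mode each plan field is exported from the form as a small dict:
# a direct "value", optionally bounded by "min_value"/"max_value"/"value_type", or an
# index into a "values" list of literal strings. A sketch with hypothetical names and
# numbers (the real fields come from PlanForm / the channel attributes in obsconfig):
#
#   exposuretime:
#     value: 30
#     min_value: 0
#     max_value: 3600
#
#   binning:
#     value: 1                  # index into "values" below
#     values: ["1", "2", "4"]   # entries are parsed with ast.literal_eval()
#
# "Linked" values pack several sub-attributes into one entry of "values" as
# "subkey:literal" pairs separated by ";". A hypothetical example of what
# check_and_set_plan_field_value() stores:
#
#   values entry "binning_x:2;binning_y:2"  ->  {"binning_x": 2, "binning_y": 2}
#   values entry "2"                        ->  2  (plain literal)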


def process_sequence(yaml_content, seq, config, is_simplified, result, user_sp):
    # From the user's SP, get all SP that can observe / submit a sequence for the current period
    sp_list = ScientificProgram.objects.observable_programs().filter(id__in=user_sp)
    # Create a Sequence form
    sequence_form = SequenceForm(instance=seq, data_from_config=config.getEditableMountAttributes(config.unit_name), layouts=config.get_layouts(config.unit_name), sp_list=sp_list)
    if is_simplified:
        seq.scientific_program = sp_list[yaml_content["sequence"]["scientific_program"]]
        if yaml_content["sequence"].get("id"):
            seq.id = yaml_content["sequence"].get("id")
            seq.save()
    else:
        # get the scientific program field's attributes
        yaml_seq_sp = yaml_content["sequence"]["scientific_program"]
        sp_index_value = yaml_seq_sp["value"]
        values = yaml_seq_sp["values"]
        # Check if the index of the sp is valid (in the range of possible indexes from values)
        if sp_index_value < 0 or sp_index_value >= len(values):
            result["errors"].append(f"SP value isn't valid, index out of bounds ({sp_index_value} >= {len(values)})")
            sp_index_value = 0
        chosen_sp = ScientificProgram.objects.get(name=values[sp_index_value])
        # If the sp is associated to that user, associate the sp to the sequence
        if chosen_sp in sp_list:
            #seq.scientific_program = ScientificProgram.objects.get(name=yaml_content["sequence"]["scientific_program"]["values"][sp_index_value])
            seq.scientific_program = chosen_sp
        else:
            result["errors"].append(f"SP {chosen_sp.name} is not assigned to that user")

    seq.config_attributes = {}
    # Fill all Sequence form fields
    #with silk_profile(name="iterate sequence fields form"):
    for field, field_attributes in sequence_form.fields.items():
        #if sequence_form.fields[field].required == False or field == "scientific_program":
        if not field_attributes.required or field == "scientific_program":
            continue  # for readability, simplicity and to avoid redundancy
        # If the current field of the sequence isn't found in the file, add an error message to inform the user of the missing field
        if field not in yaml_content["sequence"]:
            result["errors"].append(f"{field} not in yaml file")
        else:
            # read the field only after checking that it exists, otherwise a missing field raises a KeyError
            yaml_field = yaml_content["sequence"][field]
            value = yaml_field if is_simplified else yaml_field["value"]
            ''' (orig)
            if is_simplified:
                value = yaml_content["sequence"][field]
            else:
                value = yaml_content["sequence"][field]["value"]
            '''
            if is_simplified:
                # If the field is a choicefield, get the choices and map the index to the real value
                if sequence_form.fields[field].__dict__.get("_choices"):
                    # is there a conflict here with the "value" variable defined above?
                    # -> No, because we transform the old value, which is an index, into a real value
                    values = [value[0] for value in sequence_form.fields[field].__dict__.get("_choices")]
                    value = values[value]
                # Transform the string value to a datetime value
                if field == "start_date":
                    if type(value) != datetime.datetime:
                        #value = datetime.datetime.strptime(yaml_content["sequence"][field]["value"], '%d/%m/%Y %H:%M:%S')
                        # ISO format
                        value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
                seq.__dict__[field] = value
            else:
                if yaml_field.get("values"):
                    # Transform the original value, which is an index, into a "real" value from the "values" attribute
                    index_value = yaml_field["value"]
                    values = yaml_field["values"]
                    if index_value < 0 or index_value >= len(values):
                        result["errors"].append(f"Value of {field} isn't valid, index out of bounds ({index_value} >= {len(values)})")
                        index_value = 0
                    value = yaml_field["values"][index_value]
                else:
                    # Transform the string value to a datetime value
                    if field == "start_date":
                        if type(value) != datetime.datetime:
                            #value = datetime.datetime.strptime(yaml_content["sequence"][field]["value"], '%d/%m/%Y %H:%M:%S')
                            # ISO format
                            value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
                seq.__dict__[field] = value
            ''' (orig)
            else:
                if not is_simplified:
                    if yaml_content["sequence"][field].get("values"):
                        index_value = yaml_content["sequence"][field]["value"]
                        values = yaml_content["sequence"][field]["values"]
                        if index_value < 0 or index_value > len(yaml_content["sequence"][field]["values"]):
                            result["errors"].append(f"Value of {field} isn't valid, index out of bounds ({index_value} > {len(values)})")
                            index_value = 0
                        value = yaml_content["sequence"][field]["values"][index_value]
                    else:
                        if field == "start_date":
                            if type(value) != datetime.datetime:
                                #value = datetime.datetime.strptime(yaml_content["sequence"][field]["value"],'%d/%m/%Y %H:%M:%S')
                                # ISO format
                                value = datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
                                seq.__dict__[field] = value
                else:
                    if sequence_form.fields[field].__dict__.get("_choices"):
                        values = [value[0] for value in sequence_form.fields[field].__dict__.get("_choices")]
                        value = values[value]
            '''
            # sufficient? => if field in seq.__dict__
            # If the field is an attribute of the sequence, associate the field to the value
            if field in seq.__dict__:
                seq.__dict__[field] = value
            else:
                # else store the field & value in the sequence's config_attributes field (JSONField) = variable fields of a sequence
                seq.config_attributes[field] = value
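
# In non-simplified mode the sequence-level fields follow the same export shape as the
# plan fields: a dict with "value" (and "values" for choice fields). A hypothetical sketch
# (program names are placeholders):
#
#   sequence:
#     scientific_program:
#       value: 0
#       values: ["SP1", "SP2"]
#     start_date:
#       value: "2024-01-01T22:00:00.000000"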


def create_sequence_pickle(sequence):
    seq_dict = model_to_dict(sequence)
    fullseq_dict = {
        "sequence": seq_dict,
        "albums": {}
    }
    for album in sequence.albums.all():
        fullseq_dict["albums"][album.name] = {"plans": []}
        for plan in album.plans.all():
            fullseq_dict["albums"][f"{album.name}"]["plans"].append(model_to_dict(instance=plan))
    period = sequence.period

    # Old folder & file creation
    # root_project_path = os.environ.get("PROJECT_ROOT_PATH")
    # data_path = root_project_path + "/data/"
    # if not os.path.exists(data_path + "sequences_pickle"):
    #     os.mkdir(data_path + "sequences_pickle")
    # if not os.path.exists(data_path + f"sequences_pickle/P{period.id}"):
    #     os.mkdir(data_path + f"sequences_pickle/P{period.id}")
    # if not os.path.exists(data_path + f"sequences_pickle/P{period.id}/{sequence.night_id}"):
    #     os.mkdir(data_path + f"sequences_pickle/P{period.id}/{sequence.night_id}")
    # seq_pickle_file_name = data_path + f"./sequences_pickle/P{period.id}/{sequence.night_id}/{sequence.id}.p"

    # get guitastro ephemeris
    unit_name = os.environ["unit_name"]
    config = OBSConfig(os.environ["PATH_TO_OBSCONF_FILE"], unit_name)
    pyros_config = ConfigPyros(os.environ["pyros_config_file"])
    config.fn.fcontext = "pyros_seq"

    # zero-pad the period id to 3 digits (e.g. 7 -> "007")
    period_id = str(period.id)
    if len(str(period.id)) < 3:
        while len(period_id) < 3:
            period_id = "0" + period_id

    fn_param = {
        "period": f"P{period_id}",
        "version": "1",
        "unit": config.unit_name,
        "date": sequence.night_id,
        "id_seq": sequence.id
    }
    test_mode = False
    if sequence.id >= 9990000000:
        # in test mode
        config.fn.rootdir = os.path.abspath(config.fn.rootdir.replace("PRODUCTS/", "PRODUCTS/TESTS/", 1))
        test_mode = True
    config.fn.fname = config.fn.naming_set(fn_param)
    fpath_name = config.fn.join(config.fn.fname)
    # create the dirs if they don't exist
    os.makedirs(os.path.dirname(fpath_name), exist_ok=True)
    print(fpath_name)

    eph = guitastro.Ephemeris()
    eph.set_home(config.getHome())
    # duskelev should be configurable in obsconfig (yml)
    duskelev = -7
    errors = []
    try:
        #fullseq_dict["ephem"] = eph.target2night(fullseq_dict["sequence"]["config_attributes"]["target"], sequence.night_id, None, None, preferance=sequence.start_expo_pref, duskelev=duskelev)
        # change fcontext to the eph context
        config.fn.fcontext = "pyros_eph"
        if test_mode:
            config.fn.rootdir = os.path.abspath(config.fn.rootdir.replace("PRODUCTS/", "PRODUCTS/TESTS/", 1))
        eph_root_dir = config.fn.rootdir
        fn_param["target"] = "sun"
        config.fn.fname = config.fn.naming_set(fn_param)
        sun_eph_fpath = config.fn.join(config.fn.fname)
        fn_param["target"] = "moon"
        config.fn.fname = config.fn.naming_set(fn_param)
        moon_eph_fpath = config.fn.join(config.fn.fname)
        # open the eph files
        sun_eph = pickle.load(open(sun_eph_fpath, "rb"))
        moon_eph = pickle.load(open(moon_eph_fpath, "rb"))
        ephem = eph.target2night(fullseq_dict["sequence"]["config_attributes"]["target"], sequence.night_id, sun_eph, moon_eph, preference=sequence.start_expo_pref, duskelev=duskelev)
    except ValueError:
        errors.append("Target value is not valid")
    except guitastro.ephemeris.EphemerisException as ephemException:
        errors.append(str(ephemException))

    if len(errors) == 0 and numpy.sum(ephem["visibility"]) == 0:
        errors.append("Target is not visible.")
    if len(errors) == 0:
        pickle.dump(ephem, open(f"{fpath_name[:-2]}.f", "wb"))
        pickle.dump(fullseq_dict, open(fpath_name, "wb"))
    return errors
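
# Illustrative outcome (a sketch under the naming conventions above; the actual paths
# depend on the "pyros_seq" / "pyros_eph" file-naming contexts of the obsconfig):
# create_sequence_pickle() writes two pickles, the computed ephemeris (".f" companion file)
# and the fullseq_dict {"sequence": {...}, "albums": {album_name: {"plans": [...]}}},
# and returns the list of errors (empty on success).
#
#   errors = create_sequence_pickle(sequence)
#   if errors:
#       ...  # report the problem (invalid target, non-visible target, ephemeris error)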