Commit c1d72f1c301d2285232373bc80a442960ed416ab
Parent: a0cfe93a
Exists in: master

improve hierarchy and update file paths.

7 changed files with 11 additions and 21 deletions
DaCHS/build_BDD.py
@@ -24,11 +24,10 @@ SQLDic = Dict[str, object]
 SpaseDic = Dict[str, List[ElTr.Element]]
 
 # Paths
-WORKING_DIR = op.dirname(op.dirname(op.abspath(__file__)))
-OUTPUT_PATH = op.join(WORKING_DIR, 'SERVER')
-SQL_FILE_PATH = op.join(OUTPUT_PATH, 'amdadb_db.sql')
+WORKING_DIR = op.dirname(op.abspath(__file__)) # parent directory
+OUTPUT_SQL_FILE_PATH = op.join(WORKING_DIR, 'DaCHS', 'amdadb_db.sql')
 SPASE_DIR = op.join(WORKING_DIR, 'DATA')
-LOG_FILE_PATH = op.join(WORKING_DIR, 'build_granules.log') # Set to None if you want to log in stdout instead of a file
+LOG_FILE_PATH = op.join(WORKING_DIR, 'log', 'build_granules.log') # Set to None if you want to log in stdout instead of a file
 
 # XML and SQL formats
 XMLNS = 'http://www.spase-group.org/data/schema'
@@ -544,7 +543,7 @@ Return a list containing the granules, where each granule is a dictionary, with:
 def write_sql(granules_list):
     """Write a SQL script which insert all the granules in the database."""
 
-    with open(SQL_FILE_PATH, 'w') as sql_file:
+    with open(OUTPUT_SQL_FILE_PATH, 'w') as sql_file:
         sql_file.write(SQL_HEADER)
         for gr in granules_list:
             keys = ', '.join(gr.keys())
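The rest of the loop is not visible in this hunk. As a generic sketch only (the table name, quoting and values expression below are assumptions, not taken from build_BDD.py), code of this shape typically emits one INSERT statement per granule:

import io

def write_insert(sql_file, gr, table='granules'):
    # Generic illustration: the real table name and value formatting
    # used by build_BDD.py are not shown in this diff.
    keys = ', '.join(gr.keys())
    values = ', '.join("'%s'" % v for v in gr.values())
    sql_file.write('INSERT INTO %s (%s) VALUES (%s);\n' % (table, keys, values))

buf = io.StringIO()
write_insert(buf, {'granule_uid': 'g1', 'release_date': '2018-01-01'})
print(buf.getvalue())  # INSERT INTO granules (granule_uid, release_date) VALUES ('g1', '2018-01-01');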
@@ -573,4 +572,4 @@ if __name__ == '__main__':
 
     import subprocess
 
-    subprocess.Popen(['notify-send', 'The SQL script %s has been generated.' % SQL_FILE_PATH])
+    subprocess.Popen(['notify-send', 'The SQL script %s has been generated.' % OUTPUT_SQL_FILE_PATH])
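For orientation, the only change to WORKING_DIR itself is dropping one op.dirname level: the old expression resolved two levels above the script, the new one resolves to the directory containing it. A minimal sketch with a made-up path (the real install location is an assumption here):

import os.path as op

# Hypothetical location, only to illustrate how the two expressions resolve.
script = '/opt/amdadb/DaCHS/build_BDD.py'

old_dir = op.dirname(op.dirname(op.abspath(script)))  # two levels up
new_dir = op.dirname(op.abspath(script))              # directory of the script

print(old_dir)  # /opt/amdadb
print(new_dir)  # /opt/amdadb/DaCHS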
get_cdf.php renamed to converter/get_cdf.php
nc2cdf.py renamed to converter/nc2cdf.py
create_granules.py
@@ -1,14 +1,5 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# interpreter: Python 3.6 with anaconda. Please set and prepare the conda environment.
-# set PATH $HOME/.anaconda2/bin/ $PATH; and source $HOME/.anaconda2/etc/fish/conf.d/conda.fish
-# set PATH $HOME/.anaconda3/bin/ $PATH; and source $HOME/.anaconda3/etc/fish/conf.d/conda.fish
-# Add this lines in your init.fish (adapt for Bash terms), so you can choose which conda version to use:
-# conda3 # Using conda3
-# conda create --name granules # 1st time only
-# activate granules # or `conda activate granules` in Bash terms
-# conda install netCDF4 # 1st time only
-
 """This script download all files from a ``SPASE`` registry, then log and correct eventual errors
 and add several files and information, such as granules estimation size."""
 
@@ -42,10 +33,10 @@ NUMDATA_KEYWORDS = ['/NumericalData/', '/NumericalOutput/']
 GRANULE_KEYWORD = '/Granules/'
 
 # local paths
-BASE_DIR = op.dirname(op.dirname(op.abspath(__file__)))
-SPASE_DIR = op.join(BASE_DIR, 'DATA') # /!\ Double-check this : this directory will be recursively deleted.
-LOG_FILE_PATH = op.join(BASE_DIR, 'create_granules.log')
-BLACKLIST_PATH = op.join(BASE_DIR, 'blacklist')
+WORKING_DIR = op.dirname(op.abspath(__file__)) # current directory
+SPASE_DIR = op.join(WORKING_DIR, 'DATA') # /!\ Double-check this : this directory will be recursively deleted.
+LOG_FILE_PATH = op.join(WORKING_DIR, 'log', 'create_granules.log')
+BLACKLIST_PATH = op.join(WORKING_DIR, 'resources', 'blacklist')
 
 LOG_FILE = open(LOG_FILE_PATH, 'w+') # Please set to None if you want to log in stdout instead of a file.
 
@@ -438,8 +429,8 @@ def write_all_granules() -> None:
 
 
 if __name__ == '__main__':
-    if not op.exists(BASE_DIR):
-        makedirs(BASE_DIR)
+    if not op.exists(WORKING_DIR):
+        makedirs(WORKING_DIR)
 
     if op.isdir(SPASE_DIR):
         print('Clearing SPASE directory (%s)...' % SPASE_DIR)
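One consequence of the new layout worth keeping in mind: LOG_FILE = open(LOG_FILE_PATH, 'w+') runs at import time, and open() does not create intermediate directories, so the new log/ subdirectory (and resources/, for the blacklist) must already exist next to the script. A minimal sketch of a guard that could run before that open() call, assuming Python 3:

import os
import os.path as op

WORKING_DIR = op.dirname(op.abspath(__file__))

# Assumed layout: <script dir>/log must exist before the module-level
# open() of the log file; exist_ok avoids an error if it is already there.
os.makedirs(op.join(WORKING_DIR, 'log'), exist_ok=True)

LOG_FILE_PATH = op.join(WORKING_DIR, 'log', 'create_granules.log')
LOG_FILE = open(LOG_FILE_PATH, 'w+')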
doc/CDF_spec.md renamed to specs/CDF_spec.md
doc/spase-2_2_6.xsd renamed to specs/spase-2_2_6.xsd
doc/spase-amda-1_3_0.xsd renamed to specs/spase-amda-1_3_0.xsd