diff --git a/CHANGES.rst b/CHANGES.rst index 6996c881f..507a7ae40 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,45 @@ +0.21.0 (2019-07-23) +=================== + +New Features +------------ + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Updated ``README`` to include instructions on package installation via ``pip``. + +Web Application +~~~~~~~~~~~~~~~ + +- Updated all webpages to conform to Web Application Accessibility Guidelines. +- Upgraded to ``django`` version 2.2. +- ``bokeh`` is now imported in ``base`` template so that the version being used is consistent across all HTML templates. + +``jwql`` Repository +~~~~~~~~~~~~~~~~~~~ + +- The ``jwql`` package is now available on PyPI (https://pypi.org/project/jwql/) and installable via ``pip``. +- Updated Jenkins configuration file to include in-line comments and descriptions. +- Added ``utils`` function to validate the ``config.json`` file during import of ``jwql`` package. +- Added support for monitoring contents of the ``jwql`` central storage area in the filesystem monitor. + + +Bug Fixes +--------- + +Web Application +~~~~~~~~~~~~~~~ + +- Fixed position error of JWQL version display in footer. + +``jwql`` Repository +~~~~~~~~~~~~~~~~~~~ + +- Fixed spelling error in dark monitor database column names. +- Fixed dark monitor to avoid processing files that are not in the filesystem. + + 0.20.0 (2019-06-05) =================== diff --git a/Jenkinsfile b/Jenkinsfile index 2f0c00fad..44d1185ff 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,39 +1,94 @@ -// Obtain files from source control system. 
+// JWQL Jenkinsfile +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// +// Authors: +// -------- +// - Matthew Bourque +// - Lauren Chambers +// - Joshua Alexander +// - Sara Ogaz +// - Matt Rendina +// +// Notes: +// ------ +// - More info here: https://github.com/spacetelescope/jenkinsfile_ci_examples +// - Syntax defined here: https://github.com/spacetelescope/jenkins_shared_ci_utils +// +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// +// scm_checkout() does the following: +// 1. Disables pipeline execution if [skip ci] or [ci skip] is present in the +// commit message, letting users exclude individual commits from CI +// 2. Clones the Git repository +// 3. Creates a local cache of the repository to avoid commit drift between tasks +// (i.e. Each stage is guaranteed to receive the same source code regardless of +// commits taking place after the pipeline has started.) if (utils.scm_checkout()) return -matrix_os = ["linux-stable"] +// Establish OS and Python version variables for the matrix +matrix_os = ["linux-stable"] // (Note that Jenkins can only be run with Linux, not MacOSX/Windows) matrix_python = ["3.5", "3.6"] + +// Set up the matrix of builds matrix = [] -withCredentials([string( - credentialsId: 'jwql-codecov', - variable: 'codecov_token')]) { - - for (os in matrix_os) { - for (python_ver in matrix_python) { - // Define each build configuration, copying and overriding values as necessary. 
- env_py = "_python_${python_ver}".replace(".", "_") - bc = new BuildConfig() - bc.nodetype = os - bc.name = "debug-${os}-${env_py}" - bc.conda_packages = ["python=${python_ver}"] - bc.build_cmds = [ - "conda env update --file=environment${env_py}.yml", - "pip install codecov pytest-cov", - "python setup.py install"] - bc.test_cmds = [ - "pytest -s --junitxml=results.xml --cov=./jwql/ --cov-report=xml:coverage.xml", - "sed -i 's/file=\"[^\"]*\"//g;s/line=\"[^\"]*\"//g;s/skips=\"[^\"]*\"//g' results.xml", - "codecov --token=${codecov_token}", - "mkdir -v reports", - "mv -v coverage.xml reports/coverage.xml"] - matrix += bc +// Define IDs that live on the Jenkins server (here, for CodeCov and PyPI) +withCredentials([ + string(credentialsId: 'jwql-codecov', variable: 'codecov_token'), + usernamePassword(credentialsId:'jwql-pypi', usernameVariable: 'pypi_username', passwordVariable: 'pypi_password')]) + +// Iterate over the above variables to define the build matrix. +{ + for (os in matrix_os) { + for (python_ver in matrix_python) { + // Define each build configuration, copying and overriding values as necessary. + + // Define a string variable to reflect the python version of this build + env_py = "_python_${python_ver}".replace(".", "_") + + // Create a new build configuration + bc = new BuildConfig() + + // Define the OS (only "linux-stable" used here) + bc.nodetype = os + + // Give the build configuration a name. This string becomes the + // stage header on Jenkins' UI. Keep it short! + bc.name = "debug-${os}-${env_py}" + + // (Required) Define what packages to include in the base conda environment. + // This specification also tells Jenkins to spin up a new conda environment for + // your build, rather than using the default environment. 
+ bc.conda_packages = ["python=${python_ver}"] + + // Execute a series of commands to set up the build, including + // any packages that have to be installed with pip + bc.build_cmds = [ + "conda env update --file=environment${env_py}.yml", // Update env from file + "pip install codecov pytest-cov", // Install additional packages + "python setup.py install", // Install JWQL package + "python setup.py sdist bdist_wheel" // Build JWQL package wheel for PyPI + ] + + // Execute a series of test commands + bc.test_cmds = [ + // Run pytest + "pytest ./jwql/tests/ -s --junitxml=results.xml --cov=./jwql/ --cov-report=xml:coverage.xml", + // Add a truly magical command that makes Jenkins work for Python 3.5 + "sed -i 's/file=\"[^\"]*\"//g;s/line=\"[^\"]*\"//g;s/skips=\"[^\"]*\"//g' results.xml", + // Define CodeCov token + "codecov --token=${codecov_token}", + // Move the CodeCov report to a different dir to not confuse Jenkins about results.xml + "mkdir -v reports", + "mv -v coverage.xml reports/coverage.xml", + // Upload the package wheel to PyPI + "twine upload -u '${pypi_username}' -p '${pypi_password}' --repository-url https://upload.pypi.org/legacy/ --skip-existing dist/*"] + + // Add the build to the matrix + matrix += bc + } } - } - // bc1 = utils.copy(bc0) - // bc1.build_cmds[0] = "conda install -q -y python=3.5" - // Iterate over configurations that define the (distibuted) build matrix. - // Spawn a host of the given nodetype for each combination and run in parallel. - utils.run(matrix) + // Submit the build configurations and execute them in parallel + utils.run(matrix) } diff --git a/README.md b/README.md index e03cd513c..94f46dd14 100644 --- a/README.md +++ b/README.md @@ -23,13 +23,23 @@ Official API documentation can be found on [ReadTheDocs](https://jwql.readthedoc The `jwql` application is currently under heavy development. The `1.0` release is expected in 2019. 
Currently, a development version of the web application can be found at [https://dljwql.stsci.edu](https://dljwql.stsci.edu). -## Installation +## Installation for Users + +To install `jwql`, simply use `pip`: + +``` +pip install jwql +``` + +The section below describes a more detailed installation for users that wish to contribute to the `jwql` repository. + +## Installation for Contributors Getting `jwql` up and running on your own computer requires four steps, detailed below: 1. Cloning the GitHub repository -1. Installing the `conda`environment -1. Installing the python package -1. Setting up the configuration file +2. Installing the `conda` environment +3. Installing the python package +4. Setting up the configuration file ### Prerequisites @@ -64,25 +74,31 @@ Following the download of the `jwql` repository, contributors can then install t conda update conda ``` -Next, activate the `base` environment: +Next, activate the `base` or `root` environment (depending on your version of `conda`): ``` -source activate base +source activate base/root ``` Lastly, create the `jwql` environment with either Python 3.5 or 3.6, via the `environment_python_3_5.yml` or `environment_python_3_6.yml` file, respectively. We recommend installing with the 3.6 version: ``` -conda env create -f environment_python_3_6.yml +conda env create -f environment_python_3_6.yml --name jwql-3.6 ``` ### Package Installation -Next, you need to install the `jwql` package. While still in the `jwql/` directory, run the following command to set up the package: +Next, you need to install the `jwql` package in development mode. This can be accomplished either by running the `setup.py` script, or `pip install` with the `-e` option: ``` python setup.py develop ``` + +or + +``` +pip install -e . +``` The package should now appear if you run `conda list jwql`. 
### Configuration File @@ -135,11 +151,12 @@ Any questions about the `jwql` project or its software can be directed to `jwql@ - Bryan Hilbert (INS) [@bilhbert4](https://github.com/bhilbert4) - Graham Kanarek (INS) [@gkanarek](https://github.com/gkanarek) - Catherine Martlin (INS) [@catherine-martlin](https://github.com/catherine-martlin) -- Sara Ogaz (OED) [@SaOgaz](https://github.com/SaOgaz) - Johannes Sahlmann (INS) [@Johannes-Sahlmann](https://github.com/johannes-sahlmann) +- Ben Sunnquist (INS) [@bsunnquist](https://github.com/bsunnquist) ## Acknowledgments: - Faith Abney (DMD) +- Joshua Alexander (DMD) [@obviousrebel](https://github.com/obviousrebel) - Anastasia Alexov (DMD) - Sara Anderson (DMD) - Tracy Beck (INS) @@ -171,11 +188,13 @@ Any questions about the `jwql` project or its software can be directed to `jwql@ - Karen Levay (DMD) - Crystal Mannfolk (SCOPE) [@cmannfolk](https://github.com/cmannfolk) - Greg Masci (ITSD) +- Jacob Matuskey (DMD) [@jmatuskey](https://github.com/jmatuskey) - Margaret Meixner (INS) - Christain Mesh (DMD) [@cam72cam](https://github.com/cam72cam) - Prem Mishra (ITSD) - Don Mueller (ITSD) - Maria Antonia Nieto-Santisteban (SEITO) +- Sara Ogaz (DMD) [@SaOgaz](https://github.com/SaOgaz) - Brian O'Sullivan (INS) - Joe Pollizzi (JWSTMO) - Lee Quick (DMD) diff --git a/environment_python_3_5.yml b/environment_python_3_5.yml index 43b44b4cc..0baf3d237 100644 --- a/environment_python_3_5.yml +++ b/environment_python_3_5.yml @@ -11,6 +11,7 @@ dependencies: - inflection=0.3.1 - ipython=6.5.0 - jinja2=2.10 +- jsonschema>=2.6.0 - jwst=0.13.0 - matplotlib=3.0.0 - numpy=1.15.2 @@ -27,6 +28,7 @@ dependencies: - sphinx_rtd_theme=0.1.9 - sqlalchemy=1.2.11 - stsci_rtd_theme=0.0.2 +- twine=1.11.0 - pip: - authlib==0.10 - codecov==2.0.15 diff --git a/environment_python_3_6.yml b/environment_python_3_6.yml index 3d6eddfc4..3788dacf8 100644 --- a/environment_python_3_6.yml +++ b/environment_python_3_6.yml @@ -7,12 +7,13 @@ dependencies: - 
astroquery=0.3.9 - bokeh=1.2.0 - crds>=7.2.7 -- django=2.1.7 +- django=2.2.1 - inflection=0.3.1 -- ipython=7.5.0 +- ipython=7.6.1 - jinja2=2.10 +- jsonschema>=3.0.1 - jwst=0.13.1 -- matplotlib=3.0.2 +- matplotlib=3.1.0 - numpy=1.16.4 - numpydoc=0.9.0 - pandas=0.24.2 @@ -20,16 +21,18 @@ dependencies: - psycopg2=2.7.5 - python=3.6.4 - python-dateutil=2.7.5 -- pytest=4.5.0 +- pytest=5.0.1 - pytest-cov=2.6.1 - pytest-html=1.19.0 -- sphinx=2.0.1 +- sphinx=2.1.0 - sphinx_rtd_theme=0.1.9 -- sqlalchemy=1.3.3 +- sqlalchemy=1.3.5 +- sqlparse=0.3.0 - stsci_rtd_theme=0.0.2 +- twine=1.13.0 - pip: - authlib==0.10 - codecov==2.0.15 - jwedb>=0.0.3 - - pysiaf==0.2.5 + - pysiaf==0.3.1 - sphinx-automodapi==0.10 diff --git a/jwql/database/database_interface.py b/jwql/database/database_interface.py old mode 100644 new mode 100755 index b76a2c3b6..e376ff2db --- a/jwql/database/database_interface.py +++ b/jwql/database/database_interface.py @@ -82,6 +82,8 @@ from jwql.utils.constants import ANOMALIES, FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES from jwql.utils.utils import get_config +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') + # Monkey patch Query with data_frame method @property @@ -90,6 +92,7 @@ def data_frame(self): return pd.read_sql(self.statement, self.session.bind) + Query.data_frame = data_frame @@ -135,8 +138,9 @@ def load_connection(connection_string): return session, base, engine, meta + # Import a global session. 
If running from readthedocs or Jenkins, pass a dummy connection string -if 'build' and 'project' in socket.gethostname() or os.path.expanduser('~') == '/home/jenkins': +if 'build' and 'project' in socket.gethostname() or ON_JENKINS: dummy_connection_string = 'postgresql+psycopg2://account:password@hostname:0000/db_name' session, base, engine, meta = load_connection(dummy_connection_string) else: @@ -167,7 +171,8 @@ class FilesystemInstrument(base): # Name the table __tablename__ = 'filesystem_instrument' - __table_args__ = (UniqueConstraint('date', 'instrument', 'filetype', name='filesystem_instrument_uc'),) + __table_args__ = (UniqueConstraint('date', 'instrument', 'filetype', + name='filesystem_instrument_uc'),) # Define the columns id = Column(Integer, primary_key=True, nullable=False) @@ -187,6 +192,22 @@ def colnames(self): return a_list +class CentralStore(base): + """ORM for the central storage area filesystem monitor + table""" + + # Name the table + __tablename__ = 'central_storage' + + # Define the columns + id = Column(Integer, primary_key=True, nullable=False) + date = Column(DateTime, nullable=False) + area = Column(String(), nullable=False) + size = Column(Float, nullable=False) + used = Column(Float, nullable=False) + available = Column(Float, nullable=False) + + class Monitor(base): """ORM for the ``monitor`` table""" @@ -292,7 +313,8 @@ def get_monitor_columns(data_dict, table_name): # Create a new column if dtype in list(data_type_dict.keys()): if array: - data_dict[column_name.lower()] = Column(ARRAY(data_type_dict[dtype], dimensions=dimension)) + data_dict[column_name.lower()] = Column(ARRAY(data_type_dict[dtype], + dimensions=dimension)) else: data_dict[column_name.lower()] = Column(data_type_dict[dtype]) else: @@ -345,7 +367,9 @@ class : obj # Columns specific to all monitor ORMs data_dict['id'] = Column(Integer, primary_key=True, nullable=False) data_dict['entry_date'] = Column(DateTime, unique=True, nullable=False, default=datetime.now()) - 
data_dict['__table_args__'] = (UniqueConstraint('id', 'entry_date', name='{}_uc'.format(data_dict['__tablename__'])),) + data_dict['__table_args__'] = ( + UniqueConstraint('id', 'entry_date', name='{}_uc'.format(data_dict['__tablename__'])), + ) # Get monitor-specific columns data_dict = get_monitor_columns(data_dict, data_dict['__tablename__']) diff --git a/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt b/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt index cdd2d681d..bfed0e993 100644 --- a/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt @@ -4,7 +4,7 @@ MEAN, float STDEV, float SOURCE_FILES, string_array_1d GAUSS_AMPLITUDE, float_array_1d -GUASS_PEAK, float_array_1d +GAUSS_PEAK, float_array_1d GAUSS_WIDTH, float_array_1d GAUSS_CHISQ, float DOUBLE_GAUSS_AMPLITUDE1, float_array_1d diff --git a/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt b/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt index cdd2d681d..bfed0e993 100644 --- a/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt @@ -4,7 +4,7 @@ MEAN, float STDEV, float SOURCE_FILES, string_array_1d GAUSS_AMPLITUDE, float_array_1d -GUASS_PEAK, float_array_1d +GAUSS_PEAK, float_array_1d GAUSS_WIDTH, float_array_1d GAUSS_CHISQ, float DOUBLE_GAUSS_AMPLITUDE1, float_array_1d diff --git a/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt b/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt index cdd2d681d..bfed0e993 100644 --- a/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt @@ -4,7 +4,7 @@ MEAN, float STDEV, float SOURCE_FILES, string_array_1d GAUSS_AMPLITUDE, float_array_1d 
-GUASS_PEAK, float_array_1d +GAUSS_PEAK, float_array_1d GAUSS_WIDTH, float_array_1d GAUSS_CHISQ, float DOUBLE_GAUSS_AMPLITUDE1, float_array_1d diff --git a/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt b/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt index cdd2d681d..bfed0e993 100644 --- a/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt @@ -4,7 +4,7 @@ MEAN, float STDEV, float SOURCE_FILES, string_array_1d GAUSS_AMPLITUDE, float_array_1d -GUASS_PEAK, float_array_1d +GAUSS_PEAK, float_array_1d GAUSS_WIDTH, float_array_1d GAUSS_CHISQ, float DOUBLE_GAUSS_AMPLITUDE1, float_array_1d diff --git a/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt index cdd2d681d..bfed0e993 100644 --- a/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt @@ -4,7 +4,7 @@ MEAN, float STDEV, float SOURCE_FILES, string_array_1d GAUSS_AMPLITUDE, float_array_1d -GUASS_PEAK, float_array_1d +GAUSS_PEAK, float_array_1d GAUSS_WIDTH, float_array_1d GAUSS_CHISQ, float DOUBLE_GAUSS_AMPLITUDE1, float_array_1d diff --git a/jwql/instrument_monitors/common_monitors/dark_monitor.py b/jwql/instrument_monitors/common_monitors/dark_monitor.py index 8786e05fb..b6b3b06e4 100755 --- a/jwql/instrument_monitors/common_monitors/dark_monitor.py +++ b/jwql/instrument_monitors/common_monitors/dark_monitor.py @@ -728,15 +728,21 @@ def run(self): new_entries = mast_query_darks(instrument, aperture, self.query_start, self.query_end) logging.info('\tAperture: {}, new entries: {}'.format(self.aperture, len(new_entries))) + # Get full paths to the files that actually exist in filesystem + new_filenames = [] + for file_entry in new_entries: + 
try: + new_filenames.append(filesystem_path(file_entry['filename'])) + except FileNotFoundError: + logging.warning('\t\tUnable to locate {} in filesystem. Not including in processing.' + .format(file_entry['filename'])) + # Check to see if there are enough new files to meet the monitor's signal-to-noise requirements - if len(new_entries) >= file_count_threshold: + if len(new_filenames) >= file_count_threshold: logging.info('\tSufficient new dark files found for {}, {} to run the dark monitor.' .format(self.instrument, self.aperture)) - # Get full paths to the files - new_filenames = [filesystem_path(file_entry['filename']) for file_entry in new_entries] - # Set up directories for the copied data ensure_dir_exists(os.path.join(self.output_dir, 'data')) self.data_dir = os.path.join(self.output_dir, @@ -947,7 +953,7 @@ def stats_by_amp(self, image, amps): degrees_of_freedom = len(hist) - 3. total_pix = np.sum(hist[positive]) p_i = gauss_fit[positive] / total_pix - gaussian_chi_squared[key] = (np.sum((hist[positive] - (total_pix*p_i)**2) / (total_pix*p_i)) + gaussian_chi_squared[key] = (np.sum((hist[positive] - (total_pix * p_i) ** 2) / (total_pix * p_i)) / degrees_of_freedom) # Double Gaussian fit only for full frame data (and only for @@ -961,7 +967,7 @@ def stats_by_amp(self, image, amps): double_gauss_fit = calculations.double_gaussian(bin_centers, *double_gauss_params) degrees_of_freedom = len(bin_centers) - 6. dp_i = double_gauss_fit[positive] / total_pix - double_gaussian_chi_squared[key] = np.sum((hist[positive] - (total_pix*dp_i)**2) / (total_pix*dp_i)) / degrees_of_freedom + double_gaussian_chi_squared[key] = np.sum((hist[positive] - (total_pix * dp_i) ** 2) / (total_pix * dp_i)) / degrees_of_freedom else: double_gaussian_params[key] = [[0., 0.] 
for i in range(6)] diff --git a/jwql/jwql_monitors/monitor_filesystem.py b/jwql/jwql_monitors/monitor_filesystem.py index 5b97538cd..34a7c4885 100755 --- a/jwql/jwql_monitors/monitor_filesystem.py +++ b/jwql/jwql_monitors/monitor_filesystem.py @@ -1,10 +1,9 @@ #! /usr/bin/env python -""" -This module monitors and gather statistics of the filesystem that hosts -data for the ``jwql`` application. This will answer questions such as -the total number of files, how much disk space is being used, and then -plot these values over time. +"""This module monitors and gathers statistics of the filesystem and +central storage area that hosts data for the ``jwql`` application. +This will answer questions such as the total number of files, how much +disk space is being used, and then plot these values over time. Authors ------- @@ -51,6 +50,7 @@ from jwql.database.database_interface import session from jwql.database.database_interface import FilesystemGeneral from jwql.database.database_interface import FilesystemInstrument +from jwql.database.database_interface import CentralStore from jwql.utils.logging_functions import configure_logging, log_info, log_fail from jwql.utils.permissions import set_permissions from jwql.utils.constants import FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES, JWST_INSTRUMENT_NAMES_MIXEDCASE @@ -58,6 +58,7 @@ from jwql.utils.utils import get_config FILESYSTEM = get_config()['filesystem'] +CENTRAL = get_config()['jwql_dir'] def gather_statistics(general_results_dict, instrument_results_dict): @@ -113,14 +114,14 @@ def gather_statistics(general_results_dict, instrument_results_dict): general_results_dict['total_file_size'] = general_results_dict['total_file_size'] / (2**40) general_results_dict['fits_file_size'] = general_results_dict['fits_file_size'] / (2**40) - logging.info('{} files found in filesystem'.format(general_results_dict['fits_file_count'])) + logging.info('{} fits files found in filesystem'.format(general_results_dict['fits_file_count'])) 
return general_results_dict, instrument_results_dict def get_global_filesystem_stats(general_results_dict): """Gathers ``used`` and ``available`` ``df``-style stats on the - entire filesystem. + entire filesystem. (Not just directory titled filesystem.) Parameters ---------- @@ -133,15 +134,94 @@ def get_global_filesystem_stats(general_results_dict): A dictionary for the ``filesystem_general`` database table """ - command = "df {}".format(FILESYSTEM) + command = "df -k {}".format(FILESYSTEM) command += " | awk '{print $3, $4}' | tail -n 1" stats = subprocess.check_output(command, shell=True).split() - general_results_dict['used'] = int(stats[0]) / (2**40) - general_results_dict['available'] = int(stats[1]) / (2**40) + general_results_dict['used'] = int(stats[0]) / (1024**3) + general_results_dict['available'] = int(stats[1]) / (1024**3) return general_results_dict +def get_area_stats(central_storage_dict): + """Gathers ``used`` and ``available`` ``df``-style stats on the + selected area. 
+ + Parameters + ---------- + central_storage_dict : dict + A dictionary for the ``central_storage`` database table + + Returns + ------- + central_storage_dict : dict + A dictionary for the ``central_storage`` database table + """ + logging.info('Searching central storage system...') + + arealist = ['logs', 'outputs', 'test', 'preview_images', 'thumbnails', 'all'] + counteddirs = [] + + sums = 0 # to be used to count 'all' + for area in arealist: + + used = 0 + # initialize area in dictionary + if area not in central_storage_dict: + central_storage_dict[area] = {} + + if area == 'all': + fullpath = CENTRAL + else: + fullpath = os.path.join(CENTRAL, area) + + logging.info('Searching directory {}'.format(fullpath)) + counteddirs.append(fullpath) + + # to get df stats, use -k to get 1024 byte blocks + command = "df -k {}".format(fullpath) + command += " | awk '{print $2, $3, $4}' | tail -n 1" + stats = subprocess.check_output(command, shell=True).split() + # to put in TB, have to multiply values by 1024 to get in bytes, then + # divide by 1024 ^ 4 to put in TB + total = int(stats[0]) / (1024 ** 3) + free = int(stats[2]) / (1024 ** 3) + central_storage_dict[area]['size'] = total + central_storage_dict[area]['available'] = free + + # do an os.walk on each directory to count up used space + if area == 'all': + # get listing of subdirectories + subdirs = [f.path for f in os.scandir(fullpath) if f.is_dir()] + for onedir in subdirs: + if onedir not in counteddirs: + logging.info('Searching directory {}'.format(onedir)) + for dirpath, _, files in os.walk(onedir): + for filename in files: + file_path = os.path.join(dirpath, filename) + # Check if file_path exists, if so, add to used space + exists = os.path.isfile(file_path) + if exists: + filesize = os.path.getsize(file_path) + sums += filesize + use = sums / (1024 ** 4) + else: + for dirpath, _, files in os.walk(fullpath): + for filename in files: + file_path = os.path.join(dirpath, filename) + # Check if file_path exists, 
if so, add to used space + exists = os.path.isfile(file_path) + if exists: + filesize = os.path.getsize(file_path) + used += filesize + sums += filesize + use = used / (1024 ** 4) + central_storage_dict[area]['used'] = use + + logging.info('Finished searching central storage system') + return central_storage_dict + + def initialize_results_dicts(): """Initializes dictionaries that will hold filesystem statistics @@ -151,6 +231,8 @@ def initialize_results_dicts(): A dictionary for the ``filesystem_general`` database table instrument_results_dict : dict A dictionary for the ``filesystem_instrument`` database table + central_storage_dict : dict + A dictionary for the ``central_storage`` database table """ now = datetime.datetime.now() @@ -165,7 +247,10 @@ def initialize_results_dicts(): instrument_results_dict = {} instrument_results_dict['date'] = now - return general_results_dict, instrument_results_dict + central_storage_dict = {} + central_storage_dict['date'] = now + + return general_results_dict, instrument_results_dict, central_storage_dict @log_fail @@ -179,7 +264,7 @@ def monitor_filesystem(): logging.info('Beginning filesystem monitoring.') # Initialize dictionaries for database input - general_results_dict, instrument_results_dict = initialize_results_dicts() + general_results_dict, instrument_results_dict, central_storage_dict = initialize_results_dicts() # Walk through filesystem recursively to gather statistics general_results_dict, instrument_results_dict = gather_statistics(general_results_dict, instrument_results_dict) @@ -187,8 +272,11 @@ def monitor_filesystem(): # Get df style stats on file system general_results_dict = get_global_filesystem_stats(general_results_dict) + # Get stats on central storage areas + central_storage_dict = get_area_stats(central_storage_dict) + # Add data to database tables - update_database(general_results_dict, instrument_results_dict) + update_database(general_results_dict, instrument_results_dict, central_storage_dict) 
# Create the plots plot_filesystem_stats() @@ -292,17 +380,71 @@ def plot_filesystem_size(): return plot +def plot_central_store_dirs(): + """Plot central store sizes (size, used, available) versus date + + Returns + ------- + plot : bokeh.plotting.figure.Figure object + ``bokeh`` plot of total directory size versus date + """ + + # Plot system stats vs. date + results = session.query(CentralStore.date, CentralStore.size, CentralStore.available).all() + + arealist = ['logs', 'outputs', 'test', 'preview_images', 'thumbnails', 'all'] + + # Initialize plot + dates, total_sizes, availables = zip(*results) + plot = figure( + tools='pan,box_zoom,wheel_zoom,reset,save', + x_axis_type='datetime', + title='Central Store stats', + x_axis_label='Date', + y_axis_label='Size TB') + colors = itertools.cycle(palette) + + plot.line(dates, total_sizes, legend='Total size', line_color='red') + plot.circle(dates, total_sizes, color='red') + plot.line(dates, availables, legend='Free', line_color='blue') + plot.circle(dates, availables, color='blue') + + # This part of the plot should cycle through areas and plot area used values vs. date + for area, color in zip(arealist, colors): + + # Query for used sizes + results = session.query(CentralStore.date, CentralStore.used).filter(CentralStore.area == area) + + # Group by date + if results: + results_dict = defaultdict(int) + for date, value in results: + results_dict[date] += value + + # Parse results so they can be easily plotted + dates = list(results_dict.keys()) + values = list(results_dict.values()) + + # Plot the results + plot.line(dates, values, legend='{} files'.format(area), line_color=color) + plot.circle(dates, values, color=color) + + return plot + + def plot_filesystem_stats(): """ Plot various filesystem statistics using ``bokeh`` and save them to the output directory. 
""" + logging.info('Starting plots.') p1 = plot_total_file_counts() p2 = plot_filesystem_size() p3 = plot_by_filetype('count', 'all') p4 = plot_by_filetype('size', 'all') - plot_list = [p1, p2, p3, p4] + p5 = plot_central_store_dirs() + plot_list = [p1, p2, p3, p4, p5] for instrument in JWST_INSTRUMENT_NAMES: plot_list.append(plot_by_filetype('count', instrument)) @@ -367,7 +509,7 @@ def plot_total_file_counts(): return plot -def update_database(general_results_dict, instrument_results_dict): +def update_database(general_results_dict, instrument_results_dict, central_storage_dict): """Updates the ``filesystem_general`` and ``filesystem_instrument`` database tables. @@ -377,7 +519,11 @@ def update_database(general_results_dict, instrument_results_dict): A dictionary for the ``filesystem_general`` database table instrument_results_dict : dict A dictionary for the ``filesystem_instrument`` database table + central_storage_dict : dict + A dictionary for the ``central_storage`` database table + """ + logging.info('Updating databases.') engine.execute(FilesystemGeneral.__table__.insert(), general_results_dict) session.commit() @@ -391,10 +537,21 @@ def update_database(general_results_dict, instrument_results_dict): new_record['filetype'] = filetype new_record['count'] = instrument_results_dict[instrument][filetype]['count'] new_record['size'] = instrument_results_dict[instrument][filetype]['size'] - engine.execute(FilesystemInstrument.__table__.insert(), new_record) session.commit() + # Add data to central_storage table + arealist = ['logs', 'outputs', 'test', 'preview_images', 'thumbnails', 'all'] + for area in arealist: + new_record = {} + new_record['date'] = central_storage_dict['date'] + new_record['area'] = area + new_record['size'] = central_storage_dict[area]['size'] + new_record['used'] = central_storage_dict[area]['used'] + new_record['available'] = central_storage_dict[area]['available'] + engine.execute(CentralStore.__table__.insert(), new_record) + 
session.commit() + if __name__ == '__main__': diff --git a/jwql/tests/test_api_views.py b/jwql/tests/test_api_views.py index 939b79fab..25f93f62f 100644 --- a/jwql/tests/test_api_views.py +++ b/jwql/tests/test_api_views.py @@ -17,17 +17,19 @@ pytest -s test_api_views.py """ -import os +import http import json -import pytest +import os from urllib import request, error +import pytest + from jwql.utils.utils import get_base_url from jwql.utils.constants import JWST_INSTRUMENT_NAMES # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') # Determine if the local server is running try: @@ -71,7 +73,6 @@ urls.append('api/{}/thumbnails/'.format(rootname)) # thumbnails_by_rootname -# @pytest.mark.skipif(ON_JENKINS, reason='Requires access to central storage.') @pytest.mark.parametrize('url', urls) def test_api_views(url): """Test to see if the given ``url`` returns a populated JSON object @@ -104,6 +105,9 @@ def test_api_views(url): pytest.skip("Dev server problem") raise(e) - data = json.loads(url.read().decode()) - - assert len(data[data_type]) > 0 + try: + data = json.loads(url.read().decode()) + assert len(data[data_type]) > 0 + except (http.client.IncompleteRead) as e: + data = e.partial + assert len(data) > 0 diff --git a/jwql/tests/test_dark_monitor.py b/jwql/tests/test_dark_monitor.py index a30b62f00..270580e14 100644 --- a/jwql/tests/test_dark_monitor.py +++ b/jwql/tests/test_dark_monitor.py @@ -26,6 +26,8 @@ from jwql.instrument_monitors.common_monitors import dark_monitor from jwql.utils.utils import get_config +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') + def test_find_hot_dead_pixels(): """Test hot and dead pixel searches""" @@ -52,7 +54,7 @@ def test_find_hot_dead_pixels(): assert np.all(dead[1] == np.array([6, 3])) -@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins', +@pytest.mark.skipif(ON_JENKINS, reason='Requires access to 
central storage.') def test_get_metadata(): """Test retrieval of metadata from input file""" diff --git a/jwql/tests/test_database_interface.py b/jwql/tests/test_database_interface.py index 96bb884d0..898af27fb 100755 --- a/jwql/tests/test_database_interface.py +++ b/jwql/tests/test_database_interface.py @@ -25,16 +25,50 @@ import string from jwql.database import database_interface as di +from jwql.utils.constants import ANOMALIES +from jwql.utils.utils import get_config # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @pytest.mark.skipif(ON_JENKINS, reason='Requires access to development database server.') -def test_anomaly_table(): - """Test to see that the database has an anomalies table""" +def test_all_tables_exist(): + """Test that the table ORMs defined in ``database_interface`` + actually exist as tables in the database""" - assert 'anomaly' in di.engine.table_names() + # Get list of table ORMs from database_interface + table_orms = [] + database_interface_attributes = di.__dict__.keys() + for attribute in database_interface_attributes: + table_object = getattr(di, attribute) + try: + table_orms.append(table_object.__tablename__) + except AttributeError: + pass # Not all attributes of database_interface are table ORMs + + # Get list of tables that are actually in the database + existing_tables = di.engine.table_names() + + # Ensure that the ORMs defined in database_interface actually exist + # as tables in the database + for table in table_orms: + assert table in existing_tables + + +def test_anomaly_orm_factory(): + """Test that the ``anomaly_orm_factory`` function successfully + creates an ORM and contains the appropriate columns""" + + test_table_name = 'test_anomaly_table' + TestAnomalyTable = di.anomaly_orm_factory('test_anomaly_table') + table_attributes = TestAnomalyTable.__dict__.keys() + + assert str(TestAnomalyTable) == ""\ + 
.format(test_table_name) + + for anomaly in ANOMALIES: + assert anomaly in table_attributes @pytest.mark.skipif(ON_JENKINS, reason='Requires access to development database server.') @@ -42,10 +76,60 @@ def test_anomaly_records(): """Test to see that new records can be entered""" # Add some data - random_string = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.ascii_uppercase + string.digits) for _ in range(10)) - di.session.add(di.Anomaly(rootname=random_string, flag_date=datetime.datetime.today(), user='test', ghost=True)) + random_rootname = ''.join(random.SystemRandom().choice(string.ascii_lowercase + \ + string.ascii_uppercase + \ + string.digits) for _ in range(10)) + di.session.add(di.Anomaly(rootname=random_rootname, + flag_date=datetime.datetime.today(), + user='test', ghost=True)) di.session.commit() # Test the ghosts column - ghosts = di.session.query(di.Anomaly).filter(di.Anomaly.ghost == "True") + ghosts = di.session.query(di.Anomaly)\ + .filter(di.Anomaly.rootname == random_rootname)\ + .filter(di.Anomaly.ghost == "True") assert ghosts.data_frame.iloc[0]['ghost'] == True + + +@pytest.mark.skipif(ON_JENKINS, reason='Requires access to development database server.') +def test_load_connections(): + """Test to see that a connection to the database can be + established""" + + session, base, engine, meta = di.load_connection(get_config()['connection_string']) + assert str(type(session)) == "" + assert str(type(base)) == "" + assert str(type(engine)) == "" + assert str(type(meta)) == "" + + +def test_monitor_orm_factory(): + """Test that the ``monitor_orm_factory`` function successfully + creates an ORM and contains the appropriate columns""" + + test_table_name = 'instrument_test_monitor_table' + + # Create temporary table definitions file + test_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), + 'database', 'monitor_table_definitions', 'instrument') + test_filename = os.path.join(test_dir, '{}.txt'.format(test_table_name)) 
+ if not os.path.isdir(test_dir): + os.mkdir(test_dir) + with open(test_filename, 'w') as f: + f.write('TEST_COLUMN, string') + + # Create the test table ORM + TestMonitorTable = di.monitor_orm_factory(test_table_name) + table_attributes = TestMonitorTable.__dict__.keys() + + # Ensure the ORM exists and contains appropriate columns + assert str(TestMonitorTable) == ""\ + .format(test_table_name) + for column in ['id', 'entry_date', 'test_column']: + assert column in table_attributes + + # Remove test files and directories + if os.path.isfile(test_filename): + os.remove(test_filename) + if os.path.isdir(test_dir): + os.rmdir(test_dir) diff --git a/jwql/tests/test_edb.py b/jwql/tests/test_edb.py index bc2fff119..9b13ad44d 100644 --- a/jwql/tests/test_edb.py +++ b/jwql/tests/test_edb.py @@ -24,7 +24,7 @@ import pytest # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @pytest.mark.skipif(ON_JENKINS, reason='Requires access to central storage.') diff --git a/jwql/tests/test_instrument_properties.py b/jwql/tests/test_instrument_properties.py index 5e9af4784..7f18e6cbd 100644 --- a/jwql/tests/test_instrument_properties.py +++ b/jwql/tests/test_instrument_properties.py @@ -25,8 +25,10 @@ from jwql.utils import instrument_properties from jwql.utils.utils import get_config +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') -@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins', + +@pytest.mark.skipif(ON_JENKINS, reason='Requires access to central storage.') def test_amplifier_info(): """Test that the correct number of amplifiers are found for a given diff --git a/jwql/tests/test_loading_times.py b/jwql/tests/test_loading_times.py index 501028133..d0ec02fe9 100644 --- a/jwql/tests/test_loading_times.py +++ b/jwql/tests/test_loading_times.py @@ -29,7 +29,7 @@ TIME_CONSTRAINT = 30 # seconds # Determine if tests are being run on jenkins -ON_JENKINS = 
os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') urls = [] diff --git a/jwql/tests/test_logging_functions.py b/jwql/tests/test_logging_functions.py index 83eb62bc2..f523d244a 100644 --- a/jwql/tests/test_logging_functions.py +++ b/jwql/tests/test_logging_functions.py @@ -28,7 +28,7 @@ from jwql.utils.utils import get_config # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @log_fail diff --git a/jwql/tests/test_permissions.py b/jwql/tests/test_permissions.py index 01f67de64..753b867ca 100755 --- a/jwql/tests/test_permissions.py +++ b/jwql/tests/test_permissions.py @@ -30,7 +30,7 @@ TEST_DIRECTORY = os.path.join(os.environ['HOME'], 'permission_test') # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @pytest.fixture(scope="module") diff --git a/jwql/tests/test_pipeline_tools.py b/jwql/tests/test_pipeline_tools.py index cb3653841..b0ad381e0 100644 --- a/jwql/tests/test_pipeline_tools.py +++ b/jwql/tests/test_pipeline_tools.py @@ -27,7 +27,7 @@ from jwql.utils.utils import get_config # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @pytest.mark.skipif(ON_JENKINS, reason='Requires access to central storage.') diff --git a/jwql/tests/test_plotting.py b/jwql/tests/test_plotting.py index 14dcb07f4..1228a405a 100755 --- a/jwql/tests/test_plotting.py +++ b/jwql/tests/test_plotting.py @@ -18,14 +18,24 @@ pytest -s test_plotting.py """ +import glob +import os +import re +import sys + +import bokeh from pandas import DataFrame +import pytest from jwql.utils.plotting import bar_chart +__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) +JWQL_DIR = 
__location__.split('tests')[0] + def test_bar_chart(): """Make sure some dummy data generates a ``bokeh`` plot""" - + # Make a toy dataframe data = DataFrame({'meow': {'foo': 12, 'bar': 23, 'baz': 2}, 'mix': {'foo': 45, 'bar': 31, 'baz': 23}, @@ -36,3 +46,29 @@ def test_bar_chart(): plt = bar_chart(data, 'index') assert str(type(plt)) == "" + + +@pytest.mark.skipif(sys.version_info[:2] != (3, 6), + reason="Web server run on Python 3.6") +def test_bokeh_version(): + """Make sure that the current version of Bokeh matches the version being + used in all the web app HTML templates. + """ + env_version = bokeh.__version__ + + template_paths = os.path.join(JWQL_DIR, 'website/apps/jwql/templates', '*.html') + all_web_html_files = glob.glob(template_paths) + + for file in all_web_html_files: + with open(file, 'r+', encoding="utf-8") as f: + content = f.read() + + # Find all of the times "bokeh-#.#.#' appears in a template + html_versions = re.findall(r'(?<=bokeh-)\d+\.\d+\.\d+', content) + html_versions += re.findall(r'(?<=bokeh-widgets-)\d+\.\d+\.\d+', content) + + # Make sure they all match the environment version + for version in html_versions: + assert version == env_version, \ + 'Bokeh version ({}) in HTML template {} '.format(version, os.path.basename(file)) + \ + 'does not match current environment version ({}).'.format(env_version) diff --git a/jwql/tests/test_preview_image.py b/jwql/tests/test_preview_image.py index 5b465995f..2af6637a5 100644 --- a/jwql/tests/test_preview_image.py +++ b/jwql/tests/test_preview_image.py @@ -34,7 +34,7 @@ TEST_DIRECTORY = os.path.join(os.environ['HOME'], 'preview_image_test') # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') @pytest.fixture(scope="module") diff --git a/jwql/tests/test_utils.py b/jwql/tests/test_utils.py index 94a33572f..1446dbdc8 100644 --- a/jwql/tests/test_utils.py +++ b/jwql/tests/test_utils.py @@ -22,10 
+22,11 @@ from pathlib import Path import pytest -from jwql.utils.utils import copy_files, get_config, filename_parser, filesystem_path +from jwql.utils.utils import copy_files, get_config, filename_parser, \ + filesystem_path, _validate_config # Determine if tests are being run on jenkins -ON_JENKINS = os.path.expanduser('~') == '/home/jenkins' +ON_JENKINS = '/home/jenkins' in os.path.expanduser('~') FILENAME_PARSER_TEST_DATA = [ @@ -344,3 +345,45 @@ def test_filesystem_path(): location = os.path.join(get_config()['filesystem'], 'jw96003', filename) assert check == location + + +def test_validate_config(): + """Test that the config validator works.""" + # Make sure a bad config raises an error + bad_config_dict = {"just": "one_key"} + + with pytest.raises(Exception) as excinfo: + _validate_config(bad_config_dict) + assert 'Provided config.json does not match the required JSON schema' \ + in str(excinfo.value), \ + 'Failed to reject incorrect JSON dict.' + + # Make sure a good config does not! 
+ good_config_dict = { + "connection_string": "", + "database": { + "engine": "", + "name": "", + "user": "", + "password": "", + "host": "", + "port": "" + }, + "filesystem": "", + "preview_image_filesystem": "", + "thumbnail_filesystem": "", + "outputs": "", + "jwql_dir": "", + "admin_account": "", + "log_dir": "", + "test_dir": "", + "test_data": "", + "setup_file": "", + "auth_mast": "", + "client_id": "", + "client_secret": "", + "mast_token": "" + } + + is_valid = _validate_config(good_config_dict) + assert is_valid is None, 'Failed to validate correct JSON dict' diff --git a/jwql/utils/credentials.py b/jwql/utils/credentials.py index d307d27d4..3677cf429 100644 --- a/jwql/utils/credentials.py +++ b/jwql/utils/credentials.py @@ -20,7 +20,7 @@ from astroquery.mast import Mast -from jwql.utils.utils import get_config, check_config +from jwql.utils.utils import get_config, check_config_for_key def get_mast_token(request=None): @@ -48,7 +48,7 @@ def get_mast_token(request=None): return token try: # check if token is available via config file - check_config('mast_token') + check_config_for_key('mast_token') token = get_config()['mast_token'] print('Authenticated with config.json MAST token.') return token diff --git a/jwql/utils/utils.py b/jwql/utils/utils.py index fdb48dd7b..3ef082e1a 100644 --- a/jwql/utils/utils.py +++ b/jwql/utils/utils.py @@ -36,6 +36,8 @@ import re import shutil +import jsonschema + from jwql.utils import permissions from jwql.utils.constants import FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES_SHORTHAND @@ -387,6 +389,7 @@ def get_config(): """ config_file_location = os.path.join(__location__, 'config.json') + # Make sure the file exists if not os.path.isfile(config_file_location): raise FileNotFoundError('The JWQL package requires a configuration file (config.json) ' 'to be placed within the jwql/utils directory. 
' @@ -394,13 +397,22 @@ def get_config(): '(https://github.com/spacetelescope/jwql/wiki/' 'Config-file) for more information.') - with open(config_file_location, 'r') as config_file: - settings = json.load(config_file) + with open(config_file_location, 'r') as config_file_object: + try: + # Load it with JSON + settings = json.load(config_file_object) + except json.JSONDecodeError as e: + # Raise a more helpful error if there is a formatting problem + raise ValueError('Incorrectly formatted config.json file. ' + 'Please fix JSON formatting: {}'.format(e)) + + # Ensure the file has all the needed entries with expected data types + _validate_config(settings) return settings -def check_config(key): +def check_config_for_key(key): """Check that the config.json file contains the specified key and that the entry is not empty @@ -426,6 +438,70 @@ def check_config(key): ) +def _validate_config(config_file_dict): + """Check that the config.json file contains all the needed entries with + expected data types + + Parameters + ---------- + config_file_dict : dict + The configuration JSON file loaded as a dictionary + + Notes + ----- + See here for more information on JSON schemas: + https://json-schema.org/learn/getting-started-step-by-step.html + """ + # Define the schema for config.json + schema = { + "type": "object", # Must be a JSON object + "properties": { # List all the possible entries and their types + "connection_string": {"type": "string"}, + "database": { + "type": "object", + "properties": { + "engine": {"type": "string"}, + "name": {"type": "string"}, + "user": {"type": "string"}, + "password": {"type": "string"}, + "host": {"type": "string"}, + "port": {"type": "string"} + }, + "required": ['engine', 'name', 'user', 'password', 'host', 'port'] + }, + "filesystem": {"type": "string"}, + "preview_image_filesystem": {"type": "string"}, + "thumbnail_filesystem": {"type": "string"}, + "outputs": {"type": "string"}, + "jwql_dir": {"type": "string"}, + "admin_account": 
{"type": "string"}, + "log_dir": {"type": "string"}, + "test_dir": {"type": "string"}, + "test_data": {"type": "string"}, + "setup_file": {"type": "string"}, + "auth_mast": {"type": "string"}, + "client_id": {"type": "string"}, + "client_secret": {"type": "string"}, + "mast_token": {"type": "string"}, + }, + # List which entries are needed (all of them) + "required": ["connection_string", "database", "filesystem", + "preview_image_filesystem", "thumbnail_filesystem", + "outputs", "jwql_dir", "admin_account", "log_dir", + "test_dir", "test_data", "setup_file", "auth_mast", + "client_id", "client_secret", "mast_token"] + } + + # Test that the provided config file dict matches the schema + try: + jsonschema.validate(instance=config_file_dict, schema=schema) + except jsonschema.ValidationError as e: + raise jsonschema.ValidationError( + 'Provided config.json does not match the ' + \ + 'required JSON schema: {}'.format(e.message) + ) + + def initialize_instrument_monitor(module): """Configures a log file for the instrument monitor run and captures the start time of the monitor diff --git a/jwql/website/apps/jwql/context_processors.py b/jwql/website/apps/jwql/context_processors.py index 9cadcbea7..65c40d0db 100644 --- a/jwql/website/apps/jwql/context_processors.py +++ b/jwql/website/apps/jwql/context_processors.py @@ -23,6 +23,8 @@ As such, it will automatically be executed upon each request. 
""" +import bokeh + import jwql from jwql.utils.constants import JWST_INSTRUMENT_NAMES, MONITORS @@ -50,5 +52,6 @@ def base_context(request, user): context['tools'] = MONITORS context['user'] = user context['version'] = jwql.__version__ + context['bokeh_version'] = bokeh.__version__ return context diff --git a/jwql/website/apps/jwql/data_containers.py b/jwql/website/apps/jwql/data_containers.py index 039bf07ba..05750df9e 100644 --- a/jwql/website/apps/jwql/data_containers.py +++ b/jwql/website/apps/jwql/data_containers.py @@ -34,8 +34,8 @@ # astroquery.mast import that depends on value of auth_mast # this import has to be made before any other import of astroquery.mast -from jwql.utils.utils import get_config, filename_parser, check_config -check_config('auth_mast') +from jwql.utils.utils import get_config, filename_parser, check_config_for_key +check_config_for_key('auth_mast') auth_mast = get_config()['auth_mast'] mast_flavour = '.'.join(auth_mast.split('.')[1:]) from astropy import config diff --git a/jwql/website/apps/jwql/oauth.py b/jwql/website/apps/jwql/oauth.py index 086334e73..70167c088 100644 --- a/jwql/website/apps/jwql/oauth.py +++ b/jwql/website/apps/jwql/oauth.py @@ -46,7 +46,7 @@ def login(request): import jwql from jwql.utils.constants import MONITORS -from jwql.utils.utils import get_base_url, get_config, check_config +from jwql.utils.utils import get_base_url, get_config, check_config_for_key PREV_PAGE = '/' @@ -64,7 +64,7 @@ def register_oauth(): # Get configuration parameters for key in ['client_id', 'client_secret', 'auth_mast']: - check_config(key) + check_config_for_key(key) client_id = get_config()['client_id'] client_secret = get_config()['client_secret'] auth_mast = get_config()['auth_mast'] @@ -168,7 +168,7 @@ def user_info(request, **kwargs): # If user is authenticated, return user credentials if cookie is not None: - check_config('auth_mast') + check_config_for_key('auth_mast') # Note: for now, this must be the development version 
auth_mast = get_config()['auth_mast'] diff --git a/jwql/website/apps/jwql/static/css/jwql.css b/jwql/website/apps/jwql/static/css/jwql.css index b801c8358..162bcbc8e 100644 --- a/jwql/website/apps/jwql/static/css/jwql.css +++ b/jwql/website/apps/jwql/static/css/jwql.css @@ -2,12 +2,6 @@ list-style: none; } -.anomaly_form { - position: absolute; - top: 50%; - transform: translateY(-50%); -} - .APT_parameters { width: 20% } @@ -37,6 +31,7 @@ border-color: #c85108 !important; color: white !important; border-radius: 0px; + text-decoration: none; } /*Make outline buttons and highlighted normal buttons white*/ @@ -46,12 +41,14 @@ border-color: #c85108 !important ; color: #c85108 !important; border-radius: 0px; + text-decoration: none; } /*Stop them from glowing blue*/ -.btn:focus, .btn.focus, .btn:active:focus, .btn.active:focus, .btn:active, +.btn.focus, .btn:active:focus, .btn.active:focus, .btn:active, .btn.active, .show > .btn.dropdown-toggle:focus { box-shadow: none !important; + text-decoration: none; } [class*="col-"] { @@ -67,6 +64,14 @@ margin-right: 2%; } +/* Show the dropdown menu on hover */ +/* DO NOT how the dropdown menu on hover if the navbar is collapsed */ +@media only screen and (min-width: 1200px) { + li.dropdown:hover .dropdown-menu { + display: block; + } +} + /* Make disabled dropdown items grey and unclickable */ .disabled-dropdown { color: #bec4d4 !important; @@ -76,8 +81,7 @@ /*Define dropdown menu colors*/ .dropdown-item:hover{ - color: #c85108; - background-color: #2d353c; + background-color: black; } .dropdown-menu { @@ -234,6 +238,11 @@ display : inline; display: inline-block; } +/*Make H2 header smaller for select pages*/ +#instrument_main h2, .mnemonic_trending_main h2 { + font-size: 1.75rem; +} + .instrument-name { font-size: 25px; color: white; @@ -293,12 +302,19 @@ display : inline; height: 100%; } +/* Change color of dropdown links on hover */ +li:hover .nav-link, .navbar-brand:hover { + color: #fff !important; +} + /* Define navbar 
color*/ .navbar { background-color: black; } -.navbar-brand { - color: white; + +/*Define navbar font color and case*/ +.nav-link { + color: #bec4d4 !important; text-transform: uppercase; } @@ -308,12 +324,6 @@ display : inline; padding-right:10px; } -/*Define navbar font color*/ -.nav-link, .nav-link.disabled { - color: #bec4d4; - text-transform: uppercase; -} - /* Get rid of padding around GitHub logo */ #github-link { padding-bottom: 0px; @@ -322,11 +332,14 @@ display : inline; /* Set username to be orange */ #oauth_user { + text-transform: uppercase; color: #c85108; + padding-right: 1rem; } #oauth_user:hover { color: white; + text-decoration: none; } .plot-container { @@ -482,12 +495,22 @@ display : inline; /*Format the version identifier text in bottom corner*/ #version-div { float: right; - width: 120px; + width: 180px; text-align: right; color: white; font-size: 12px } +/*Add underline for links*/ +a { + text-decoration: underline; +} + +/*Don't add underline for navbar and button links*/ +nav a, .btn { + text-decoration: none; +} + body { padding-top: 8rem; } @@ -506,16 +529,6 @@ h1 { letter-spacing: 0.05em; } -/* Change color of dropdown links on hover */ -li:hover .nav-link, .navbar-brand:hover { - color: #fff; -} - -/* Show the dropdown menu on hover */ -li.dropdown:hover .dropdown-menu { - display: block; -} - ul.no-bullets { list-style: none; padding-left:10px; diff --git a/jwql/website/apps/jwql/static/js/jwql.js b/jwql/website/apps/jwql/static/js/jwql.js index e47556877..a15f03a7b 100644 --- a/jwql/website/apps/jwql/static/js/jwql.js +++ b/jwql/website/apps/jwql/static/js/jwql.js @@ -331,7 +331,8 @@ function update_archive_page(inst, base_url) { // Build div content content = '
'; content += ''; + content += '' + content += ''; content += '
'; content += '
'; content += '

' + prop + '

'; diff --git a/jwql/website/apps/jwql/templates/404_space.html b/jwql/website/apps/jwql/templates/404_space.html index 1f1aa2ad9..a638f6e8f 100644 --- a/jwql/website/apps/jwql/templates/404_space.html +++ b/jwql/website/apps/jwql/templates/404_space.html @@ -19,7 +19,7 @@ diff --git a/jwql/website/apps/jwql/templates/404_spacecat.html b/jwql/website/apps/jwql/templates/404_spacecat.html index da4585539..5675ff0b1 100644 --- a/jwql/website/apps/jwql/templates/404_spacecat.html +++ b/jwql/website/apps/jwql/templates/404_spacecat.html @@ -19,7 +19,7 @@ diff --git a/jwql/website/apps/jwql/templates/about.html b/jwql/website/apps/jwql/templates/about.html index a046d9901..6cfb01ec5 100644 --- a/jwql/website/apps/jwql/templates/about.html +++ b/jwql/website/apps/jwql/templates/about.html @@ -9,7 +9,7 @@ {% block content %}
-

+
The JWQL logo


About the project

@@ -18,7 +18,12 @@

About the project

The project consists of the following components:

-

+
+ MAST Cache, MAST API, JWQL Repo, VM, Web App, Automation +

The JWQL application is currently under heavy development. The 1.0 release is expected in 2019.

@@ -58,7 +63,10 @@

Acknowledgements

Other attributions:

- The loading animation 'magnify' is provided by
loading.io +
    +
  • This website aims to be accessible via 508 compliance. Guidelines can be found on this innerspace page. +
  • The loading animation 'magnify' is provided by loading.io
  • +
diff --git a/jwql/website/apps/jwql/templates/base.html b/jwql/website/apps/jwql/templates/base.html index 315bcc540..1b7eefb06 100644 --- a/jwql/website/apps/jwql/templates/base.html +++ b/jwql/website/apps/jwql/templates/base.html @@ -18,12 +18,16 @@ + + + - - + + + {% block preamble %} @@ -33,33 +37,82 @@ + + Jump to main content + -