diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..39515b10f --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,62 @@ +name: build + +on: + release: + types: [ released ] + pull_request: + workflow_dispatch: + +defaults: + run: + shell: micromamba-shell {0} + +jobs: + build: + uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1 + with: + upload_to_pypi: ${{ (github.event_name == 'release') && (github.event.action == 'released') }} + secrets: + pypi_token: ${{ secrets.PYPI_PASSWORD_STSCI_MAINTAINER }} + freeze: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: + [ "ubuntu-latest", "macos-latest" ] + python-version: + [ "3.11", "3.12" ] + fail-fast: false + name: freeze versions (Python ${{ matrix.python-version }}, ${{ matrix.os }}) + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: mamba-org/setup-micromamba@v1 + with: + environment-name: jwql-${{ runner.os }}-py${{ matrix.python-version }} + environment-file: environment.yml + create-args: >- + python=${{ matrix.python-version }} + conda + init-shell: none + generate-run-shell: true + - run: pip install . + - run: pip list + - id: version + uses: mtkennerly/dunamai-action@v1 + with: + args: --strict --pattern "default-unprefixed" --style semver + - id: filename + run: echo "filename=jwql_${{ steps.version.outputs.version }}_conda_${{ runner.os }}_${{ runner.arch }}_py${{ matrix.python-version }}.yml" >> $GITHUB_OUTPUT + - run: conda env export --no-build | grep -v "name:" | grep -v "prefix:" > ${{ steps.filename.outputs.filename }} + - run: cat ${{ steps.filename.outputs.filename }} + - uses: actions/upload-artifact@v4 + with: + name: ${{ steps.filename.outputs.filename }} + path: ${{ steps.filename.outputs.filename }} + - if: (github.event_name == 'release' && github.event.action == 'released') + uses: svenstaro/upload-release-action@v2 + with: + file: ${{ steps.filename.outputs.filename }} + + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 54147a822..f2f1727ea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,15 +1,13 @@ -name: JWQL CI +name: test -on: [push, pull_request] +on: + push: + pull_request: concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -defaults: - run: - shell: bash -l {0} - jobs: check: runs-on: ubuntu-latest @@ -17,7 +15,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: - python-version: "3.x" + python-version: "3.11" - run: pip install bandit - run: bandit ./jwql/ -c .bandit @@ -25,30 +23,38 @@ jobs: name: test (Python ${{ matrix.python-version }}, ${{ matrix.os }}) runs-on: ${{ matrix.os }} strategy: - max-parallel: 5 - fail-fast: false matrix: - os: [ubuntu-latest, macos-latest] - python-version: [3.9, "3.10"] - + os: [ "ubuntu-latest", "macos-latest" ] + python-version: [ "3.11", "3.12" ] + fail-fast: false + env: + PYTHONUNBUFFERED: 1 + defaults: + run: + shell: micromamba-shell {0} steps: - uses: actions/checkout@v4 - - - uses: mamba-org/provision-with-micromamba@v15 + + - uses: mamba-org/setup-micromamba@v1 with: - environment-file: ./environment_python_${{ matrix.python-version }}.yml - cache-env: true - cache-downloads: true + environment-name: jwql-${{ runner.os }}-py${{ matrix.python-version }} + environment-file: environment.yml + create-args: >- + python=${{ matrix.python-version }} + init-shell: none + generate-run-shell: true + + - run: pip install -e .[test] 
pytest-xdist - - run: pip install -e .[test] + - run: pip list - - run: conda env export + - run: micromamba env export - - uses: supercharge/redis-github-action@1.4.0 + - if: runner.os == 'Linux' + uses: supercharge/redis-github-action@1.4.0 with: redis-version: 5.0 - if: runner.os == 'Linux' - run: python -c "import jwql; print('Version ' + jwql.__version__); print('Path ' + jwql.__path__[0])" - - run: pytest jwql/tests/ + - run: pytest -n auto jwql/tests/ diff --git a/.gitignore b/.gitignore index 64320a824..a6c451e08 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,4 @@ jwql/website/apps/jwql/static/filesystem jwql/website/apps/jwql/static/outputs jwql/website/apps/jwql/static/preview_images jwql/website/apps/jwql/static/thumbnails +jwql-current.yml diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 069f15a23..82276d712 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -12,10 +12,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.9" - jobs: - post_install: - - pip install sqlalchemy==1.4.46 + python: "3.11" # Build documentation in the docs/ directory with Sphinx sphinx: @@ -27,6 +24,7 @@ sphinx: # Optionally declare the Python requirements required to build your docs python: install: - - requirements: rtd_requirements.txt - method: pip path: . + extra_requirements: + - docs diff --git a/CHANGES.rst b/CHANGES.rst index 4f9834578..b29b79840 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,138 @@ ## What's Changed +1.2.11 (2024-08-26) +=================== + +Web Application +~~~~~~~~~~~~~~~ +- Customize colorbar location and size based on aperture size by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1592 +- Add keyword to specify program IDs for preview image creation by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1602 +- Adjust view image and view exposure to allow for larger preview images by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1600 +- filter thumbnails by filter/pupil/grating by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1623 +- Add Image and Exposure level Comments by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1624 +- Update NRS TA Monitor Plotting by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1633 +- Fix readnoise monitor plot y range by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1634 + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- Update Bokeh `filter` Keyword in NRS TA Monitors by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1620 +- Switch dark monitor to use django models by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1493 +- Fix log cleanup script by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1628 +- Fix missing data in readnoise monitor plots by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1631 +- filename_parser() no longer raises an exception for unrecognized files by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1614 + + +1.2.10 (2024-07-10) +=================== + +Duplicate of 1.2.9, caused by changes to versioning conflicts with pypi. + + +1.2.9 (2024-07-10) +================== + +Web Application +~~~~~~~~~~~~~~~ +- Add Download CSV button to query page by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1561 +- show file anomalies on exposure group page by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1564 +- create generic error page to handle exceptions in views. 
by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1549 + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- final model define for faking by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1544 +- Update Redis Package Names in Environment Files by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1546 +- [SCSB-145] require Python 3.10 by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1515 +- debug false by default by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1550 +- Update NIRSpec TA Monitors to use Django DB Models by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1499 +- Update NIRSpec TA Models by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1565 +- Remove codecov.yml by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1588 +- Remove filename parser test over filesystem by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1586 +- Update remote to upstream in pull_jwql_branch.sh by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1591 +- Add Dependencies for Servers in `pyproject.toml` by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1568 +- fix release upload step condition to match workflow trigger by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1593 +- fix environment freeze workflow not picking up tag by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1594 +- fix version matching pattern by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1595 +- updating freeze matrix to include linux, mac and python 3.12 by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1596 +- Remove P750L from list of NIRSpec filters by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1598 +- [build] fix `runs-on:` and update build filename for easier parsing by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1599 +- upload to PyPI on release by @zacharyburnett in https://github.com/spacetelescope/jwql/pull/1601 +- Updating jwst_reffiles version number by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1606 +- Remove old presentations from repo by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1607 +- Num results fix by @BradleySappington in https://github.com/spacetelescope/jwql/pull/1608 +- Add Environment Update Script by @mfixstsci in https://github.com/spacetelescope/jwql/pull/1609 +- Add new NIRISS AMI-related suffixes by @bhilbert4 in https://github.com/spacetelescope/jwql/pull/1613 + + +1.2.8 (2024-04-18) +================== + +Web Application +~~~~~~~~~~~~~~~ + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- final model define for faking by @BradleySappington + + +1.2.7 (2024-04-18) +================== + +Web Application +~~~~~~~~~~~~~~~ + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- Import monitor models by @bhilbert4 + + +1.2.6 (2024-04-15) +================== + +Web Application +~~~~~~~~~~~~~~~ +- Update NIRCam Background Monitor plots to handle new Claw Monitor columns by @bsunnquist + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- add default to read_patt_num by @BradleySappington +- migration work for next RC by @BradleySappington +- Fix database empty fields by @BradleySappington + + +1.2.5 (2024-03-19) +================== + +Web Application +~~~~~~~~~~~~~~~ +- Fix Bokeh `file_html` Call by @mfixstsci +- Update Bad Pix Exclude Line by @mfixstsci +- Interactive preview image - updates for Bokeh 3 by 
@bhilbert4 + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- Allow creation of pngs from 3D and 4D arrays by @bhilbert4 +- Add max length to charfield by @BradleySappington +- Header fix by @BradleySappington + + +1.2.4 (2024-03-11) +================== + +Web Application +~~~~~~~~~~~~~~~ +- Fix Broken Dashboard - ReImplement Anomaly Dash @BradleySappington +- Add more info to image- and exposure-level pages @bhilbert4 + +Project & API Documentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +- create pull_jwql_branch.sh by @BradleySappington +- Claw monitor migrate by @york-stsci +- Update readnoise monitor to use django database models by @york-stsci +- Migration to django management by @york-stsci +- Get subarray lists from datamodels schema by @bhilbert4 +- Retrieve program number and category efficiently by @bhilbert4 + + 1.2.3 (2024-01-26) ================== diff --git a/README.md b/README.md index 377005a11..fcfa65b49 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ Getting `jwql` up and running on your own computer requires four steps, detailed ### Prerequisites -It is highly suggested that contributors have a working installation of `anaconda` or `miniconda` for Python 3.9+. Downloads and installation instructions are available here: +It is highly suggested that contributors have a working installation of `anaconda` or `miniconda` for Python 3.10+. Downloads and installation instructions are available here: - [Miniconda](https://conda.io/miniconda.html) - [Anaconda](https://www.continuum.io/downloads) @@ -86,13 +86,13 @@ source activate base/root Lastly, create the `jwql` environment via one of the `environment.yml` files (currently `environment_python_3.9.yml`, for python 3.9, and `environment_python_3.10.yml`, for python 3.10, are supported by `jwql`): ``` -conda env create -f environment_python_3.9.yml +conda env create -f environment_python_3.10.yml ``` or ``` -conda env create -f environment_python_3.10.yml +conda env create -f environment_python_3.11.yml ``` ### Configuration File diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index 237a28191..000000000 --- a/codecov.yml +++ /dev/null @@ -1,36 +0,0 @@ -codecov: - notify: - require_ci_to_pass: no - -coverage: - precision: 2 - round: down - range: "0...75" - - status: - project: off - patch: off - changes: off - -parsers: - gcov: - branch_detection: - conditional: yes - loop: yes - method: no - macro: no - -comment: - layout: "header, diff, files" - behavior: default - require_changes: no - -ignore: - - "jwql/database/" - - "jwql/tests/" - - "jwql/instrument_monitors/miri_monitors/data_trending/plots/" - - "jwql/instrument_monitors/nirspec_monitors/data_trending/plots/" - - "*__init__.py*" - - "**/*.html" - - "**/*.js" - - "**/*.css" diff --git a/environment.yml b/environment.yml new file mode 100644 index 000000000..53e6986a7 --- /dev/null +++ b/environment.yml @@ -0,0 +1,3 @@ +dependencies: + - firefox + - python diff --git a/environment_python_3.10.yml b/environment_python_3.10.yml deleted file mode 100644 index e322ea9ca..000000000 --- a/environment_python_3.10.yml +++ /dev/null @@ -1,74 +0,0 @@ -# This file describes a conda environment that can be to install jwql -# -# Run the following command to set up this environment: -# $ conda env create -f environment_python_3.10.yml -# -# The environment name can be overridden with the following command: -# $ conda env create -n -f environment_python_3.10.yml -# -# Run the following command to activate the environment: -# $ source activate jwql-3.10 -# -# 
To deactivate the environment run the following command: -# $ source deactivate -# -# To remove the environment entirely, run the following command: -# $ conda env remove -n jwql-3.10 - -name: jwql-3.10 - -channels: - - conda-forge - - defaults - -dependencies: - - astropy=5.3.4 - - beautifulsoup4=4.12.2 - - bokeh=3.3.0 - - celery=5.3.4 - - cryptography=41.0.4 - - django=4.2.6 - - inflection=0.5.1 - - ipython=8.16.1 - - jinja2=3.1.2 - - jsonschema=4.19.1 - - matplotlib=3.8.0 - - nodejs=20.8.0 - - numpy=1.25.2 - - numpydoc=1.5.0 - - pandas=2.1.1 - - pip=23.2.1 - - postgresql=15.4 - - psycopg2=2.9.7 - - pytest=7.4.2 - - pytest-cov=4.1.0 - - pytest-mock=3.11.1 - - python=3.10.12 - - pyyaml=6.0.1 - - redis=5.0.0 - - ruff=0.0.292 - - scipy=1.9.3 - - setuptools=68.2.2 - - sphinx=7.2.6 - - sphinx_rtd_theme=1.3.0 - - sqlalchemy=2.0.21 - - twine=4.0.2 - - wtforms=3.0.1 - - - pip: - - astroquery==0.4.6 - - bandit==1.7.5 - - jwst==1.12.3 - - jwst_backgrounds==1.2.0 - - pysiaf==0.20.0 - - pysqlite3==0.5.2 - - pyvo==1.4.2 - - redis==5.0.0 - - selenium==4.13.0 - - stdatamodels==1.8.3 - - stsci_rtd_theme==1.0.0 - - vine==5.0.0 - - git+https://github.com/spacetelescope/jwst_reffiles - - # Current package - - -e . diff --git a/environment_python_3.9.yml b/environment_python_3.9.yml deleted file mode 100644 index a68f005c5..000000000 --- a/environment_python_3.9.yml +++ /dev/null @@ -1,74 +0,0 @@ -# This file describes a conda environment that can be to install jwql -# -# Run the following command to set up this environment: -# $ conda env create -f environment_python_3.9.yml -# -# The environment name can be overridden with the following command: -# $ conda env create -n -f environment_python_3.9.yml -# -# Run the following command to activate the environment: -# $ source activate jwql-3.9 -# -# To deactivate the environment run the following command: -# $ source deactivate -# -# To remove the environment entirely, run the following command: -# $ conda env remove -n jwql-3.9 - -name: jwql-3.9 - -channels: - - conda-forge - - defaults - -dependencies: - - astropy=5.3.3 - - beautifulsoup4=4.12.2 - - bokeh=3.3.0 - - celery=5.3.4 - - cryptography=41.0.4 - - django=4.2.5 - - inflection=0.5.1 - - ipython=8.16.1 - - jinja2=3.1.2 - - jsonschema=4.19.1 - - matplotlib=3.8.0 - - nodejs=20.8.0 - - numpy=1.25.2 - - numpydoc=1.5.0 - - pandas=2.1.1 - - pip=23.2.1 - - postgresql=15.4 - - psycopg2=2.9.7 - - pytest=7.4.2 - - pytest-cov=4.1.0 - - pytest-mock=3.11.1 - - python=3.9.17 - - pyyaml=6.0.1 - - redis=5.0.0 - - ruff=0.0.292 - - scipy=1.9.3 - - setuptools=68.2.2 - - sphinx=7.2.6 - - sphinx_rtd_theme=1.3.0 - - sqlalchemy=2.0.21 - - twine=4.0.2 - - wtforms=3.0.1 - - - pip: - - astroquery==0.4.6 - - bandit==1.7.5 - - jwst==1.12.3 - - jwst_backgrounds==1.2.0 - - pysiaf==0.20.0 - - pysqlite3==0.5.2 - - pyvo==1.4.2 - - redis==5.0.0 - - selenium==4.13.0 - - stdatamodels==1.8.3 - - stsci_rtd_theme==1.0.0 - - vine==5.0.0 - - git+https://github.com/spacetelescope/jwst_reffiles - - # Current package - - -e . 
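The pinned `environment_python_3.9.yml`/`environment_python_3.10.yml` files removed above are superseded by the minimal `environment.yml` together with the `freeze` job in `build.yml`, which exports a fully pinned environment for each OS/Python combination and attaches it to every release. As a rough sketch of how those release assets end up named (the `freeze_asset_name` helper is hypothetical, not part of the repo; it just mirrors the `filename` step in `build.yml` and the lookup in `install-env.sh`):

```
# Hypothetical helper mirroring the CI "filename" step; not part of the repo.
import platform

def freeze_asset_name(version: str, python_version: str) -> str:
    # In CI these values come from dunamai (version), runner.os ("Linux"/"macOS"),
    # and runner.arch ("X64"/"ARM64"); here we derive rough local equivalents.
    os_name = {"Linux": "Linux", "Darwin": "macOS"}.get(platform.system(), platform.system())
    arch = {"x86_64": "X64", "AMD64": "X64", "arm64": "ARM64"}.get(platform.machine(), platform.machine())
    return f"jwql_{version}_conda_{os_name}_{arch}_py{python_version}.yml"

print(freeze_asset_name("1.2.11", "3.11"))
# e.g. jwql_1.2.11_conda_Linux_X64_py3.11.yml on a Linux x86_64 runner
```

This is the same name that `install-env.sh` (added later in this diff) reconstructs from a release branch name in order to download the frozen environment.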
diff --git a/jwql/bokeh_templating/__init__.py b/jwql/bokeh_templating/__init__.py deleted file mode 100644 index 2cf1818b4..000000000 --- a/jwql/bokeh_templating/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .template import BokehTemplate diff --git a/jwql/bokeh_templating/example/example_interface.yaml b/jwql/bokeh_templating/example/example_interface.yaml deleted file mode 100644 index 4aec297c7..000000000 --- a/jwql/bokeh_templating/example/example_interface.yaml +++ /dev/null @@ -1,26 +0,0 @@ -- !Slider: &a_slider # a slider for the a value - ref: "a_slider" - title: "A" - value: 4 - range: !!python/tuple [1, 20, 0.1] - on_change: ['value', !self.controller ] -- !Slider: &b_slider # a slider for the b value - ref: "b_slider" - title: "B" - value: 2 - range: !!python/tuple [1, 20, 0.1] - on_change: ['value', !self.controller ] -- !ColumnDataSource: &figure_source # the ColumnDataSource for the figure - ref: "figure_source" - data: - x: !self.x - y: !self.y -- !Figure: &the_figure # the Figure itself, which includes a single line element. - ref: 'the_figure' - elements: - - {'kind': 'line', 'source': *figure_source, 'line_color': 'orange', 'line_width': 2} -- !Document: # the Bokeh document layout: a single column with the figure and two sliders - - !column: - - *the_figure # note the use of YAML anchors to add the Bokeh objects to the Document layout directly. - - *a_slider - - *b_slider \ No newline at end of file diff --git a/jwql/bokeh_templating/example/main.py b/jwql/bokeh_templating/example/main.py deleted file mode 100644 index bd91d4e87..000000000 --- a/jwql/bokeh_templating/example/main.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -This is a minimal example demonstrating how to create a Bokeh app using -the ``bokeh-templating`` package and the associated YAML template files. - -Author -------- - - - Graham Kanarek - -Dependencies ------------- - - The user must have PyYAML, Bokeh, and the ``bokeh-templating`` - packages installed. -""" - -import os -import numpy as np - -from jwql.bokeh_templating import BokehTemplate - -file_dir = os.path.dirname(os.path.realpath(__file__)) - - -class TestBokehApp(BokehTemplate): - """This is a minimal ``BokehTemplate`` app.""" - - _embed = True - - def pre_init(self): - """Before creating the Bokeh interface (by parsing the interface - file), we must initialize our ``a`` and ``b`` variables, and set - the path to the interface file. - """ - - self.a, self.b = 4, 2 - - self.format_string = None - self.interface_file = os.path.join(file_dir, "example_interface.yaml") - - # No post-initialization tasks are required. - post_init = None - - @property - def x(self): - """The x-value of the Lissajous curves.""" - return 4. * np.sin(self.a * np.linspace(0, 2 * np.pi, 500)) - - @property - def y(self): - """The y-value of the Lissajous curves.""" - return 3. * np.sin(self.b * np.linspace(0, 2 * np.pi, 500)) - - def controller(self, attr, old, new): - """This is the controller function which is used to update the - curves when the sliders are adjusted. 
Note the use of the - ``self.refs`` dictionary for accessing the Bokeh object - attributes.""" - self.a = self.refs["a_slider"].value - self.b = self.refs["b_slider"].value - - self.refs["figure_source"].data = {'x': self.x, 'y': self.y} - - -TestBokehApp() diff --git a/jwql/bokeh_templating/factory.py b/jwql/bokeh_templating/factory.py deleted file mode 100644 index 7c77bfa5d..000000000 --- a/jwql/bokeh_templating/factory.py +++ /dev/null @@ -1,270 +0,0 @@ -""" -This module defines YAML constructors and factory functions which are -used to create Bokeh objects parsed from YAML template files. - -The ``mapping_factory`` and ``sequence_factory`` functions are used to -create a constructor function for each of the mappings (i.e., classes) -and sequences (i.e., functions) included in the keyword map. The -``document_constructor`` and ``figure_constructor`` functions are -stand-alone constructors for the ``!Document`` and ``!Figure`` tag, -respectively. - -Author -------- - - - Graham Kanarek - -Use ---- - - The functions in this file are not intended to be called by the user - directly; users should subclass the ``BokehTemplate`` class found in - ``template.py`` instead. However, they can be used as a model for - creating new constructors for user-defined tags, which can then be - registered using the ``BokehTemplate.register_mapping_constructor`` - and ``BokehTemplate.register_sequence_constructor`` classmethods. - -Dependencies ------------- - - The user must have Bokeh installed. -""" - -from bokeh.io import curdoc - -from .keyword_map import bokeh_mappings as mappings, bokeh_sequences as sequences - -# Figures get their own constructor so we remove references to Figures from -# the keyword maps. -Figure = mappings.pop("figure") - - -def mapping_factory(tool, element_type): - """ - Create a mapping constructor for the given tool, used to parse the - given element tag. - - Parameters - ---------- - tool : BokehTemplate instance - The web app class instance to which the constructor will be - attached. This will become ``self`` when the factory is a method, - and is used to both store the Bokeh objects in the - ``BokehTemplate.refs`` dictionary, and allow for app-wide - formatting choices via ``BokehTemplate.format_string``. - - element_type : str - The Bokeh element name for which a constructor is desired. For - example, an ``element_type`` of ``'Slider'`` will create a - constructor for a Bokeh ``Slider`` widget, designated by the - ``!Slider`` tag in the YAML template file. - - Notes - ----- - See the ``BokehTemplate`` class implementation in ``template.py`` - for an example of how this function is used. 
- """ - - def mapping_constructor(loader, node): # docstring added below - fmt = tool.formats.get(element_type, {}) - value = loader.construct_mapping(node, deep=True) - ref = value.pop("ref", "") - callback = value.pop("on_change", []) - selection_callback = value.pop("selection_on_change", []) - onclick = value.pop("on_click", None) - fmt.update(value) - # convert "range" YAML keyword of slider into something Bokeh can read - if element_type == "Slider": - fmt["start"], fmt["end"], fmt["step"] = fmt.pop("range", [0, 1, 0.1]) - - # Many of these have hybrid signatures, with both positional and - # keyword arguments, so we need to convert an "args" keyword into - # positional arguments - arg = fmt.pop("arg", None) - if arg is not None: - obj = mappings[element_type](*arg, **fmt) - else: - obj = mappings[element_type](**fmt) - - # Store the object in the tool's "refs" dictionary - if ref: - tool.refs[ref] = obj - - # Handle callbacks and on_clicks - if callback: - obj.on_change(*callback) - if onclick: - obj.on_click(onclick) - if selection_callback: - obj.selected.on_change(*selection_callback) - - yield obj - - mapping_constructor.__name__ = element_type.lower() + '_' + mapping_constructor.__name__ - mapping_constructor.__doc__ = """ - A YAML constructor for the ``{et}`` Bokeh object. This will create a - ``{et}`` object wherever the ``!{et}`` tag appears in the YAML template - file. If a ``ref`` tag is specified, the object will then be stored in - the ``BokehTemplate.refs`` dictionary. - - This constructor is used for mappings -- i.e., classes or functions - which primarily have keyword arguments in their signatures. If - positional arguments appear, they can be included in the YAML file - with the `args` keyword. - """.format(et=element_type) - - return mapping_constructor - - -def sequence_factory(tool, element_type): - """ Create a sequence constructor for the given tool, used to parse - the given element tag. - - Parameters - ---------- - tool : BokehTemplate instance - The web app class instance to which the constructor will be - attached. This will become ``self`` when the factory is a method, - and is used to both store the Bokeh objects in the - ``BokehTemplate.refs`` dictionary, and allow for app-wide - formatting choices via ``BokehTemplate.format_string``. - - element_type : str - The Bokeh element name for which a constructor is desired. For - example, an ``element_type`` of ``'Slider'`` will create a - constructor for a Bokeh ``Slider`` widget, designated by the - ``!Slider`` tag in the YAML template file. - - Notes - ----- - See the ``BokehTemplate`` class implementation in ``template.py`` - for an example of how this function is used. - """ - - def sequence_constructor(loader, node): - fmt = tool.formats.get(element_type, {}) - value = loader.construct_sequence(node, deep=True) - obj = sequences[element_type](*value, **fmt) - yield obj - - sequence_constructor.__name__ = element_type.lower() + '_' + sequence_constructor.__name__ - sequence_constructor.__doc__ = """ - A YAML constructor for the ``{et}`` Bokeh object. This will create a - ``{et}`` object wherever the ``!{et}`` tag appears in the YAML template - file. If a ``ref`` tag is specified, the object will then be stored in - the ``BokehTemplate.refs`` dictionary. - - This constructor is used for sequences -- i.e., classes or functions - which have only positional arguments in their signatures (which for - Bokeh is only functions, no classes). 
- """.format(et=element_type) - - return sequence_constructor - - -# These constructors need more specialized treatment - -def document_constructor(tool, loader, node): - """ A YAML constructor for the Bokeh document, which is grabbed via - the Bokeh ``curdoc()`` function. When laying out a Bokeh document - with a YAML template, the ``!Document`` tag should be used as the - top-level tag in the layout. - """ - - layout = loader.construct_sequence(node, deep=True) - for element in layout: - curdoc().add_root(element) - tool.document = curdoc() - yield tool.document - - -def figure_constructor(tool, loader, node): - """ A YAML constructor for Bokeh Figure objects, which are - complicated enough to require their own (non-factory) constructor. - Each ``!Figure`` tag in the YAML template file will be turned into a - ``Figure`` object via this constructor (once it's been registered by - the ``BokehTemplate`` class). - """ - - fig = loader.construct_mapping(node, deep=True) - fmt = tool.formats.get('Figure', {}) - - elements = fig.pop('elements', []) - cmds = [] - ref = fig.pop("ref", "") - callback = fig.pop("on_change", []) - axis = tool.formats.get("Axis", {}) - axis.update(fig.pop("axis", {})) - - for key in fig: - val = fig[key] - if key in ['text', 'add_tools', 'js_on_event']: - cmds.append((key, val)) - else: - fmt[key] = val - - figure = Figure(**fmt) - - for key, cmd in cmds: - if key == 'add_tools': - figure.add_tools(*cmd) - elif key == 'text': - figure.text(*cmd.pop('loc'), **cmd) - elif key == 'js_on_event': - for event in cmd: - figure.js_on_event(*event) - - for element in elements: - key = element.pop('kind', 'diamond') - shape = {'line': ('Line', figure.line), - 'circle': ('Circle', figure.circle), - 'step': ('Step', figure.step), - 'diamond': ('Diamond', figure.diamond), - 'triangle': ('Triangle', figure.triangle), - 'square': ('Square', figure.square), - 'asterisk': ('Asterisk', figure.asterisk), - 'x': ('XGlyph', figure.x), - 'vbar': ('VBar', figure.vbar)} - if key in shape: - fmt_key, glyph = shape[key] - shape_fmt = tool.formats.get(fmt_key, {}) - shape_fmt.update(element) - x = shape_fmt.pop('x', 'x') - y = shape_fmt.pop('y', 'y') - glyph(x, y, **shape_fmt) - elif key == 'rect': - rect_fmt = tool.formats.get('Rect', {}) - rect_fmt.update(element) - figure.rect('rx', 'ry', 'rw', 'rh', **rect_fmt) - elif key == 'quad': - quad_fmt = tool.formats.get('Quad', {}) - quad_fmt.update(element) - figure.quad(**quad_fmt) - elif key == 'image': - image_fmt = tool.formats.get('Image', {}) - image_fmt.update(element) - arg = image_fmt.pop("image", None) - figure.image(arg, **image_fmt) - elif key == 'image_rgba': - image_fmt = tool.formats.get('ImageRGBA', {}) - image_fmt.update(element) - arg = image_fmt.pop("image", None) - figure.image_rgba(arg, **image_fmt) - elif key == 'multi_line': - multi_fmt = tool.formats.get('MultiLine', {}) - multi_fmt.update(element) - figure.multi_line(**multi_fmt) - elif key == 'layout': - obj = element.pop('obj', None) - figure.add_layout(obj, **element) - - for attr, val in axis.items(): - # change axis attributes, hopefully - setattr(figure.axis, attr, val) - - if ref: - tool.refs[ref] = figure - if callback: - figure.on_change(*callback) - - yield figure diff --git a/jwql/bokeh_templating/keyword_map.py b/jwql/bokeh_templating/keyword_map.py deleted file mode 100644 index 8f1be71ce..000000000 --- a/jwql/bokeh_templating/keyword_map.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -A script to scrape the Bokeh package and collate dictionaries of -classes and 
functions. - -The ``_parse_module`` function iterates over a module, and uses the -``inspect`` package to sort everything in the module's namespace (as -identified by ``inspect.getmembers``) into a dictionary of mappings -(requiring primarily keyword arguments) and sequences (requiring -primarily positional arguments). - -Note that thhe files ``surface3d.py`` and ``surface3d.ts``, used to -create 3D surface plots, were downloaded from the Bokeh ``surface3d`` -example. - -Author -------- - - - Graham Kanarek - -Use ---- - - To access the Bokeh elements, the user should import as follows: - - :: - - from jwql.bokeh_templating.keyword_map import bokeh_sequences, bokeh_mappings - -Dependencies ------------- - - The user must have Bokeh installed. -""" - -from bokeh import layouts, models, palettes, plotting, transform -from inspect import getmembers, isclass, isfunction - -bokeh_sequences = {} -bokeh_mappings = {} # Note that abstract base classes *are* included - - -def _parse_module(module): - """ - Sort the members of a module into dictionaries of functions (sequences) - and classes (mappings). - """ - - def accessible_member(name, member): - return (not name.startswith("_")) and (module.__name__ in member.__module__) - - seqs = {nm: mem for nm, mem in getmembers(module, isfunction) if accessible_member(nm, mem)} - maps = {nm: mem for nm, mem in getmembers(module, isclass) if accessible_member(nm, mem)} - - # these need to be mappings - if 'gridplot' in seqs: - maps['gridplot'] = seqs.pop('gridplot') - if 'Donut' in seqs: - maps['Donut'] = seqs.pop('Donut') - return (seqs, maps) - - -for module in [models, plotting, layouts, palettes, transform]: - seqs, maps = _parse_module(module) - bokeh_sequences.update(seqs) - bokeh_mappings.update(maps) diff --git a/jwql/bokeh_templating/template.py b/jwql/bokeh_templating/template.py deleted file mode 100644 index 4f854fd30..000000000 --- a/jwql/bokeh_templating/template.py +++ /dev/null @@ -1,302 +0,0 @@ -#! /usr/bin/env python - -"""This module defines the ``BokehTemplate`` class, which can be subclassed -to create a Bokeh web app with a YAML templating file. - - -Author -------- - - - Graham Kanarek - -Use ---- - - The user should subclass the ``BokehTemplate`` class to create an - app, as demonstrated in ``example.py``. - - (A full tutorial on developing Bokeh apps with ``BokehTemplate`` is - forthcoming.) - - -Dependencies ------------- - - The user must have Bokeh and PyYAML installed. -""" - -import yaml -import os -from . import factory -from bokeh.embed import components -from inspect import signature - - -class BokehTemplateParserError(Exception): - """ - A custom error for problems with parsing the interface files. - """ - - -class BokehTemplateEmbedError(Exception): - """ - A custom error for problems with embedding components. - """ - - -class BokehTemplate(object): - """The base class for creating Bokeh web apps using a YAML - templating framework. - - Attributes - ---------- - _embed : bool - A flag to indicate whether or not the individual widgets will be - embedded in a webpage. If ``False``, the YAML interface file - must include a !Document tag. Defaults to ``False``. - document: obje - The Bokeh Dpcument object (if any), equivalent to the result of - calling ``curdoc()``. - formats: dict - A dictionary of widget formating specifications, parsed from - ``format_string`` (if one exists). - format_string: str - A string of YAML formatting specifications, using the same - syntax as the interface file, for Bokeh widgets. 
Note that - formatting choices present in individual widget instances in the - interface file override these. - interface_file: str - The path to the YAML interface file. - refs : dict - A dictionary of Bokeh objects which are given ``ref`` strings in - the interface file. Use this to store and interact with the - Bokeh data sources and widgets in callback methods. - - Methods - ------- - _mapping_factory() - Used by the interface parser to construct Bokeh widgets - _sequence_factory() - Used by the interface parser to construct Bokeh widgets - _figure_constructor() - Used by the interface parser to construct Bokeh widgets - _document_constructor() - Used by the interface parser to construct Bokeh widgets - """ - - # Each of these functions has a ``tool`` argument, which becomes ``self`` - # when they are stored as methods. This way, the YAML constructors can - # store the Bokeh objects in the ``tool.ref`` dictionary, and can access - # the formatting string, if any. See ``factory.py`` for more details. - _mapping_factory = factory.mapping_factory - _sequence_factory = factory.sequence_factory - _figure_constructor = factory.figure_constructor - _document_constructor = factory.document_constructor - - _embed = False - document = None - format_string = "" - formats = {} - interface_file = "" - refs = {} - - def _self_constructor(self, loader, tag_suffix, node): - """ - A multi_constructor for `!self` tag in the interface file. - """ - yield eval("self" + tag_suffix, globals(), locals()) - - def _register_default_constructors(self): - """ - Register all the default constructors with ``yaml.add_constructor``. - """ - for m in factory.mappings: - yaml.add_constructor("!" + m + ":", self._mapping_factory(m)) - - for s in factory.sequences: - yaml.add_constructor("!" + s + ":", self._sequence_factory(s)) - - yaml.add_constructor("!Figure:", self._figure_constructor) - yaml.add_constructor("!Document:", self._document_constructor) - yaml.add_multi_constructor(u"!self", self._self_constructor) - - def pre_init(self, **kwargs): - """ - This should be implemented by the app subclass, to do any pre- - initialization steps that it requires (setting defaults, loading - data, etc). - - If this is not required, subclass should set `pre_init = None` - in the class definition. - """ - - raise NotImplementedError - - def post_init(self): - """ - This should be implemented by the app subclass, to do any post- - initialization steps that the tool requires. - - If this is not required, subclass should set `post_init = None` - in the class definition. - """ - - raise NotImplementedError - - def __init__(self, **kwargs): - """ - Keyword arguments are passed to self.pre_init(). - """ - self._register_default_constructors() - - # Allow for pre-initialization code from the subclass. - if self.pre_init is not None: - if signature(self.pre_init).parameters: - # If we try to call pre_init with keyword parameters when none - # are included, it will throw an error - # thus, we use inspect.signature - self.pre_init(**kwargs) - else: - self.pre_init() - - # Initialize attributes for YAML parsing - self.formats = {} - self.refs = {} - - # Parse formatting string, if any, and the interface YAML file - self.include_formatting() - self.parse_interface() - - # Allow for post-init code from the subclass. - if self.post_init is not None: - self.post_init() - - def include_formatting(self): - """ - This should simply be a dictionary of formatting keywords at the end. 
- """ - if not self.format_string: - return - - self.formats = yaml.load(self.format_string, Loader=yaml.SafeLoader) - - def parse_interface(self): - """ - This is the workhorse YAML parser, which creates the interface based - on the layout file. - - `interface_file` is the path to the interface .yaml file to be parsed. - """ - - if not self.interface_file: - raise NotImplementedError("Interface file required.") - - # Read the interface file into a string - filepath = os.path.abspath(os.path.expanduser(self.interface_file)) - if not os.path.exists(filepath): - raise BokehTemplateParserError("Interface file path does not exist.") - with open(filepath) as f: - interface = f.read() - - # If necessary, verify that the interface string contains !Document tag - if not self._embed and '!Document' not in interface: - raise BokehTemplateParserError("Interface file must contain a Document tag") - - # Now, since we've registered all the constructors, we can parse the - # entire string with yaml. We don't need to assign the result to a - # variable, since the constructors store everything in self.refs - # (and self.document, for the document). - try: - self.full_stream = list(yaml.load_all(interface, Loader=yaml.FullLoader)) - except yaml.YAMLError as exc: - raise BokehTemplateParserError(exc) - - def embed(self, ref): - """A wrapper for ``bokeh.embed.components`` to return embeddable - code for the given widget reference.""" - element = self.refs.get(ref, None) - if element is None: - raise BokehTemplateEmbedError("Undefined component reference") - return components(element) - - @staticmethod - def parse_string(yaml_string): - """ A utility functon to parse any YAML string using the - registered constructors. (Usually used for debugging.)""" - return list(yaml.load_all(yaml_string)) - - @classmethod - def register_sequence_constructor(cls, tag, parse_func): - """ - Register a new sequence constructor with YAML. - - Parameters - ---------- - tag : str - The YAML tag string to be used for the constructor. - parse_func: object - The parsing function to be registered with YAML. This - function should accept a multi-line string, and return a - python object. - - Notes - ----- - This classmethod should be used to register a new constructor - *before* creating & instantiating a subclass of BokehTemplate : - - :: - - from bokeh_template import BokehTemplate - BokehTemplate.register_sequence_constructor("my_tag", my_parser) - - class myTool(BokehTemplate): - pass - - myTool() - """ - if tag.startswith("!"): - tag = tag[1:] - - def user_constructor(loader, node): - value = loader.construct_sequence(node, deep=True) - yield parse_func(value) - user_constructor.__name__ = tag.lower() + "_constructor" - yaml.add_constructor("!" + tag, user_constructor) - - @classmethod - def register_mapping_constructor(cls, tag, parse_func): - """ - Register a new mapping constructor with YAML. - - Parameters - ---------- - tag : str - The YAML tag string to be used for the constructor. - parse_func: object - The parsing function to be registered with YAML. This - function should accept a multi-line string, and return a - python object. 
- - Notes - ----- - This classmethod should be used to register a new constructor - *before* creating & instantiating a subclass of BokehTemplate : - - :: - - from bokeh_template import BokehTemplate - BokehTemplate.register_mapping_constructor("my_tag", my_parser) - - class myTool(BokehTemplate): - pass - - myTool() - """ - if tag.startswith("!"): - tag = tag[1:] - - def user_constructor(loader, node): - value = loader.construct_mapping(node, deep=True) - yield parse_func(value) - user_constructor.__name__ = tag.lower() + "_constructor" - yaml.add_constructor("!" + tag, user_constructor) diff --git a/jwql/edb/engineering_database.py b/jwql/edb/engineering_database.py index b4812d81b..ad51af223 100644 --- a/jwql/edb/engineering_database.py +++ b/jwql/edb/engineering_database.py @@ -799,7 +799,7 @@ def bokeh_plot_text_data(self, show_plot=False): dates = abscissa[index].astype(np.datetime64) y_values = list(np.ones(len(index), dtype=int) * i) p1.line(dates, y_values, line_width=1, line_color='blue', line_dash='dashed') - p1.circle(dates, y_values, color='blue') + p1.circle(dates, y_values, color='blue', radius=5, radius_dimension='y', radius_units='screen') p1.yaxis.ticker = list(override_dict.keys()) p1.yaxis.major_label_overrides = override_dict diff --git a/jwql/example_config.json b/jwql/example_config.json index 95bc8c8cf..adcf87555 100644 --- a/jwql/example_config.json +++ b/jwql/example_config.json @@ -28,6 +28,7 @@ "PORT" : "" } }, + "django_debug" : false, "jwql_dir" : "", "jwql_version": "", "server_type": "", @@ -46,5 +47,50 @@ "cores" : "", "redis_host": "", "redis_port": "", - "transfer_dir": "" + "transfer_dir": "", + "logging": { + "version": 1, + "disable_existing_loggers": true, + "formatters": { + "simple": { + "format": "%(asctime)s %(levelname)s: %(message)s", + "datefmt": "%m/%d/%Y %H:%M:%S %p" + } + }, + "filters": { + "warnings_and_below": { + "()" : "jwql.utils.logging_functions.filter_maker", + "level": "WARNING" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "simple", + "stream": "ext://sys.stdout", + "filters": ["warnings_and_below"] + }, + "stderr": { + "class": "logging.StreamHandler", + "level": "ERROR", + "formatter": "simple", + "stream": "ext://sys.stderr" + }, + "file": { + "class": "logging.FileHandler", + "formatter": "simple", + "filename": "app.log", + "mode": "a" + } + }, + "root": { + "level": "DEBUG", + "handlers": [ + "stderr", + "stdout", + "file" + ] + } + } } diff --git a/jwql/install-env.sh b/jwql/install-env.sh new file mode 100644 index 000000000..130f65754 --- /dev/null +++ b/jwql/install-env.sh @@ -0,0 +1,70 @@ +branchname=$1 +python_version=$2 + +printf "UPDATING JWQL ENVIRONMENT\n\n" + +# Check operating system to obtain proper substring for environment +if [[ "$OSTYPE" == "darwin"* ]]; then + os_str="macOS_ARM64" + printf "INFORMATION: \n \t MAC OS DETECTED, USING MAC ENVIRONMENT FILE\n" +elif [[ "$OSTYPE" == "linux-gnu"* ]]; then + os_str="Linux_X64" + printf "INFORMATION: \n \t LINUX OS DETECTED, USING LINUX ENVIRONMENT FILE\n" +else + printf "EXCEPTION: \n \t $OSTYPE NOT SUPPORTED, EXITING" + return +fi + +# Check if branch name starts with "v" for our major releases +# Our branch names contain v prior to version number, but version names on git +# do not contain v prior to the number. 
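+# Example: for branchname "v1.2.11", the slice below gives jwql_version
+# "1.2.11", so with python_version "3.11" on Linux the script downloads the
+# release asset jwql_1.2.11_conda_Linux_X64_py3.11.yml.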
+if [[ $branchname == v* ]]; then + jwql_version=${branchname:1:${#branchname}} + environment_url=https://github.com/spacetelescope/jwql/releases/download/$jwql_version/ + environment_name=jwql_${jwql_version}_conda_${os_str}_py${python_version} + environment_filename="${environment_name}.yml" +else + printf "EXCEPTION: \n \t RELEASE DOESN'T FOLLOW RELEASE VERSIONING NAMING CONVENTION, EXITING" + return +fi + +# Download asset from release and install it. +if curl --head --silent --fail "${environment_url}${environment_filename}" 2> /dev/null; + then + # Reset back to base first before generating environment (in case one is currently activated) + eval "$(conda shell.bash deactivate)" + eval "$(conda shell.bash activate base)" + printf "\n SUCCESSFULLY LOCATED ENVIRONMENT FILE ${environment_url}${environment_filename} \n" + curl -L "${environment_url}${environment_filename}" > jwql-current.yml + $CONDA_EXE env create --name $environment_name --file jwql-current.yml + else + printf "EXCEPTION:\n" + printf "\t ${environment_url}${environment_filename} DOES NOT EXIST, EXITING\n" + printf "\t \nENSURE THAT: \n" + printf "\t https://github.com/spacetelescope/jwql/releases/tag/$branchname \n" + printf "EXISTS AND VERIFY ASSET FOR ${jwql_version}, ${python_version} FOR OS ${os_str}" + return +fi + +# Update symlink +cd ${CONDA_PREFIX}/envs/ + +env_symlink="jwql-current" + +if [[ -L $env_symlink || -e $env_symlink ]]; then + printf "INFORMATION:\n" + printf "\tjwql-current SYMLINK EXISTS, UNLINKING\n" + unlink jwql-current +fi + +printf "INFORMATION:\n\tLINKING NEW ENVIRONMENT\n" +ln -s $environment_name jwql-current + +printf "\tjwql-current HAS BEEN SET TO: ${environment_name}\n" +printf "\tTO SEE CHANGES, EXIT/RESTART SHELL\n" + +# return to original directory +cd - + +# Conda commands change shell prompt, this just returns it to the default +export PS1="\n(base)\h:\W \u\$ " diff --git a/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py b/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py index d3ae2e795..55001117c 100755 --- a/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py +++ b/jwql/instrument_monitors/common_monitors/bad_pixel_monitor.py @@ -95,28 +95,44 @@ from jwst_reffiles.bad_pixel_mask import bad_pixel_mask import numpy as np -from jwql.database.database_interface import engine, session -from jwql.database.database_interface import NIRCamBadPixelQueryHistory, NIRCamBadPixelStats -from jwql.database.database_interface import NIRISSBadPixelQueryHistory, NIRISSBadPixelStats -from jwql.database.database_interface import MIRIBadPixelQueryHistory, MIRIBadPixelStats -from jwql.database.database_interface import NIRSpecBadPixelQueryHistory, NIRSpecBadPixelStats -from jwql.database.database_interface import FGSBadPixelQueryHistory, FGSBadPixelStats from jwql.instrument_monitors import pipeline_tools from jwql.shared_tasks.shared_tasks import only_one, run_pipeline, run_parallel_pipeline from jwql.utils import crds_tools, instrument_properties, monitor_utils -from jwql.utils.constants import DARKS_BAD_PIXEL_TYPES, DARK_EXP_TYPES, FLATS_BAD_PIXEL_TYPES, FLAT_EXP_TYPES -from jwql.utils.constants import JWST_INSTRUMENT_NAMES, JWST_INSTRUMENT_NAMES_MIXEDCASE, ON_GITHUB_ACTIONS -from jwql.utils.constants import ON_READTHEDOCS +from jwql.utils.constants import ( + DARKS_BAD_PIXEL_TYPES, + DARK_EXP_TYPES, + FLATS_BAD_PIXEL_TYPES, + FLAT_EXP_TYPES, +) +from jwql.utils.constants import JWST_INSTRUMENT_NAMES, JWST_INSTRUMENT_NAMES_MIXEDCASE +from jwql.utils.constants
import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.logging_functions import log_info, log_fail from jwql.utils.mast_utils import mast_query from jwql.utils.permissions import set_permissions -from jwql.utils.utils import copy_files, create_png_from_fits, ensure_dir_exists, get_config, filesystem_path +from jwql.utils.utils import ( + copy_files, + create_png_from_fits, + ensure_dir_exists, + get_config, + filesystem_path, +) if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: - from jwql.website.apps.jwql.monitor_pages.monitor_bad_pixel_bokeh import BadPixelPlots + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) -THRESHOLDS_FILE = os.path.join(os.path.split(__file__)[0], 'bad_pixel_file_thresholds.txt') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + from jwql.website.apps.jwql.monitor_models.bad_pixel import * + from jwql.website.apps.jwql.monitor_pages.monitor_bad_pixel_bokeh import ( + BadPixelPlots, + ) + +THRESHOLDS_FILE = os.path.join( + os.path.split(__file__)[0], "bad_pixel_file_thresholds.txt" +) def bad_map_to_list(badpix_image, mnemonic): @@ -158,7 +174,9 @@ def bad_map_to_list(badpix_image, mnemonic): return x_location, y_location -def check_for_sufficient_files(uncal_files, instrument_name, aperture_name, threshold_value, file_type): +def check_for_sufficient_files( + uncal_files, instrument_name, aperture_name, threshold_value, file_type +): """From a list of files of a given type (flats or darks), check to see if there are enough files to call the bad pixel monitor. The number of files must be equal to or greater than the provided @@ -195,27 +213,40 @@ def check_for_sufficient_files(uncal_files, instrument_name, aperture_name, thre Whether or not the bad pixel monitor will be called on these files. """ - if file_type not in ['darks', 'flats']: + if file_type not in ["darks", "flats"]: raise ValueError('Input file_type must be "darks" or "flats"') - file_type_singular = file_type.strip('s') + file_type_singular = file_type.strip("s") if len(uncal_files) > 0: uncal_files = sorted(list(set(uncal_files))) if len(uncal_files) < threshold_value: - logging.info(('\tBad pixels from {} skipped. {} new {} files for {},' - '{} found. {} new files are required to run bad pixels' - 'from {} portion of monitor.') - .format(file_type, len(uncal_files), file_type_singular, - instrument_name, aperture_name, threshold_value, file_type)) + logging.info( + ( + "\tBad pixels from {} skipped. {} new {} files for {}," + "{} found. {} new files are required to run bad pixels" + "from {} portion of monitor." + ).format( + file_type, + len(uncal_files), + file_type_singular, + instrument_name, + aperture_name, + threshold_value, + file_type, + ) + ) uncal_files = None run_data = False else: - logging.info('\tSufficient new files found for {}, {} to run the' - 'bad pixel from {} portion of the monitor.' 
- .format(instrument_name, aperture_name, file_type)) - logging.info('\tNew entries: {}'.format(len(uncal_files))) + logging.info( + "\tSufficient new files found for {}, {} to run the" + "bad pixel from {} portion of the monitor.".format( + instrument_name, aperture_name, file_type + ) + ) + logging.info("\tNew entries: {}".format(len(uncal_files))) run_data = True return uncal_files, run_data @@ -273,18 +304,18 @@ def locate_rate_files(uncal_files): rate_files = [] rate_files_to_copy = [] for uncal in uncal_files: - base = uncal.split('_uncal.fits')[0] - constructed_ratefile = '{}_rateints.fits'.format(base) + base = uncal.split("_uncal.fits")[0] + constructed_ratefile = "{}_rateints.fits".format(base) try: rate_files.append(filesystem_path(constructed_ratefile)) rate_files_to_copy.append(filesystem_path(constructed_ratefile)) except FileNotFoundError: - constructed_ratefile = '{}_rate.fits'.format(base) + constructed_ratefile = "{}_rate.fits".format(base) try: rate_files.append(filesystem_path(constructed_ratefile)) rate_files_to_copy.append(filesystem_path(constructed_ratefile)) except FileNotFoundError: - rate_files.append('None') + rate_files.append("None") return rate_files, rate_files_to_copy @@ -304,20 +335,23 @@ def locate_uncal_files(query_result): """ uncal_files = [] for entry in query_result: - filename = entry['filename'] - suffix = filename.split('_')[-1].replace('.fits', '') - uncal_file = filename.replace(suffix, 'uncal') + filename = entry["filename"] + suffix = filename.split("_")[-1].replace(".fits", "") + uncal_file = filename.replace(suffix, "uncal") # Look for uncal file try: uncal_files.append(filesystem_path(uncal_file)) except FileNotFoundError: - logging.warning('\t\tUnable to locate {} in filesystem. Not including in processing.' - .format(uncal_file)) + logging.warning( + "\t\tUnable to locate {} in filesystem. Not including in processing.".format( + uncal_file + ) + ) return uncal_files -class BadPixels(): +class BadPixels: """Class for executing the bad pixel monitor. 
This class will search for new (since the previous instance of the @@ -392,7 +426,16 @@ class BadPixels(): def __init__(self): """Initialize an instance of the ``BadPixels`` class.""" - def add_bad_pix(self, coordinates, pixel_type, files, obs_start_time, obs_mid_time, obs_end_time, baseline_file): + def add_bad_pix( + self, + coordinates, + pixel_type, + files, + obs_start_time, + obs_mid_time, + obs_end_time, + baseline_file, + ): """Add a set of bad pixels to the bad pixel database table Parameters @@ -422,21 +465,25 @@ def add_bad_pix(self, coordinates, pixel_type, files, obs_start_time, obs_mid_ti pixel population was compared """ - logging.info('Adding {} {} pixels to database.'.format(len(coordinates[0]), pixel_type)) + logging.info( + "Adding {} {} pixels to database.".format(len(coordinates[0]), pixel_type) + ) source_files = [os.path.basename(item) for item in files] - entry = {'detector': self.detector, - 'x_coord': coordinates[0], - 'y_coord': coordinates[1], - 'type': pixel_type, - 'source_files': source_files, - 'obs_start_time': obs_start_time, - 'obs_mid_time': obs_mid_time, - 'obs_end_time': obs_end_time, - 'baseline_file': baseline_file, - 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.pixel_table.__table__.insert(), entry) + entry = { + "detector": self.detector, + "x_coord": coordinates[0], + "y_coord": coordinates[1], + "type": pixel_type, + "source_files": source_files, + "obs_start_time": obs_start_time, + "obs_mid_time": obs_mid_time, + "obs_end_time": obs_end_time, + "baseline_file": baseline_file, + "entry_date": datetime.datetime.now(datetime.timezone.utc), + } + entry = self.pixel_table(**entry) + entry.save() def filter_query_results(self, results, datatype): """Filter MAST query results. For input flats, keep only those @@ -460,29 +507,33 @@ def filter_query_results(self, results, datatype): # Need to filter all instruments' results by filter. # Choose filter with the most files # Only for flats - if ((datatype == 'flat') and (self.instrument != 'fgs')): - if self.instrument in ['nircam', 'niriss']: - filter_on = 'pupil' - elif self.instrument == 'nirspec': - filter_on = 'grating' - elif self.instrument == 'miri': - filter_on = 'filter' - - filter_list = ['{}:{}'.format(entry['filter'], entry[filter_on]) for entry in results] + if (datatype == "flat") and (self.instrument != "fgs"): + if self.instrument in ["nircam", "niriss"]: + filter_on = "pupil" + elif self.instrument == "nirspec": + filter_on = "grating" + elif self.instrument == "miri": + filter_on = "filter" + + filter_list = [ + "{}:{}".format(entry["filter"], entry[filter_on]) for entry in results + ] filter_set = list(set(filter_list)) # Find the filter with the largest number of entries maxnum = 0 - maxfilt = '' + maxfilt = "" for filt in filter_set: if filter_list.count(filt) > maxnum: maxnum = filter_list.count(filt) maxfilt = filt - filter_name, other_name = maxfilt.split(':') + filter_name, other_name = maxfilt.split(":") filtered = [] for entry in results: - if ((str(entry['filter']) == filter_name) and (str(entry[filter_on]) == other_name)): + if (str(entry["filter"]) == filter_name) and ( + str(entry[filter_on]) == other_name + ): filtered.append(entry) results = deepcopy(filtered) @@ -490,20 +541,20 @@ def filter_query_results(self, results, datatype): # All instruments: need to filter by readout pattern. 
# Any pattern name not containing "IRS2" is ok # choose readout pattern with the most entries - readpatt_list = [entry['readpatt'] for entry in results] + readpatt_list = [entry["readpatt"] for entry in results] readpatt_set = list(set(readpatt_list)) maxnum = 0 - maxpatt = '' + maxpatt = "" for patt in readpatt_set: - if ((readpatt_list.count(patt) > maxnum) and ('IRS2' not in patt)): + if (readpatt_list.count(patt) > maxnum) and ("IRS2" not in patt): maxnum = readpatt_list.count(patt) maxpatt = patt # Find the readpattern with the largest number of entries readpatt_filtered = [] for entry in results: - if entry['readpatt'] == maxpatt: + if entry["readpatt"] == maxpatt: readpatt_filtered.append(entry) return readpatt_filtered @@ -520,8 +571,8 @@ def get_metadata(self, filename): header = fits.getheader(filename) try: - self.detector = header['DETECTOR'] - self.nints = header['NINTS'] + self.detector = header["DETECTOR"] + self.nints = header["NINTS"] except KeyError as e: logging.error(e) @@ -535,25 +586,27 @@ def get_possible_apertures(self): possible_aperture : list List of acceptible apertures for self.instrument """ - if self.instrument == 'nircam': + if self.instrument == "nircam": possible_apertures = [] for i in range(1, 6): - possible_apertures.append('NRCA{}_FULL'.format(i)) - possible_apertures.append('NRCB{}_FULL'.format(i)) - if self.instrument == 'niriss': - possible_apertures = ['NIS_CEN'] - if self.instrument == 'miri': + possible_apertures.append("NRCA{}_FULL".format(i)) + possible_apertures.append("NRCB{}_FULL".format(i)) + if self.instrument == "niriss": + possible_apertures = ["NIS_CEN"] + if self.instrument == "miri": # Since MIRI is organized a little bit differently than the # other instruments, you can't use aperture names to uniquely # identify the full frame darks/flats from a given detector. # Instead you must use detector names. 
- possible_apertures = [('MIRIMAGE', 'MIRIM_FULL'), - ('MIRIFULONG', 'MIRIM_FULL'), - ('MIRIFUSHORT', 'MIRIM_FULL')] - if self.instrument == 'fgs': - possible_apertures = ['FGS1_FULL', 'FGS2_FULL'] - if self.instrument == 'nirspec': - possible_apertures = ['NRS1_FULL', 'NRS2_FULL'] + possible_apertures = [ + ("MIRIMAGE", "MIRIM_FULL"), + ("MIRIFULONG", "MIRIM_FULL"), + ("MIRIFUSHORT", "MIRIM_FULL"), + ] + if self.instrument == "fgs": + possible_apertures = ["FGS1_FULL", "FGS2_FULL"] + if self.instrument == "nirspec": + possible_apertures = ["NRS1_FULL", "NRS2_FULL"] return possible_apertures def exclude_existing_badpix(self, badpix, pixel_type): @@ -580,17 +633,16 @@ def exclude_existing_badpix(self, badpix, pixel_type): List of y coordinates of new bad pixels """ - if pixel_type not in ['hot', 'dead', 'noisy']: - raise ValueError('Unrecognized bad pixel type: {}'.format(pixel_type)) + if pixel_type not in ["hot", "dead", "noisy"]: + raise ValueError("Unrecognized bad pixel type: {}".format(pixel_type)) - db_entries = session.query(self.pixel_table) \ - .filter(self.pixel_table.type == pixel_type) \ - .filter(self.pixel_table.detector == self.detector) \ - .all() + filters = {"type__iexact": pixel_type, "detector__iexact": self.detector} + records = self.pixel_table.objects.filter(**filters).all() already_found = [] - if len(db_entries) != 0: - for _row in db_entries: + + if len(records) != 0: + for _row in records: x_coords = _row.x_coord y_coords = _row.y_coord for x, y in zip(x_coords, y_coords): @@ -606,8 +658,6 @@ def exclude_existing_badpix(self, badpix, pixel_type): new_pixels_x.append(x) new_pixels_y.append(y) - session.close() - return (new_pixels_x, new_pixels_y) def identify_tables(self): @@ -615,10 +665,12 @@ def identify_tables(self): monitor """ mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] - self.query_table = eval('{}BadPixelQueryHistory'.format(mixed_case_name)) - self.pixel_table = eval('{}BadPixelStats'.format(mixed_case_name)) + self.query_table = eval(f"{mixed_case_name}BadPixelQueryHistory") + self.pixel_table = eval(f"{mixed_case_name}BadPixelStats") - def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_copy, obs_type): + def map_uncal_and_rate_file_lists( + self, uncal_files, rate_files, rate_files_to_copy, obs_type + ): """Copy uncal and rate files from the filesystem to the working directory. Any requested files that are not in the filesystem are noted and skipped. 
Return the file lists with skipped files @@ -653,14 +705,18 @@ def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_c the rate file failed) """ # Copy files from filesystem - uncal_copied_files, uncal_not_copied = copy_files(uncal_files, self.working_data_dir) - rate_copied_files, rate_not_copied = copy_files(rate_files_to_copy, self.working_data_dir) + uncal_copied_files, uncal_not_copied = copy_files( + uncal_files, self.working_data_dir + ) + rate_copied_files, rate_not_copied = copy_files( + rate_files_to_copy, self.working_data_dir + ) # Set any rate files that failed to copy to None so # that we can regenerate them if len(rate_not_copied) > 0: for badfile in rate_not_copied: - rate_files[rate_files.index(badfile)] = 'None' + rate_files[rate_files.index(badfile)] = "None" # Any uncal files that failed to copy must be removed # entirely from the uncal and rate lists @@ -670,20 +726,28 @@ def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_c del uncal_files[bad_index] del rate_files[bad_index] - logging.info('\tNew {} observations: '.format(obs_type)) - logging.info('\tData dir: {}'.format(self.working_data_dir)) - logging.info('\tCopied to data dir: {}'.format(uncal_copied_files)) - logging.info('\tNot copied (failed, or missing from filesystem): {}'.format(uncal_not_copied)) + logging.info("\tNew {} observations: ".format(obs_type)) + logging.info("\tData dir: {}".format(self.working_data_dir)) + logging.info("\tCopied to data dir: {}".format(uncal_copied_files)) + logging.info( + "\tNot copied (failed, or missing from filesystem): {}".format( + uncal_not_copied + ) + ) # After all this, the lists should be the same length # and have a 1-to-1 correspondence if len(uncal_files) != len(rate_files): - print('Lists of {} uncal and rate files have different lengths!!'.format(obs_type)) + print( + "Lists of {} uncal and rate files have different lengths!!".format( + obs_type + ) + ) raise ValueError return uncal_files, rate_files - def most_recent_search(self, file_type='dark'): + def most_recent_search(self, file_type="dark"): """Query the query history database and return the information on the most recent query for the given ``aperture_name`` where the dark monitor was executed. @@ -700,31 +764,34 @@ def most_recent_search(self, file_type='dark'): Date (in MJD) of the ending range of the previous MAST query where the dark monitor was run. """ - if file_type.lower() == 'dark': - run_field = self.query_table.run_bpix_from_darks - elif file_type.lower() == 'flat': - run_field = self.query_table.run_bpix_from_flats - - query = session.query(self.query_table).filter(self.query_table.aperture == self.aperture). 
\ - filter(run_field == True) # noqa: E712 (comparison to true) - - dates = np.zeros(0) - if file_type.lower() == 'dark': - for instance in query: - dates = np.append(dates, instance.dark_end_time_mjd) - elif file_type.lower() == 'flat': - for instance in query: - dates = np.append(dates, instance.flat_end_time_mjd) - - query_count = len(dates) - if query_count == 0: + if file_type.lower() == "dark": + run_field = "run_bpix_from_darks" + sort_field = "-dark_end_time_mjd" + elif file_type.lower() == "flat": + run_field = "run_bpix_from_flats" + sort_field = "-flat_end_time_mjd" + + filters = {"aperture__iexact": self.aperture, run_field: True} + + record = self.query_table.objects.filter(**filters).order_by(sort_field).first() + + # Record is django QuerySet object, when empty QuerySet object is returned () + # the result of record.first() is None + if record is None: query_result = 59607.0 # a.k.a. Jan 28, 2022 == First JWST images (MIRI) - logging.info(('\tNo query history for {}. Beginning search date will be set to {}.' - .format(self.aperture, query_result))) + logging.info( + ( + "\tNo query history for {}. Beginning search date will be set to {}.".format( + self.aperture, query_result + ) + ) + ) else: - query_result = np.max(dates) + if file_type.lower() == "dark": + query_result = record.dark_end_time_mjd + elif file_type.lower() == "flat": + query_result = record.flat_end_time_mjd - session.close() return query_result def make_crds_parameter_dict(self): @@ -736,20 +803,28 @@ def make_crds_parameter_dict(self): Dictionary of parameters, in the format expected by CRDS """ parameters = {} - parameters['INSTRUME'] = self.instrument.upper() - parameters['SUBARRAY'] = 'FULL' - parameters['DATE-OBS'] = datetime.date.today().isoformat() - current_date = datetime.datetime.now() - parameters['TIME-OBS'] = current_date.time().isoformat() - parameters['DETECTOR'] = self.detector.upper() - if self.instrument.upper() == 'NIRCAM': - if parameters['DETECTOR'] in ['NRCALONG', 'NRCBLONG']: - parameters['CHANNEL'] = 'LONG' + parameters["INSTRUME"] = self.instrument.upper() + parameters["SUBARRAY"] = "FULL" + parameters["DATE-OBS"] = datetime.date.today().isoformat() + current_date = datetime.datetime.now(datetime.timezone.utc) + parameters["TIME-OBS"] = current_date.time().isoformat() + parameters["DETECTOR"] = self.detector.upper() + if self.instrument.upper() == "NIRCAM": + if parameters["DETECTOR"] in ["NRCALONG", "NRCBLONG"]: + parameters["CHANNEL"] = "LONG" else: - parameters['CHANNEL'] = 'SHORT' + parameters["CHANNEL"] = "SHORT" return parameters - def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_count_threshold, dark_raw_files, dark_slope_files, dark_file_count_threshold): + def process( + self, + illuminated_raw_files, + illuminated_slope_files, + flat_file_count_threshold, + dark_raw_files, + dark_slope_files, + dark_file_count_threshold, + ): """The main method for processing darks. See module docstrings for further details. 
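
The most_recent_search() rewrite above is the Django ORM idiom for "newest
matching row, with a fallback": filter, sort descending on the timestamp,
take first(), and substitute a default when no row exists. A condensed sketch
of the same pattern (field names as in the monitor; the fallback is the
first-JWST-images epoch used throughout):

    record = (
        self.query_table.objects
        .filter(aperture__iexact=self.aperture, run_bpix_from_darks=True)
        .order_by("-dark_end_time_mjd")  # newest first
        .first()                         # None when nothing matches
    )
    query_result = record.dark_end_time_mjd if record else 59607.0
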
@@ -785,71 +860,115 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun badpix_types = [] illuminated_obstimes = [] if illuminated_raw_files: - logging.info("Found {} uncalibrated flat fields".format(len(illuminated_raw_files))) + logging.info( + "Found {} uncalibrated flat fields".format(len(illuminated_raw_files)) + ) badpix_types.extend(FLATS_BAD_PIXEL_TYPES) - out_exts = defaultdict(lambda: ['jump', '0_ramp_fit']) + out_exts = defaultdict(lambda: ["jump", "0_ramp_fit"]) in_files = [] - for uncal_file, rate_file in zip(illuminated_raw_files, illuminated_slope_files): - logging.info("\tChecking illuminated raw file {} with rate file {}".format(uncal_file, rate_file)) + for uncal_file, rate_file in zip( + illuminated_raw_files, illuminated_slope_files + ): + logging.info( + "\tChecking illuminated raw file {} with rate file {}".format( + uncal_file, rate_file + ) + ) self.get_metadata(uncal_file) - if rate_file == 'None': - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - logging.info('Calling pipeline for {}'.format(uncal_file)) + if rate_file == "None": + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + logging.info("Calling pipeline for {}".format(uncal_file)) logging.info("Copying raw file to {}".format(self.working_data_dir)) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', '1_ramp_fit'] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "1_ramp_fit"] needs_calibration = False for file_type in out_exts[short_name]: - if not os.path.isfile(local_uncal_file.replace("uncal", file_type)): + if not os.path.isfile( + local_uncal_file.replace("uncal", file_type) + ): needs_calibration = True if needs_calibration: in_files.append(local_uncal_file) else: - logging.info("\t\tCalibrated files already exist for {}".format(short_name)) + logging.info( + "\t\tCalibrated files already exist for {}".format( + short_name + ) + ) else: logging.info("\tRate file found for {}".format(uncal_file)) if os.path.isfile(rate_file): copy_files([rate_file], self.working_data_dir) else: - logging.warning("\tRate file {} doesn't actually exist".format(rate_file)) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - logging.info('Calling pipeline for {}'.format(uncal_file)) - logging.info("Copying raw file to {}".format(self.working_data_dir)) + logging.warning( + "\tRate file {} doesn't actually exist".format(rate_file) + ) + short_name = os.path.basename(uncal_file).replace( + "_uncal.fits", "" + ) + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + logging.info("Calling pipeline for {}".format(uncal_file)) + logging.info( + "Copying raw file to {}".format(self.working_data_dir) + ) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', '1_ramp_fit'] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "1_ramp_fit"] needs_calibration = False for file_type in out_exts[short_name]: - if not os.path.isfile(local_uncal_file.replace("uncal", file_type)): + if not os.path.isfile( + 
local_uncal_file.replace("uncal", file_type) + ): needs_calibration = True if needs_calibration: in_files.append(local_uncal_file) else: - logging.info("\t\tCalibrated files already exist for {}".format(short_name)) + logging.info( + "\t\tCalibrated files already exist for {}".format( + short_name + ) + ) outputs = {} if len(in_files) > 0: logging.info("Running pipeline for {} files".format(len(in_files))) - outputs = run_parallel_pipeline(in_files, "uncal", out_exts, self.instrument, jump_pipe=True) + outputs = run_parallel_pipeline( + in_files, "uncal", out_exts, self.instrument, jump_pipe=True + ) index = 0 logging.info("Checking files post-calibration") - for uncal_file, rate_file in zip(illuminated_raw_files, illuminated_slope_files): - logging.info("\tChecking files {}, {}".format(os.path.basename(uncal_file), os.path.basename(rate_file))) - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) + for uncal_file, rate_file in zip( + illuminated_raw_files, illuminated_slope_files + ): + logging.info( + "\tChecking files {}, {}".format( + os.path.basename(uncal_file), os.path.basename(rate_file) + ) + ) + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) if local_uncal_file in outputs: logging.info("\t\tAdding calibrated file.") - illuminated_slope_files[index] = deepcopy(outputs[local_uncal_file][1]) + illuminated_slope_files[index] = deepcopy( + outputs[local_uncal_file][1] + ) else: logging.info("\t\tCalibration was skipped for file") self.get_metadata(illuminated_raw_files[index]) local_ramp_file = local_uncal_file.replace("uncal", "0_ramp_fit") local_rateints_file = local_uncal_file.replace("uncal", "rateints") - if hasattr(self, 'nints') and self.nints > 1: - local_ramp_file = local_ramp_file.replace("0_ramp_fit", "1_ramp_fit") + if hasattr(self, "nints") and self.nints > 1: + local_ramp_file = local_ramp_file.replace( + "0_ramp_fit", "1_ramp_fit" + ) if os.path.isfile(local_ramp_file): logging.info("\t\t\tFound local ramp file") illuminated_slope_files[index] = local_ramp_file @@ -862,11 +981,16 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun index += 1 # Get observation time for all files - illuminated_obstimes.append(instrument_properties.get_obstime(uncal_file)) + illuminated_obstimes.append( + instrument_properties.get_obstime(uncal_file) + ) logging.info("Trimming unfound files.") index = 0 while index < len(illuminated_raw_files): - if illuminated_slope_files[index] is None or illuminated_slope_files[index] == 'None': + if ( + illuminated_slope_files[index] is None + or illuminated_slope_files[index] == "None" + ): logging.info("\tRemoving {}".format(illuminated_raw_files[index])) del illuminated_raw_files[index] del illuminated_slope_files[index] @@ -874,9 +998,9 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun else: index += 1 - min_illum_time = 0. - max_illum_time = 0. - mid_illum_time = 0. 
+ min_illum_time = 0.0 + max_illum_time = 0.0 + mid_illum_time = 0.0 if len(illuminated_obstimes) > 0: min_illum_time = min(illuminated_obstimes) max_illum_time = max(illuminated_obstimes) @@ -896,21 +1020,33 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun # even if the rate file is present, because we also need the jump # and fitops files, which are not saved by default in_files = [] - out_exts = defaultdict(lambda: ['jump', 'fitopt', '0_ramp_fit']) + out_exts = defaultdict(lambda: ["jump", "fitopt", "0_ramp_fit"]) for uncal_file, rate_file in zip(dark_raw_files, dark_slope_files): - logging.info("Checking dark file {} with rate file {}".format(uncal_file, rate_file)) + logging.info( + "Checking dark file {} with rate file {}".format( + uncal_file, rate_file + ) + ) self.get_metadata(uncal_file) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) if not os.path.isfile(local_uncal_file): - logging.info("\tCopying raw file to {}".format(self.working_data_dir)) + logging.info( + "\tCopying raw file to {}".format(self.working_data_dir) + ) copy_files([uncal_file], self.working_data_dir) - if hasattr(self, 'nints') and self.nints > 1: - out_exts[short_name] = ['jump', 'fitopt', '1_ramp_fit'] - local_processed_files = [local_uncal_file.replace("uncal", x) for x in out_exts[short_name]] + if hasattr(self, "nints") and self.nints > 1: + out_exts[short_name] = ["jump", "fitopt", "1_ramp_fit"] + local_processed_files = [ + local_uncal_file.replace("uncal", x) for x in out_exts[short_name] + ] calibrated_data = [os.path.isfile(x) for x in local_processed_files] if not all(calibrated_data): - logging.info('\tCalling pipeline for {} {}'.format(uncal_file, rate_file)) + logging.info( + "\tCalling pipeline for {} {}".format(uncal_file, rate_file) + ) in_files.append(local_uncal_file) dark_jump_files.append(None) dark_fitopt_files.append(None) @@ -926,14 +1062,18 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun outputs = {} if len(in_files) > 0: logging.info("Running pipeline for {} files".format(len(in_files))) - outputs = run_parallel_pipeline(in_files, "uncal", out_exts, self.instrument, jump_pipe=True) + outputs = run_parallel_pipeline( + in_files, "uncal", out_exts, self.instrument, jump_pipe=True + ) index = 0 logging.info("Checking files post-calibration") for uncal_file, rate_file in zip(dark_raw_files, dark_slope_files): logging.info("\tChecking files {}, {}".format(uncal_file, rate_file)) - local_uncal_file = os.path.join(self.working_data_dir, os.path.basename(uncal_file)) - short_name = os.path.basename(uncal_file).replace('_uncal.fits', '') + local_uncal_file = os.path.join( + self.working_data_dir, os.path.basename(uncal_file) + ) + short_name = os.path.basename(uncal_file).replace("_uncal.fits", "") if local_uncal_file in outputs: logging.info("\t\tAdding calibrated files") dark_jump_files[index] = outputs[local_uncal_file][0] @@ -943,21 +1083,31 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun logging.info("\t\tCalibration skipped for file") self.get_metadata(local_uncal_file) local_ramp_file = local_uncal_file.replace("uncal", "0_ramp_fit") - if hasattr(self, 'nints') and self.nints > 1: - local_ramp_file = 
local_ramp_file.replace("0_ramp_fit", "1_ramp_fit")
+                    if hasattr(self, "nints") and self.nints > 1:
+                        local_ramp_file = local_ramp_file.replace(
+                            "0_ramp_fit", "1_ramp_fit"
+                        )
                     if not os.path.isfile(local_uncal_file.replace("uncal", "jump")):
                         logging.info("\t\t\tJump file not found")
                         dark_jump_files[index] = None
                     else:
-                        dark_jump_files[index] = local_uncal_file.replace("uncal", "jump")
+                        dark_jump_files[index] = local_uncal_file.replace(
+                            "uncal", "jump"
+                        )
                     if not os.path.isfile(local_uncal_file.replace("uncal", "fitopt")):
                         logging.info("\t\t\tFitopt file not found")
                         dark_fitopt_files[index] = None
                     else:
-                        dark_fitopt_files[index] = local_uncal_file.replace("uncal", "fitopt")
+                        dark_fitopt_files[index] = local_uncal_file.replace(
+                            "uncal", "fitopt"
+                        )
                     if not os.path.isfile(local_ramp_file):
-                        if os.path.isfile(local_uncal_file.replace("uncal", "rateints")):
-                            dark_slope_files[index] = local_uncal_file.replace("uncal", "rateints")
+                        if os.path.isfile(
+                            local_uncal_file.replace("uncal", "rateints")
+                        ):
+                            dark_slope_files[index] = local_uncal_file.replace(
+                                "uncal", "rateints"
+                            )
                         else:
                             logging.info("\t\t\tRate file not found")
                             dark_slope_files[index] = None
@@ -968,7 +1118,11 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun
             index = 0
             logging.info("Trimming unfound files.")
             while index < len(dark_raw_files):
-                if dark_jump_files[index] is None or dark_fitopt_files[index] is None or dark_slope_files[index] is None:
+                if (
+                    dark_jump_files[index] is None
+                    or dark_fitopt_files[index] is None
+                    or dark_slope_files[index] is None
+                ):
                     logging.info("\tRemoving {}".format(dark_raw_files[index]))
                     del dark_raw_files[index]
                     del dark_jump_files[index]
@@ -992,7 +1146,9 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun
             dark_length = 0
         else:
             dark_length = len(dark_slope_files)
-        if (flat_length < flat_file_count_threshold) and (dark_length < dark_file_count_threshold):
+        if (flat_length < flat_file_count_threshold) and (
+            dark_length < dark_file_count_threshold
+        ):
             logging.info("After removing failed files, not enough new files remain.")
             return

@@ -1001,46 +1157,62 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun
         dead_flux_files = []
         if illuminated_raw_files is not None:
             for illum_file in illuminated_raw_files:
-                ngroup = fits.getheader(illum_file)['NGROUPS']
+                ngroup = fits.getheader(illum_file)["NGROUPS"]
                 if ngroup >= 4:
                     dead_flux_files.append(illum_file)
         if len(dead_flux_files) == 0:
             dead_flux_files = None

         # Instrument-specific preferences from jwst_reffiles meetings
-        if self.instrument in ['nircam', 'niriss', 'fgs']:
-            dead_search_type = 'sigma_rate'
-        elif self.instrument in ['miri', 'nirspec']:
-            dead_search_type = 'absolute_rate'
+        if self.instrument in ["nircam", "niriss", "fgs"]:
+            dead_search_type = "sigma_rate"
+        elif self.instrument in ["miri", "nirspec"]:
+            dead_search_type = "absolute_rate"

-        flat_mean_normalization_method = 'smoothed'
+        flat_mean_normalization_method = "smoothed"

         # Call the bad pixel search module from jwst_reffiles. Lots of
         # other possible parameters. Only specify the non-default params
         # in order to make things easier to read.
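# (Sketch, not part of the patch: the instrument-specific branches above can
#  be written table-driven; the dict name here is invented for illustration.)
#
#     DEAD_SEARCH_TYPE = {"nircam": "sigma_rate", "niriss": "sigma_rate",
#                         "fgs": "sigma_rate", "miri": "absolute_rate",
#                         "nirspec": "absolute_rate"}
#     dead_search_type = DEAD_SEARCH_TYPE[self.instrument]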
- query_string = 'darks_{}_flats_{}_to_{}'.format(self.dark_query_start, self.flat_query_start, self.query_end) - output_file = '{}_{}_{}_bpm.fits'.format(self.instrument, self.aperture, query_string) + query_string = "darks_{}_flats_{}_to_{}".format( + self.dark_query_start, self.flat_query_start, self.query_end + ) + output_file = "{}_{}_{}_bpm.fits".format( + self.instrument, self.aperture, query_string + ) output_file = os.path.join(self.output_dir, output_file) -# logging.info("Calling bad_pixel_mask.bad_pixels") -# logging.info("\tflat_slope_files are: {}".format(illuminated_slope_files)) -# logging.info("\tdead__search_type={}".format(dead_search_type)) -# logging.info("\tflat_mean_normalization_method={}".format(flat_mean_normalization_method)) -# logging.info("\tdead_flux_check_files are: {}".format(dead_flux_files)) -# logging.info("\tdark_slope_files are: {}".format(dark_slope_files)) -# logging.info("\tdark_uncal_files are: {}".format(dark_raw_files)) -# logging.info("\tdark_jump_files are: {}".format(dark_jump_files)) -# logging.info("\tdark_fitopt_files are: {}".format(dark_fitopt_files)) -# logging.info("\toutput_file={}".format(output_file)) - - bad_pixel_mask.bad_pixels(flat_slope_files=illuminated_slope_files, dead_search_type=dead_search_type, - flat_mean_normalization_method=flat_mean_normalization_method, - run_dead_flux_check=True, dead_flux_check_files=dead_flux_files, flux_check=35000, - dark_slope_files=dark_slope_files, dark_uncal_files=dark_raw_files, - dark_jump_files=dark_jump_files, dark_fitopt_files=dark_fitopt_files, plot=False, - output_file=output_file, author='jwst_reffiles', description='A bad pix mask', - pedigree='GROUND', useafter='2222-04-01 00:00:00', - history='This file was created by JWQL', quality_check=False) + # logging.info("Calling bad_pixel_mask.bad_pixels") + # logging.info("\tflat_slope_files are: {}".format(illuminated_slope_files)) + # logging.info("\tdead__search_type={}".format(dead_search_type)) + # logging.info("\tflat_mean_normalization_method={}".format(flat_mean_normalization_method)) + # logging.info("\tdead_flux_check_files are: {}".format(dead_flux_files)) + # logging.info("\tdark_slope_files are: {}".format(dark_slope_files)) + # logging.info("\tdark_uncal_files are: {}".format(dark_raw_files)) + # logging.info("\tdark_jump_files are: {}".format(dark_jump_files)) + # logging.info("\tdark_fitopt_files are: {}".format(dark_fitopt_files)) + # logging.info("\toutput_file={}".format(output_file)) + + bad_pixel_mask.bad_pixels( + flat_slope_files=illuminated_slope_files, + dead_search_type=dead_search_type, + flat_mean_normalization_method=flat_mean_normalization_method, + run_dead_flux_check=True, + dead_flux_check_files=dead_flux_files, + flux_check=35000, + dark_slope_files=dark_slope_files, + dark_uncal_files=dark_raw_files, + dark_jump_files=dark_jump_files, + dark_fitopt_files=dark_fitopt_files, + plot=False, + output_file=output_file, + author="jwst_reffiles", + description="A bad pix mask", + pedigree="GROUND", + useafter="2222-04-01 00:00:00", + history="This file was created by JWQL", + quality_check=False, + ) # Read in the newly-created bad pixel file set_permissions(output_file) @@ -1048,17 +1220,23 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun # Locate and read in the current bad pixel mask parameters = self.make_crds_parameter_dict() - mask_dictionary = crds_tools.get_reffiles(parameters, ['mask'], download=True) - baseline_file = mask_dictionary['mask'] - - if 'NOT FOUND' in 
baseline_file:
-        logging.warning(('\tNo baseline bad pixel file for {} {}. Any bad '
-                         'pixels found as part of this search will be considered new'.format(self.instrument, self.aperture)))
+        mask_dictionary = crds_tools.get_reffiles(parameters, ["mask"], download=True)
+        baseline_file = mask_dictionary["mask"]
+
+        if "NOT FOUND" in baseline_file:
+            logging.warning(
+                (
+                    "\tNo baseline bad pixel file for {} {}. Any bad "
+                    "pixels found as part of this search will be considered new".format(
+                        self.instrument, self.aperture
+                    )
+                )
+            )
             baseline_file = new_badpix_file
             yd, xd = badpix_mask.shape
             baseline_badpix_mask = np.zeros((yd, xd), dtype=int)
         else:
-            logging.info('\tBaseline bad pixel file is {}'.format(baseline_file))
+            logging.info("\tBaseline bad pixel file is {}".format(baseline_file))
             baseline_badpix_mask = fits.getdata(baseline_file)

         # Exclude hot and dead pixels in the current bad pixel mask
@@ -1070,21 +1248,43 @@ def process(self, illuminated_raw_files, illuminated_slope_files, flat_file_coun
             bad_location_list = bad_map_to_list(new_since_reffile, bad_type)

             # Add new hot and dead pixels to the database
-            logging.info('\tFound {} new {} pixels'.format(len(bad_location_list[0]), bad_type))
+            logging.info(
+                "\tFound {} new {} pixels".format(len(bad_location_list[0]), bad_type)
+            )

             if bad_type in FLATS_BAD_PIXEL_TYPES:
-                self.add_bad_pix(bad_location_list, bad_type, illuminated_slope_files,
-                                 min_illum_time, mid_illum_time, max_illum_time, baseline_file)
-                flat_png = create_png_from_fits(illuminated_slope_files[0], self.output_dir)
+                self.add_bad_pix(
+                    bad_location_list,
+                    bad_type,
+                    illuminated_slope_files,
+                    min_illum_time,
+                    mid_illum_time,
+                    max_illum_time,
+                    baseline_file,
+                )
+                flat_png = create_png_from_fits(
+                    illuminated_slope_files[0], self.output_dir
+                )
             elif bad_type in DARKS_BAD_PIXEL_TYPES:
-                self.add_bad_pix(bad_location_list, bad_type, dark_slope_files,
-                                 min_dark_time, mid_dark_time, max_dark_time, baseline_file)
+                self.add_bad_pix(
+                    bad_location_list,
+                    bad_type,
+                    dark_slope_files,
+                    min_dark_time,
+                    mid_dark_time,
+                    max_dark_time,
+                    baseline_file,
+                )
                 dark_png = create_png_from_fits(dark_slope_files[0], self.output_dir)
             else:
-                raise ValueError("Unrecognized type of bad pixel: {}. Cannot update database table.".format(bad_type))
+                raise ValueError(
+                    "Unrecognized type of bad pixel: {}. Cannot update database table.".format(
+                        bad_type
+                    )
+                )

         # Remove raw files, rate files, and pipeline products in order to save disk space
-        files_to_remove = glob(f'{self.working_data_dir}/*.fits')
+        files_to_remove = glob(f"{self.working_data_dir}/*.fits")
         for filename in files_to_remove:
             os.remove(filename)

@@ -1101,11 +1301,11 @@ def run(self):
         For each, we will query MAST, copy new files from the filesystem
         and pass the list of copied files into the ``process()`` method.
         """
-        logging.info('Begin logging for bad_pixel_monitor')
+        logging.info("Begin logging for bad_pixel_monitor")

         # Get the output directory
-        self.working_dir = os.path.join(get_config()['working'], 'bad_pixel_monitor')
-        self.output_dir = os.path.join(get_config()['outputs'], 'bad_pixel_monitor')
+        self.working_dir = os.path.join(get_config()["working"], "bad_pixel_monitor")
+        self.output_dir = os.path.join(get_config()["outputs"], "bad_pixel_monitor")

         # Read in config file that defines the thresholds for the number
         # of dark files that must be present in order for the monitor to run
@@ -1131,13 +1331,13 @@ def run(self):
                 lamp = None

                 # NIRSpec flats use the MIRROR grating.
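                # (MIRROR is the imaging position of the grating wheel, so
                # these lamp exposures are undispersed full-frame images,
                # which is presumably why the query is pinned to it.)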
- if self.instrument == 'nirspec': - grating = 'MIRROR' + if self.instrument == "nirspec": + grating = "MIRROR" # MIRI is unlike the other instruments. We basically treat # the detector as the aperture name because there is no # aperture name for a full frame MRS exposure. - if self.instrument == 'miri': + if self.instrument == "miri": detector_name, aperture_name = aperture self.aperture = detector_name else: @@ -1145,41 +1345,64 @@ def run(self): aperture_name = aperture # In flight, NIRISS plans to take darks using the LINE2 lamp - if self.instrument == 'niriss': - lamp = 'LINE2' + if self.instrument == "niriss": + lamp = "LINE2" # What lamp is most appropriate for NIRSpec? - if self.instrument == 'nirspec': - lamp = 'LINE2' + if self.instrument == "nirspec": + lamp = "LINE2" # What lamp is most appropriate for FGS? # if self.instrument == 'fgs': # lamp = 'G2LAMP1' - logging.info('') - logging.info('Working on aperture {} in {}'.format(aperture, self.instrument)) + logging.info("") + logging.info( + "Working on aperture {} in {}".format(aperture, self.instrument) + ) # Find the appropriate threshold for number of new files needed - match = self.aperture == limits['Aperture'] - flat_file_count_threshold = limits['FlatThreshold'][match].data[0] - dark_file_count_threshold = limits['DarkThreshold'][match].data[0] + match = self.aperture == limits["Aperture"] + flat_file_count_threshold = limits["FlatThreshold"][match].data[0] + dark_file_count_threshold = limits["DarkThreshold"][match].data[0] # Locate the record of the most recent MAST search - self.flat_query_start = self.most_recent_search(file_type='flat') - self.dark_query_start = self.most_recent_search(file_type='dark') - logging.info('\tFlat field query times: {} {}'.format(self.flat_query_start, self.query_end)) - logging.info('\tDark current query times: {} {}'.format(self.dark_query_start, self.query_end)) + self.flat_query_start = self.most_recent_search(file_type="flat") + self.dark_query_start = self.most_recent_search(file_type="dark") + logging.info( + "\tFlat field query times: {} {}".format( + self.flat_query_start, self.query_end + ) + ) + logging.info( + "\tDark current query times: {} {}".format( + self.dark_query_start, self.query_end + ) + ) # Query MAST using the aperture and the time of the most # recent previous search as the starting time. flat_templates = FLAT_EXP_TYPES[instrument] dark_templates = DARK_EXP_TYPES[instrument] - new_flat_entries = mast_query(instrument, flat_templates, self.flat_query_start, self.query_end, - aperture=aperture_name, grating=grating, detector=detector_name, - lamp=lamp) - new_dark_entries = mast_query(instrument, dark_templates, self.dark_query_start, self.query_end, - aperture=aperture_name, detector=detector_name) + new_flat_entries = mast_query( + instrument, + flat_templates, + self.flat_query_start, + self.query_end, + aperture=aperture_name, + grating=grating, + detector=detector_name, + lamp=lamp, + ) + new_dark_entries = mast_query( + instrument, + dark_templates, + self.dark_query_start, + self.query_end, + aperture=aperture_name, + detector=detector_name, + ) # Filter the results # Filtering could be different for flats vs darks. 
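
The per-aperture threshold lookup earlier in this hunk relies on astropy
Table boolean masking. A self-contained sketch, assuming column names that
match the monitor's thresholds file (the values here are invented):

    from astropy.table import Table

    limits = Table({"Aperture": ["NRCA1_FULL", "NRCB1_FULL"],
                    "FlatThreshold": [10, 10],
                    "DarkThreshold": [5, 5]})  # illustrative values

    match = limits["Aperture"] == "NRCA1_FULL"          # boolean row mask
    flat_file_count_threshold = limits["FlatThreshold"][match].data[0]
    dark_file_count_threshold = limits["DarkThreshold"][match].data[0]
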
@@ -1210,58 +1433,141 @@ def run(self): if new_flat_entries: # Exclude ASIC tuning data len_new_flats = len(new_flat_entries) - new_flat_entries = monitor_utils.exclude_asic_tuning(new_flat_entries) + new_flat_entries = monitor_utils.exclude_asic_tuning( + new_flat_entries + ) len_no_asic = len(new_flat_entries) num_asic = len_new_flats - len_no_asic - logging.info("\tFiltering out ASIC tuning files removed {} flat files.".format(num_asic)) - - new_flat_entries = self.filter_query_results(new_flat_entries, datatype='flat') - apcheck_flat_entries = pipeline_tools.aperture_size_check(new_flat_entries, instrument, aperture) - lost_to_bad_metadata = len(new_flat_entries) - len(apcheck_flat_entries) - logging.info('\t{} flat field files ignored due to inconsistency in array size and metadata.'.format(lost_to_bad_metadata)) + logging.info( + "\tFiltering out ASIC tuning files removed {} flat files.".format( + num_asic + ) + ) + + new_flat_entries = self.filter_query_results( + new_flat_entries, datatype="flat" + ) + apcheck_flat_entries = pipeline_tools.aperture_size_check( + new_flat_entries, instrument, aperture + ) + lost_to_bad_metadata = len(new_flat_entries) - len( + apcheck_flat_entries + ) + logging.info( + "\t{} flat field files ignored due to inconsistency in array size and metadata.".format( + lost_to_bad_metadata + ) + ) flat_uncal_files = locate_uncal_files(apcheck_flat_entries) - flat_uncal_files, run_flats = check_for_sufficient_files(flat_uncal_files, instrument, aperture, flat_file_count_threshold, 'flats') - flat_rate_files, flat_rate_files_to_copy = locate_rate_files(flat_uncal_files) + flat_uncal_files, run_flats = check_for_sufficient_files( + flat_uncal_files, + instrument, + aperture, + flat_file_count_threshold, + "flats", + ) + flat_rate_files, flat_rate_files_to_copy = locate_rate_files( + flat_uncal_files + ) else: run_flats = False - flat_uncal_files, flat_rate_files, flat_rate_files_to_copy = None, None, None + flat_uncal_files, flat_rate_files, flat_rate_files_to_copy = ( + None, + None, + None, + ) if new_dark_entries: # Exclude ASIC tuning data len_new_darks = len(new_dark_entries) - new_dark_entries = monitor_utils.exclude_asic_tuning(new_dark_entries) + new_dark_entries = monitor_utils.exclude_asic_tuning( + new_dark_entries + ) len_no_asic = len(new_dark_entries) num_asic = len_new_darks - len_no_asic - logging.info("\tFiltering out ASIC tuning files removed {} dark files.".format(num_asic)) - - new_dark_entries = self.filter_query_results(new_dark_entries, datatype='dark') - apcheck_dark_entries = pipeline_tools.aperture_size_check(new_dark_entries, instrument, aperture) - lost_to_bad_metadata = len(new_dark_entries) - len(apcheck_dark_entries) - logging.info('\t{} dark files ignored due to inconsistency in array size and metadata.'.format(lost_to_bad_metadata)) + logging.info( + "\tFiltering out ASIC tuning files removed {} dark files.".format( + num_asic + ) + ) + + new_dark_entries = self.filter_query_results( + new_dark_entries, datatype="dark" + ) + apcheck_dark_entries = pipeline_tools.aperture_size_check( + new_dark_entries, instrument, aperture + ) + lost_to_bad_metadata = len(new_dark_entries) - len( + apcheck_dark_entries + ) + logging.info( + "\t{} dark files ignored due to inconsistency in array size and metadata.".format( + lost_to_bad_metadata + ) + ) dark_uncal_files = locate_uncal_files(apcheck_dark_entries) - dark_uncal_files, run_darks = check_for_sufficient_files(dark_uncal_files, instrument, aperture, dark_file_count_threshold, 'darks') - 
dark_rate_files, dark_rate_files_to_copy = locate_rate_files(dark_uncal_files) + dark_uncal_files, run_darks = check_for_sufficient_files( + dark_uncal_files, + instrument, + aperture, + dark_file_count_threshold, + "darks", + ) + dark_rate_files, dark_rate_files_to_copy = locate_rate_files( + dark_uncal_files + ) else: run_darks = False - dark_uncal_files, dark_rate_files, dark_rate_files_to_copy = None, None, None + dark_uncal_files, dark_rate_files, dark_rate_files_to_copy = ( + None, + None, + None, + ) # Set up directories for the copied data - ensure_dir_exists(os.path.join(self.working_dir, 'data')) - ensure_dir_exists(os.path.join(self.output_dir, 'data')) - self.working_data_dir = os.path.join(self.working_dir, 'data/{}_{}'.format(self.instrument.lower(), self.aperture.lower())) - self.output_data_dir = os.path.join(self.output_dir, 'data/{}_{}'.format(self.instrument.lower(), self.aperture.lower())) + ensure_dir_exists(os.path.join(self.working_dir, "data")) + ensure_dir_exists(os.path.join(self.output_dir, "data")) + self.working_data_dir = os.path.join( + self.working_dir, + "data/{}_{}".format(self.instrument.lower(), self.aperture.lower()), + ) + self.output_data_dir = os.path.join( + self.output_dir, + "data/{}_{}".format(self.instrument.lower(), self.aperture.lower()), + ) ensure_dir_exists(self.working_data_dir) ensure_dir_exists(self.output_data_dir) # Copy files from filesystem if run_flats: - flat_uncal_files, flat_rate_files = self.map_uncal_and_rate_file_lists(flat_uncal_files, flat_rate_files, flat_rate_files_to_copy, 'flat') + flat_uncal_files, flat_rate_files = ( + self.map_uncal_and_rate_file_lists( + flat_uncal_files, + flat_rate_files, + flat_rate_files_to_copy, + "flat", + ) + ) if run_darks: - dark_uncal_files, dark_rate_files = self.map_uncal_and_rate_file_lists(dark_uncal_files, dark_rate_files, dark_rate_files_to_copy, 'dark') + dark_uncal_files, dark_rate_files = ( + self.map_uncal_and_rate_file_lists( + dark_uncal_files, + dark_rate_files, + dark_rate_files_to_copy, + "dark", + ) + ) # Run the bad pixel monitor if run_flats or run_darks: - self.process(flat_uncal_files, flat_rate_files, flat_file_count_threshold, dark_uncal_files, dark_rate_files, dark_file_count_threshold) + self.process( + flat_uncal_files, + flat_rate_files, + flat_file_count_threshold, + dark_uncal_files, + dark_rate_files, + dark_file_count_threshold, + ) updated_instruments.append(self.instrument) # Update the query history @@ -1275,34 +1581,35 @@ def run(self): else: num_flat_files = len(flat_uncal_files) - new_entry = {'instrument': self.instrument.upper(), - 'aperture': self.aperture, - 'dark_start_time_mjd': self.dark_query_start, - 'dark_end_time_mjd': self.query_end, - 'flat_start_time_mjd': self.flat_query_start, - 'flat_end_time_mjd': self.query_end, - 'dark_files_found': num_dark_files, - 'flat_files_found': num_flat_files, - 'run_bpix_from_darks': run_darks, - 'run_bpix_from_flats': run_flats, - 'run_monitor': run_flats or run_darks, - 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.query_table.__table__.insert(), new_entry) - logging.info('\tUpdated the query history table') + new_entry = { + "instrument": self.instrument.upper(), + "aperture": self.aperture, + "dark_start_time_mjd": self.dark_query_start, + "dark_end_time_mjd": self.query_end, + "flat_start_time_mjd": self.flat_query_start, + "flat_end_time_mjd": self.query_end, + "dark_files_found": num_dark_files, + "flat_files_found": num_flat_files, + 
"run_bpix_from_darks": run_darks, + "run_bpix_from_flats": run_flats, + "run_monitor": run_flats or run_darks, + "entry_date": datetime.datetime.now(datetime.timezone.utc), + } + entry = self.query_table(**new_entry) + entry.save() + logging.info("\tUpdated the query history table") # Update the figures to be shown in the web app. Only update figures # for instruments where the monitor ran for instrument in updated_instruments: BadPixelPlots(instrument) - logging.info(f'Updating web pages for: {updated_instruments}') - logging.info('Bad Pixel Monitor completed successfully.') - + logging.info(f"Updating web pages for: {updated_instruments}") + logging.info("Bad Pixel Monitor completed successfully.") -if __name__ == '__main__': - module = os.path.basename(__file__).strip('.py') +if __name__ == "__main__": + module = os.path.basename(__file__).strip(".py") start_time, log_file = monitor_utils.initialize_instrument_monitor(module) monitor = BadPixels() diff --git a/jwql/instrument_monitors/common_monitors/dark_monitor.py b/jwql/instrument_monitors/common_monitors/dark_monitor.py index 9a1968921..ab4ff673b 100755 --- a/jwql/instrument_monitors/common_monitors/dark_monitor.py +++ b/jwql/instrument_monitors/common_monitors/dark_monitor.py @@ -81,7 +81,7 @@ import os from astropy.io import ascii, fits -from astropy.modeling import models +from astropy.modeling.models import Gaussian1D from astropy.stats import sigma_clipped_stats from astropy.time import Time from bokeh.models import ColorBar, ColumnDataSource, HoverTool, Legend @@ -92,22 +92,26 @@ from sqlalchemy import func from sqlalchemy.sql.expression import and_ -from jwql.database.database_interface import session, engine -from jwql.database.database_interface import NIRCamDarkQueryHistory, NIRCamDarkPixelStats, NIRCamDarkDarkCurrent -from jwql.database.database_interface import NIRISSDarkQueryHistory, NIRISSDarkPixelStats, NIRISSDarkDarkCurrent -from jwql.database.database_interface import MIRIDarkQueryHistory, MIRIDarkPixelStats, MIRIDarkDarkCurrent -from jwql.database.database_interface import NIRSpecDarkQueryHistory, NIRSpecDarkPixelStats, NIRSpecDarkDarkCurrent -from jwql.database.database_interface import FGSDarkQueryHistory, FGSDarkPixelStats, FGSDarkDarkCurrent from jwql.instrument_monitors import pipeline_tools from jwql.shared_tasks.shared_tasks import only_one, run_pipeline, run_parallel_pipeline from jwql.utils import calculations, instrument_properties, mast_utils, monitor_utils from jwql.utils.constants import ASIC_TEMPLATES, DARK_MONITOR_BETWEEN_EPOCH_THRESHOLD_TIME, DARK_MONITOR_MAX_BADPOINTS_TO_PLOT from jwql.utils.constants import JWST_INSTRUMENT_NAMES, FULL_FRAME_APERTURES, JWST_INSTRUMENT_NAMES_MIXEDCASE -from jwql.utils.constants import JWST_DATAPRODUCTS, MINIMUM_DARK_CURRENT_GROUPS, RAPID_READPATTERNS +from jwql.utils.constants import JWST_DATAPRODUCTS, MINIMUM_DARK_CURRENT_GROUPS, ON_GITHUB_ACTIONS, ON_READTHEDOCS, RAPID_READPATTERNS from jwql.utils.logging_functions import log_info, log_fail from jwql.utils.permissions import set_permissions from jwql.utils.utils import copy_files, ensure_dir_exists, get_config, filesystem_path, save_png +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + # Import * is okay here because this module specifically only contains database models + # for 
this monitor + from jwql.website.apps.jwql.monitor_models.dark_current import * # noqa: E402 (module level import not at top of file) + THRESHOLDS_FILE = os.path.join(os.path.split(__file__)[0], 'dark_monitor_file_thresholds.txt') @@ -230,9 +234,9 @@ def add_bad_pix(self, coordinates, pixel_type, files, mean_filename, baseline_fi 'obs_end_time': observation_end_time, 'mean_dark_image_file': os.path.basename(mean_filename), 'baseline_file': os.path.basename(baseline_filename), - 'entry_date': datetime.datetime.now()} - with engine.begin() as connection: - connection.execute(self.pixel_table.__table__.insert(), entry) + 'entry_date': datetime.datetime.now(datetime.timezone.utc)} + entry = self.pixel_table(**entry) + entry.save() def create_mean_slope_figure(self, image, num_files, hotxy=None, deadxy=None, noisyxy=None, baseline_file=None, min_time='', max_time=''): @@ -412,14 +416,15 @@ def exclude_existing_badpix(self, badpix, pixel_type): raise ValueError('Unrecognized bad pixel type: {}'.format(pixel_type)) logging.info("\t\tRunning database query") - db_entries = session.query(self.pixel_table) \ - .filter(self.pixel_table.type == pixel_type) \ - .filter(self.pixel_table.detector == self.detector) \ - .all() + + filters = {"type__iexact": pixel_type, + "detector__iexact": self.detector + } + records = self.pixel_table.objects.filter(**filters).all() already_found = [] - if len(db_entries) != 0: - for _row in db_entries: + if records is not None: + for _row in records: x_coords = _row.x_coord y_coords = _row.y_coord for x, y in zip(x_coords, y_coords): @@ -442,7 +447,6 @@ def exclude_existing_badpix(self, badpix, pixel_type): logging.info("\t\tKeeping {} {} pixels".format(len(new_pixels_x), pixel_type)) - session.close() return (new_pixels_x, new_pixels_y) def exclude_too_few_groups(self, result_list): @@ -521,29 +525,15 @@ def get_baseline_filename(self): filename : str Name of fits file containing the baseline image """ - - subq = session.query(self.pixel_table.detector, - func.max(self.pixel_table.entry_date).label('maxdate') - ).group_by(self.pixel_table.detector).subquery('t2') - - query = session.query(self.pixel_table).join( - subq, - and_( - self.pixel_table.detector == self.detector, - self.pixel_table.entry_date == subq.c.maxdate - ) - ) - - count = query.count() - if not count: - filename = None - else: - filename = query.all()[0].baseline_file + record = self.pixel_table.objects.filter(detector__iexact=self.detector).order_by("-obs_end_time").first() + if record is not None: + filename = record.baseline_file # Specify the full path filename = os.path.join(get_config()['outputs'], 'dark_monitor', 'mean_slope_images', filename) logging.info('Baseline filename: {}'.format(filename)) + else: + filename = None - session.close() return filename def identify_tables(self): @@ -552,9 +542,9 @@ def identify_tables(self): """ mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] - self.query_table = eval('{}DarkQueryHistory'.format(mixed_case_name)) - self.pixel_table = eval('{}DarkPixelStats'.format(mixed_case_name)) - self.stats_table = eval('{}DarkDarkCurrent'.format(mixed_case_name)) + self.query_table = eval(f'{mixed_case_name}DarkQueryHistory') + self.pixel_table = eval(f'{mixed_case_name}DarkPixelStats') + self.stats_table = eval(f'{mixed_case_name}DarkDarkCurrent') def most_recent_search(self): """Query the query history database and return the information @@ -567,23 +557,18 @@ def most_recent_search(self): Date (in MJD) of the ending range of the previous MAST query 
where the dark monitor was run. """ - query = session.query(self.query_table).filter(self.query_table.aperture == self.aperture, - self.query_table.readpattern == self.readpatt). \ - filter(self.query_table.run_monitor == True) # noqa: E348 (comparison to true) - - dates = np.zeros(0) - for instance in query: - dates = np.append(dates, instance.end_time_mjd) + filters = {"aperture__iexact": self.aperture, + "readpattern__iexact": self.readpatt, + "run_monitor": True} + record = self.query_table.objects.filter(**filters).order_by("-end_time_mjd").first() - query_count = len(dates) - if query_count == 0: + if record is None: query_result = 59607.0 # a.k.a. Jan 28, 2022 == First JWST images (MIRI) logging.info(('\tNo query history for {} with {}. Beginning search date will be set to {}.' .format(self.aperture, self.readpatt, query_result))) else: - query_result = np.max(dates) + query_result = record.end_time_mjd - session.close() return query_result def noise_check(self, new_noise_image, baseline_noise_image, threshold=1.5): @@ -671,7 +656,8 @@ def overplot_bad_pix(self, pix_type, coords, values): # Overplot the bad pixel locations badpixplots[pix_type] = self.plot.circle(x=f'pixels_x', y=f'pixels_y', - source=sources[pix_type], color=colors[pix_type]) + source=sources[pix_type], color=colors[pix_type], radius=0.5, + radius_dimension='y', radius_units='data') # Add to the legend if numpix > 0: @@ -895,12 +881,12 @@ def process(self, file_list): 'double_gauss_width2': double_gauss_params[key][5], 'double_gauss_chisq': double_gauss_chisquared[key], 'mean_dark_image_file': os.path.basename(mean_slope_file), - 'hist_dark_values': bins[key], - 'hist_amplitudes': histogram[key], - 'entry_date': datetime.datetime.now() + 'hist_dark_values': list(bins[key]), + 'hist_amplitudes': list(histogram[key]), + 'entry_date': datetime.datetime.now(datetime.timezone.utc) } - with engine.begin() as connection: - connection.execute(self.stats_table.__table__.insert(), dark_db_entry) + entry = self.stats_table(**dark_db_entry) + entry.save() def read_baseline_slope_image(self, filename): """Read in a baseline mean slope image and associated standard @@ -951,7 +937,7 @@ def run(self): self.query_end = Time.now().mjd # Loop over all instruments - for instrument in ['miri', 'nircam']: # JWST_INSTRUMENT_NAMES: + for instrument in JWST_INSTRUMENT_NAMES: self.instrument = instrument logging.info(f'\n\nWorking on {instrument}') @@ -981,6 +967,7 @@ def run(self): # Locate the record of the most recent MAST search self.query_start = self.most_recent_search() + logging.info(f'\tQuery times: {self.query_start} {self.query_end}') # Query MAST using the aperture and the time of the @@ -1124,11 +1111,10 @@ def run(self): 'end_time_mjd': batch_end_time, 'files_found': len(dark_files), 'run_monitor': monitor_run, - 'entry_date': datetime.datetime.now()} + 'entry_date': datetime.datetime.now(datetime.timezone.utc)} - with engine.begin() as connection: - connection.execute( - self.query_table.__table__.insert(), new_entry) + entry = self.query_table(**new_entry) + entry.save() logging.info('\tUpdated the query history table') logging.info('NEW ENTRY: ') logging.info(new_entry) @@ -1146,11 +1132,10 @@ def run(self): 'end_time_mjd': self.query_end, 'files_found': len(new_entries), 'run_monitor': monitor_run, - 'entry_date': datetime.datetime.now()} + 'entry_date': datetime.datetime.now(datetime.timezone.utc)} - with engine.begin() as connection: - connection.execute( - self.query_table.__table__.insert(), new_entry) + entry = 
self.query_table(**new_entry) + entry.save() logging.info('\tUpdated the query history table') logging.info('NEW ENTRY: ') logging.info(new_entry) @@ -1546,7 +1531,7 @@ def stats_by_amp(self, image, amps): amplitude, peak, width = calculations.gaussian1d_fit(bin_centers, hist, initial_params) gaussian_params[key] = [amplitude, peak, width] - gauss_fit_model = models.Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0]) + gauss_fit_model = Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0]) gauss_fit = gauss_fit_model(bin_centers) positive = hist > 0 diff --git a/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py b/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py index f07e48875..12e593f7c 100755 --- a/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py +++ b/jwql/instrument_monitors/common_monitors/edb_telemetry_monitor.py @@ -2097,7 +2097,8 @@ def plot_every_change_data(data, mnem_name, units, show_plot=False, savefig=True source = ColumnDataSource(data={'x': val_times, 'y': val_data, 'dep': dependency_val}) ldata = fig.line(x='x', y='y', line_width=1, line_color=Turbo256[color], source=source, legend_label=key) - cdata = fig.circle(x='x', y='y', fill_color=Turbo256[color], size=8, source=source, legend_label=key) + cdata = fig.circle(x='x', y='y', fill_color=Turbo256[color], source=source, legend_label=key, radius=4, + radius_dimension='y', radius_units='screen') hover_tool = HoverTool(tooltips=[('Value', '@dep'), ('Data', '@y{1.11111}'), diff --git a/jwql/instrument_monitors/nircam_monitors/claw_monitor.py b/jwql/instrument_monitors/nircam_monitors/claw_monitor.py old mode 100644 new mode 100755 index b73d916c0..9978ec182 --- a/jwql/instrument_monitors/nircam_monitors/claw_monitor.py +++ b/jwql/instrument_monitors/nircam_monitors/claw_monitor.py @@ -192,6 +192,7 @@ def make_background_plots(self, plot_type='bkg'): df = df[df['stddev'] != 0] # older data has no accurate stddev measures plot_data = df['stddev'].values if plot_type == 'model': + df = df[np.isfinite(df['total_bkg'])] # the claw monitor did not track model measurements at first plot_data = df['median'].values / df['total_bkg'].values plot_expstarts = df['expstart_mjd'].values @@ -300,7 +301,11 @@ def process(self): # Get predicted background level using JWST background tool ra, dec = hdu[1].header['RA_V1'], hdu[1].header['DEC_V1'] - wv = self.filter_wave[self.fltr.upper()] + if ('N' in self.pupil.upper()) | ('M' in self.pupil.upper()): + fltr_wv = self.pupil.upper() + else: + fltr_wv = self.fltr.upper() + wv = self.filter_wave[fltr_wv] date = hdu[0].header['DATE-BEG'] doy = int(Time(date).yday.split(':')[1]) try: @@ -332,7 +337,7 @@ def process(self): 'skyflat_filename': os.path.basename(self.outfile), 'doy': float(doy), 'total_bkg': float(total_bkg), - 'entry_date': datetime.datetime.now() + 'entry_date': datetime.datetime.now(datetime.timezone.utc) } entry = self.stats_table(**claw_db_entry) entry.save() @@ -423,11 +428,13 @@ def run(self): mast_table = self.query_mast() logging.info('{} files found between {} and {}.'.format(len(mast_table), self.query_start_mjd, self.query_end_mjd)) - # Define pivot wavelengths - self.filter_wave = {'F070W': 0.704, 'F090W': 0.902, 'F115W': 1.154, 'F150W': 1.501, 'F150W2': 1.659, - 'F200W': 1.989, 'F212N': 2.121, 'F250M': 2.503, 'F277W': 2.762, 'F300M': 2.989, - 'F322W2': 3.232, 'F356W': 3.568, 'F410M': 4.082, 'F430M': 4.281, 'F444W': 4.408, - 'F480M': 4.874} + # Define pivot wavelengths - last downloaded March 8 2024 
from: + # https://jwst-docs.stsci.edu/jwst-near-infrared-camera/nircam-instrumentation/nircam-filters + self.filter_wave = {'F070W': 0.704, 'F090W': 0.901, 'F115W': 1.154, 'F140M': 1.404, 'F150W': 1.501, 'F162M': 1.626, 'F164N': 1.644, + 'F150W2': 1.671, 'F182M': 1.845, 'F187N': 1.874, 'F200W': 1.99, 'F210M': 2.093, 'F212N': 2.12, 'F250M': 2.503, + 'F277W': 2.786, 'F300M': 2.996, 'F322W2': 3.247, 'F323N': 3.237, 'F335M': 3.365, 'F356W': 3.563, 'F360M': 3.621, + 'F405N': 4.055, 'F410M': 4.092, 'F430M': 4.28, 'F444W': 4.421, 'F460M': 4.624, 'F466N': 4.654, 'F470N': 4.707, + 'F480M': 4.834} # Create observation-level median stacks for each filter/pupil combo, in pixel-space combos = np.array(['{}_{}_{}_{}'.format(str(row['program']), row['observtn'], row['filter'], row['pupil']).lower() for row in mast_table]) @@ -469,7 +476,7 @@ def run(self): 'start_time_mjd': self.query_start_mjd, 'end_time_mjd': self.query_end_mjd, 'run_monitor': monitor_run, - 'entry_date': datetime.datetime.now()} + 'entry_date': datetime.datetime.now(datetime.timezone.utc)} entry = self.query_table(**new_entry) entry.save() diff --git a/jwql/instrument_monitors/nirspec_monitors/ta_monitors/msata_monitor.py b/jwql/instrument_monitors/nirspec_monitors/ta_monitors/msata_monitor.py index 55abcf95e..16f11460f 100755 --- a/jwql/instrument_monitors/nirspec_monitors/ta_monitors/msata_monitor.py +++ b/jwql/instrument_monitors/nirspec_monitors/ta_monitors/msata_monitor.py @@ -9,7 +9,7 @@ # Sep 2022 - Vr. 1.3: Modified ColumnDataSource so that data could be recovered # from an html file of a previous run of the monitor and # included the code to read and format the data from the html file - +# Apr 2024 - Vr. 1.4: Removed html webscraping and now store data in django models """ This module contains the code for the NIRSpec Multi Shutter Array Target @@ -25,6 +25,7 @@ ______ - Maria Pena-Guerrero - Melanie Clarke + - Mees Fix Use --- @@ -33,13 +34,11 @@ """ - # general imports -import json import os import logging -import shutil from datetime import datetime, timezone, timedelta +from dateutil import parser from random import randint import numpy as np @@ -49,24 +48,40 @@ from bokeh.embed import components from bokeh.layouts import gridplot, layout from bokeh.models import ( - ColumnDataSource, Range1d, CustomJS, CustomJSFilter, CDSView, - Span, Label, DateRangeSlider) + ColumnDataSource, + Range1d, + CustomJS, + CustomJSFilter, + CDSView, + Span, + Label, + DateRangeSlider, +) from bokeh.models.tools import HoverTool, BoxSelectTool from bokeh.plotting import figure, save, output_file -from bs4 import BeautifulSoup -from sqlalchemy.sql.expression import and_ # jwql imports -from jwql.database.database_interface import session, engine -from jwql.database.database_interface import NIRSpecTAQueryHistory, NIRSpecTAStats +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils import monitor_utils from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE from jwql.utils.logging_functions import log_info, log_fail from jwql.utils.utils import ensure_dir_exists, filesystem_path, get_config +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + from jwql.website.apps.jwql.monitor_models.ta import ( + NIRSpecMsataStats, + NIRSpecTaQueryHistory, + ) # noqa: E402 (module level 
import not at top of file)
+
-class MSATA():
-    """ Class for executing the NIRSpec MSATA monitor.
+class MSATA:
+    """Class for executing the NIRSpec MSATA monitor.
 
     This class will search for new MSATA current files in the file systems
     for NIRSpec and will run the monitor on these files. The monitor will
@@ -94,50 +109,241 @@ class MSATA():
     """
 
     def __init__(self):
-        """ Initialize an instance of the MSATA class """
+        """Initialize an instance of the MSATA class"""
+
         # Very beginning of intake of images: Jan 28, 2022 == First JWST images (MIRI)
         self.query_very_beginning = 59607.0
 
+        # Set instrument and aperture
+        self.instrument = "nirspec"
+        self.aperture = "NRS_FULL_MSA"
+
         # dictionary to define required keywords to extract MSATA data and where it lives
-        self.keywds2extract = {'FILENAME': {'loc': 'main_hdr', 'alt_key': None, 'name': 'filename', 'type': str},
-                               'DATE-BEG': {'loc': 'main_hdr', 'alt_key': None, 'name': 'date_obs', 'type': str},
-                               'OBS_ID': {'loc': 'main_hdr', 'alt_key': None, 'name': 'visit_id', 'type': str},
-                               'FILTER': {'loc': 'main_hdr', 'alt_key': 'FWA_POS', 'name': 'tafilter', 'type': str},
-                               'DETECTOR': {'loc': 'main_hdr', 'alt_key': None, 'name': 'detector', 'type': str},
-                               'READOUT': {'loc': 'main_hdr', 'alt_key': 'READPATT', 'name': 'readout', 'type': str},
-                               'SUBARRAY': {'loc': 'main_hdr', 'alt_key': None, 'name': 'subarray', 'type': str},
-                               'NUMREFST': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'num_refstars', 'type': int},
-                               'TASTATUS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'ta_status', 'type': str},
-                               'STAT_RSN': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'status_rsn', 'type': str},
-                               'V2HFOFFS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'v2halffacet', 'type': float},
-                               'V3HFOFFS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'v3halffacet', 'type': float},
-                               'V2MSACTR': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'v2msactr', 'type': float},
-                               'V3MSACTR': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'v3msactr', 'type': float},
-                               'FITXOFFS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsv2offset', 'type': float},
-                               'FITYOFFS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsv3offset', 'type': float},
-                               'OFFSTMAG': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsoffsetmag', 'type': float},
-                               'FITROFFS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsrolloffset', 'type': float},
-                               'FITXSIGM': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsv2sigma', 'type': float},
-                               'FITYSIGM': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsv3sigma', 'type': float},
-                               'ITERATNS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'lsiterations', 'type': int},
-                               'GUIDERID': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'guidestarid', 'type': str},
-                               'IDEAL_X': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'guidestarx', 'type': float},
-                               'IDEAL_Y': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'guidestary', 'type': float},
-                               'IDL_ROLL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'guidestarroll', 'type': float},
-                               'SAM_X': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'samx', 'type': float},
-                               'SAM_Y': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'samy', 'type': float},
-                               'SAM_ROLL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'samroll', 'type': float},
-                               'box_peak_value': {'loc': 'ta_table', 'alt_key': None, 'name': 'box_peak_value', 'type': float},
-                               'reference_star_mag': {'loc': 'ta_table', 'alt_key': None, 'name': 'reference_star_mag', 'type': float},
-                               'convergence_status': {'loc': 'ta_table', 'alt_key': None, 'name': 'convergence_status', 'type': str},
-                               'reference_star_number': {'loc': 'ta_table', 'alt_key': None, 'name': 'reference_star_number', 'type': int},
-                               'lsf_removed_status': {'loc': 'ta_table', 'alt_key': None, 'name': 'lsf_removed_status', 'type': str},
-                               'lsf_removed_reason': {'loc': 'ta_table', 'alt_key': None, 'name': 'lsf_removed_reason', 'type': str},
-                               'lsf_removed_x': {'loc': 'ta_table', 'alt_key': None, 'name': 'lsf_removed_x', 'type': float},
-                               'lsf_removed_y': {'loc': 'ta_table', 'alt_key': None, 'name': 'lsf_removed_y', 'type': float},
-                               'planned_v2': {'loc': 'ta_table', 'alt_key': None, 'name': 'planned_v2', 'type': float},
-                               'planned_v3': {'loc': 'ta_table', 'alt_key': None, 'name': 'planned_v3', 'type': float},
-                               'FITTARGS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'stars_in_fit', 'type': int}}
+        self.keywds2extract = {
+            "FILENAME": {
+                "loc": "main_hdr",
+                "alt_key": None,
+                "name": "filename",
+                "type": str,
+            },
+            "DATE-BEG": {
+                "loc": "main_hdr",
+                "alt_key": None,
+                "name": "date_obs",
+                "type": str,
+            },
+            "OBS_ID": {
+                "loc": "main_hdr",
+                "alt_key": None,
+                "name": "visit_id",
+                "type": str,
+            },
+            "FILTER": {
+                "loc": "main_hdr",
+                "alt_key": "FWA_POS",
+                "name": "tafilter",
+                "type": str,
+            },
+            "DETECTOR": {
+                "loc": "main_hdr",
+                "alt_key": None,
+                "name": "detector",
+                "type": str,
+            },
+            "READOUT": {
+                "loc": "main_hdr",
+                "alt_key": "READPATT",
+                "name": "readout",
+                "type": str,
+            },
+            "SUBARRAY": {
+                "loc": "main_hdr",
+                "alt_key": None,
+                "name": "subarray",
+                "type": str,
+            },
+            "NUMREFST": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "num_refstars",
+                "type": int,
+            },
+            "TASTATUS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "ta_status",
+                "type": str,
+            },
+            "STAT_RSN": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "status_rsn",
+                "type": str,
+            },
+            "V2HFOFFS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "v2halffacet",
+                "type": float,
+            },
+            "V3HFOFFS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "v3halffacet",
+                "type": float,
+            },
+            "V2MSACTR": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "v2msactr",
+                "type": float,
+            },
+            "V3MSACTR": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "v3msactr",
+                "type": float,
+            },
+            "FITXOFFS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsv2offset",
+                "type": float,
+            },
+            "FITYOFFS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsv3offset",
+                "type": float,
+            },
+            "OFFSTMAG": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsoffsetmag",
+                "type": float,
+            },
+            "FITROFFS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsrolloffset",
+                "type": float,
+            },
+            "FITXSIGM": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsv2sigma",
+                "type": float,
+            },
+            "FITYSIGM": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsv3sigma",
+                "type": float,
+            },
+            "ITERATNS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "lsiterations",
+                "type": int,
+            },
+            "GUIDERID": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "guidestarid",
+                "type": str,
+            },
+            "IDEAL_X": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "guidestarx",
+                "type": float,
+            },
+            "IDEAL_Y": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "guidestary",
+                "type": float,
+            },
+            "IDL_ROLL": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "guidestarroll",
+                "type": float,
+            },
+            "SAM_X": {"loc": "ta_hdr", "alt_key": None, "name": "samx", "type": float},
+            "SAM_Y": {"loc": "ta_hdr", "alt_key": None, "name": "samy", "type": float},
+            "SAM_ROLL": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "samroll",
+                "type": float,
+            },
+            "box_peak_value": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "box_peak_value",
+                "type": float,
+            },
+            "reference_star_mag": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "reference_star_mag",
+                "type": float,
+            },
+            "convergence_status": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "convergence_status",
+                "type": str,
+            },
+            "reference_star_number": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "reference_star_number",
+                "type": int,
+            },
+            "lsf_removed_status": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "lsf_removed_status",
+                "type": str,
+            },
+            "lsf_removed_reason": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "lsf_removed_reason",
+                "type": str,
+            },
+            "lsf_removed_x": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "lsf_removed_x",
+                "type": float,
+            },
+            "lsf_removed_y": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "lsf_removed_y",
+                "type": float,
+            },
+            "planned_v2": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "planned_v2",
+                "type": float,
+            },
+            "planned_v3": {
+                "loc": "ta_table",
+                "alt_key": None,
+                "name": "planned_v3",
+                "type": float,
+            },
+            "FITTARGS": {
+                "loc": "ta_hdr",
+                "alt_key": None,
+                "name": "stars_in_fit",
+                "type": int,
+            },
+        }
 
         # initialize attributes to be set later
         self.source = None
@@ -147,7 +353,7 @@ def __init__(self):
         self.date_view = None
 
     def get_tainfo_from_fits(self, fits_file):
-        """ Get the TA information from the fits file
+        """Get the TA information from the fits file
 
         Parameters
         ----------
         fits_file: str
@@ -161,23 +367,23 @@ def get_tainfo_from_fits(self, fits_file):
         with fits.open(fits_file) as ff:
             # make sure this is a MSATA file
             for hdu in ff:
-                if 'MSA_TARG_ACQ' in hdu.name:
+                if "MSA_TARG_ACQ" in hdu.name:
                     msata = True
                     break
             if not msata:
                 return None
             main_hdr = ff[0].header
             try:
-                ta_hdr = ff['MSA_TARG_ACQ'].header
-                ta_table = ff['MSA_TARG_ACQ'].data
+                ta_hdr = ff["MSA_TARG_ACQ"].header
+                ta_table = ff["MSA_TARG_ACQ"].data
             except KeyError:
-                no_ta_ext_msg = 'No TARG_ACQ extension in file '+fits_file
+                no_ta_ext_msg = "No TARG_ACQ extension in file " + fits_file
                 return no_ta_ext_msg
         msata_info = [main_hdr, ta_hdr, ta_table]
         return msata_info
 
     def get_msata_data(self, new_filenames):
-        """ Get the TA information from the MSATA text table
+        """Get the TA information from the MSATA text table
 
         Parameters
        ----------
         new_filenames: list
@@ -200,28 +406,30 @@ def get_msata_data(self, new_filenames):
             main_hdr, ta_hdr, ta_table = msata_info
             file_data_dict, file_errs = {}, []
             for key, key_dict in self.keywds2extract.items():
-                key_name = key_dict['name']
+                key_name = key_dict["name"]
                 if key_name not in file_data_dict:
                     file_data_dict[key_name] = []
                 ext = main_hdr
-                if key_dict['loc'] == 'ta_hdr':
+                if key_dict["loc"] == "ta_hdr":
                     ext = ta_hdr
-                if key_dict['loc'] == 'ta_table':
+                if key_dict["loc"] == "ta_table":
                     ext = ta_table
                 try:
                     val = ext[key]
-                    if key == 'filename':
+                    if key == "filename":
                         val = fits_file
                 except KeyError:
-                    if key_dict['alt_key'] is not None:
+                    if key_dict["alt_key"] is not None:
                         try:
-                            val = ext[key_dict['alt_key']]
+                            val = ext[key_dict["alt_key"]]
                         except (NameError, TypeError) as error:
-                            msg = error+' in file '+fits_file
+                            msg = error + " in file " + fits_file
                             file_errs.append(msg)
                             break
                     else:
-                        msg = 'Keyword '+key+' not found. Skipping file '+fits_file
+                        msg = (
+                            "Keyword " + key + " not found. Skipping file " + fits_file
+                        )
                         file_errs.append(msg)
                         break
             """ UNCOMMENT THIS BLOCK IN CASE WE DO WANT TO GET RID OF the 999.0 values
@@ -243,7 +451,9 @@ def get_msata_data(self, new_filenames):
                 # if msata_dict is not empty then extend the lists
                 else:
                     for msata_dict_key in msata_dict:
-                        msata_dict[msata_dict_key].extend(file_data_dict[msata_dict_key])
+                        msata_dict[msata_dict_key].extend(
+                            file_data_dict[msata_dict_key]
+                        )
             else:
                 no_ta_ext_msgs.extend(file_errs)
         # create the pandas dataframe
@@ -252,19 +462,17 @@ def get_msata_data(self, new_filenames):
 
     def add_time_column(self):
         """Add time column to data source, to be used by all plots."""
-        date_obs = self.source.data['date_obs']
-        if 'time_arr' not in self.source.data:
-            time_arr = []
-            for do_str in date_obs:
-                # convert time string into an array of time (this is in UT)
-                t = datetime.fromisoformat(do_str)
-                time_arr.append(t)
+        date_obs = self.source.data["date_obs"].astype(str)
+        time_arr = [self.add_timezone(do_str) for do_str in date_obs]
+        self.source.data["time_arr"] = time_arr
 
-            # add to the bokeh data structure
-            self.source.data["time_arr"] = time_arr
+    def add_timezone(self, date_str):
+        """Method to bypass timezone warning from Django"""
+        dt_timezone = parser.parse(date_str).replace(tzinfo=timezone.utc)
+        return dt_timezone
 
     def plt_status(self):
-        """ Plot the MSATA status versus time.
+        """Plot the MSATA status versus time.
 
         Parameters
         ----------
         None
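The new add_timezone helper replaces datetime.fromisoformat with a parse that always attaches UTC, so Django stops warning about naive datetimes. A minimal standalone sketch of the same conversion (to_utc is a hypothetical name; dateutil's parser is what the monitor imports):

    from datetime import timezone
    from dateutil import parser

    def to_utc(date_str):
        # parse the DATE-BEG string and pin it to UTC
        return parser.parse(date_str).replace(tzinfo=timezone.utc)

    to_utc("2022-01-28T12:00:00")  # -> datetime(2022, 1, 28, 12, 0, tzinfo=timezone.utc)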
status", "@ta_status"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -318,7 +540,7 @@ def plt_status(self): return plot def plt_residual_offsets(self): - """ Plot the residual Least Squares V2 and V3 offsets + """Plot the residual Least Squares V2 and V3 offsets Parameters ---------- None @@ -327,41 +549,72 @@ def plt_residual_offsets(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="MSATA Least Squares Residual V2-V3 Offsets", - x_axis_label='Least Squares Residual V2 Offset', - y_axis_label='Least Squares Residual V3 Offset') - plot.circle(x='lsv2offset', y='lsv3offset', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) - - v2halffacet, v3halffacet = self.source.data['v2halffacet'], self.source.data['v3halffacet'] + plot = figure( + title="MSATA Least Squares Residual V2-V3 Offsets", + x_axis_label="Least Squares Residual V2 Offset", + y_axis_label="Least Squares Residual V3 Offset", + ) + plot.scatter( + marker="circle", + x="lsv2offset", + y="lsv3offset", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) + + v2halffacet, v3halffacet = ( + self.source.data["v2halffacet"], + self.source.data["v3halffacet"], + ) xstart, ystart, ray_length = -1 * v2halffacet[0], -1 * v3halffacet[0], 0.05 - plot.ray(x=xstart - ray_length / 2.0, y=ystart, length=ray_length, angle_units="deg", - angle=0, line_color='purple', line_width=3) - plot.ray(x=xstart, y=ystart - ray_length / 2.0, length=ray_length, angle_units="deg", - angle=90, line_color='purple', line_width=3) - hflabel = Label(x=xstart / 3.0, y=ystart, y_units='data', text='-V2, -V3 half-facets values') + plot.ray( + x=xstart - ray_length / 2.0, + y=ystart, + length=ray_length, + angle_units="deg", + angle=0, + line_color="purple", + line_width=3, + ) + plot.ray( + x=xstart, + y=ystart - ray_length / 2.0, + length=ray_length, + angle_units="deg", + angle=90, + line_color="purple", + line_width=3, + ) + hflabel = Label( + x=xstart / 3.0, y=ystart, y_units="data", text="-V2, -V3 half-facets values" + ) plot.add_layout(hflabel) plot.x_range = Range1d(-0.5, 0.5) plot.y_range = Range1d(-0.5, 0.5) # mark origin lines - vline = Span(location=0, dimension='height', line_color='black', line_width=0.7) - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + vline = Span(location=0, dimension="height", line_color="black", line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([vline, hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS roll offset', '@lsrolloffset'), - ('LS V2 offset', '@lsv2offset'), - ('LS V3 offset', '@lsv3offset'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS roll offset", "@lsrolloffset"), + ("LS V2 offset", "@lsv2offset"), + ("LS V3 offset", "@lsv3offset"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add 
@@ -318,7 +540,7 @@ def plt_status(self):
 
         return plot
 
     def plt_residual_offsets(self):
-        """ Plot the residual Least Squares V2 and V3 offsets
+        """Plot the residual Least Squares V2 and V3 offsets
 
         Parameters
         ----------
         None
@@ -327,41 +549,72 @@ def plt_residual_offsets(self):
         plot: bokeh plot object
         """
         # create a new bokeh plot
-        plot = figure(title="MSATA Least Squares Residual V2-V3 Offsets",
-                      x_axis_label='Least Squares Residual V2 Offset',
-                      y_axis_label='Least Squares Residual V3 Offset')
-        plot.circle(x='lsv2offset', y='lsv3offset', source=self.source,
-                    color="blue", size=7, fill_alpha=0.3, view=self.date_view)
-
-        v2halffacet, v3halffacet = self.source.data['v2halffacet'], self.source.data['v3halffacet']
+        plot = figure(
+            title="MSATA Least Squares Residual V2-V3 Offsets",
+            x_axis_label="Least Squares Residual V2 Offset",
+            y_axis_label="Least Squares Residual V3 Offset",
+        )
+        plot.scatter(
+            marker="circle",
+            x="lsv2offset",
+            y="lsv3offset",
+            source=self.source,
+            color="blue",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
+
+        v2halffacet, v3halffacet = (
+            self.source.data["v2halffacet"],
+            self.source.data["v3halffacet"],
+        )
         xstart, ystart, ray_length = -1 * v2halffacet[0], -1 * v3halffacet[0], 0.05
-        plot.ray(x=xstart - ray_length / 2.0, y=ystart, length=ray_length, angle_units="deg",
-                 angle=0, line_color='purple', line_width=3)
-        plot.ray(x=xstart, y=ystart - ray_length / 2.0, length=ray_length, angle_units="deg",
-                 angle=90, line_color='purple', line_width=3)
-        hflabel = Label(x=xstart / 3.0, y=ystart, y_units='data', text='-V2, -V3 half-facets values')
+        plot.ray(
+            x=xstart - ray_length / 2.0,
+            y=ystart,
+            length=ray_length,
+            angle_units="deg",
+            angle=0,
+            line_color="purple",
+            line_width=3,
+        )
+        plot.ray(
+            x=xstart,
+            y=ystart - ray_length / 2.0,
+            length=ray_length,
+            angle_units="deg",
+            angle=90,
+            line_color="purple",
+            line_width=3,
+        )
+        hflabel = Label(
+            x=xstart / 3.0, y=ystart, y_units="data", text="-V2, -V3 half-facets values"
+        )
         plot.add_layout(hflabel)
         plot.x_range = Range1d(-0.5, 0.5)
         plot.y_range = Range1d(-0.5, 0.5)
 
         # mark origin lines
-        vline = Span(location=0, dimension='height', line_color='black', line_width=0.7)
-        hline = Span(location=0, dimension='width', line_color='black', line_width=0.7)
+        vline = Span(location=0, dimension="height", line_color="black", line_width=0.7)
+        hline = Span(location=0, dimension="width", line_color="black", line_width=0.7)
         plot.renderers.extend([vline, hline])
 
         # add tooltips
         hover = HoverTool()
-        hover.tooltips = [('File name', '@filename'),
-                          ('Visit ID', '@visit_id'),
-                          ('Detector', '@detector'),
-                          ('Filter', '@tafilter'),
-                          ('Readout', '@readout'),
-                          ('Date-Obs', '@date_obs'),
-                          ('Subarray', '@subarray'),
-                          ('LS roll offset', '@lsrolloffset'),
-                          ('LS V2 offset', '@lsv2offset'),
-                          ('LS V3 offset', '@lsv3offset'),
-                          ('--------', '----------------')]
+        hover.tooltips = [
+            ("File name", "@filename"),
+            ("Visit ID", "@visit_id"),
+            ("Detector", "@detector"),
+            ("Filter", "@tafilter"),
+            ("Readout", "@readout"),
+            ("Date-Obs", "@date_obs"),
+            ("Subarray", "@subarray"),
+            ("LS roll offset", "@lsrolloffset"),
+            ("LS V2 offset", "@lsv2offset"),
+            ("LS V3 offset", "@lsv3offset"),
+            ("--------", "----------------"),
+        ]
         plot.add_tools(hover)
 
         # add shared selection tools
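The half-facet reference marks in the time-series plots all follow the same Span/Label pattern. A sketch of that pattern in isolation, assuming plot, time_arr, and v2halffacet are already in scope as they are inside the methods:

    from bokeh.models import Label, Span

    # horizontal line at the negated half-facet value, labeled at the
    # last observation time
    hfline = Span(location=-1 * v2halffacet[0], dimension="width",
                  line_color="green", line_width=3)
    plot.renderers.extend([hfline])
    plot.add_layout(Label(x=time_arr[-1], y=-1 * v2halffacet[0],
                          y_units="data", text="-V2 half-facet value"))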
@@ -370,7 +623,7 @@ def plt_residual_offsets(self):
 
         return plot
 
     def plt_v2offset_time(self):
-        """ Plot the residual V2 versus time
+        """Plot the residual V2 versus time
 
         Parameters
         ----------
         None
@@ -379,33 +632,60 @@ def plt_v2offset_time(self):
         plot: bokeh plot object
         """
         # create a new bokeh plot
-        plot = figure(title="MSATA Least Squares V2 Offset vs Time", x_axis_label='Time',
-                      y_axis_label='Least Squares Residual V2 Offset', x_axis_type='datetime')
-        plot.circle(x='time_arr', y='lsv2offset', source=self.source,
-                    color="blue", size=7, fill_alpha=0.3, view=self.date_view)
+        plot = figure(
+            title="MSATA Least Squares V2 Offset vs Time",
+            x_axis_label="Time",
+            y_axis_label="Least Squares Residual V2 Offset",
+            x_axis_type="datetime",
+        )
+        plot.scatter(
+            marker="circle",
+            x="time_arr",
+            y="lsv2offset",
+            source=self.source,
+            color="blue",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
         plot.y_range = Range1d(-0.5, 0.5)
 
         # mark origin line
-        hline = Span(location=0, dimension='width', line_color='black', line_width=0.7)
-        time_arr, v2halffacet = self.source.data['time_arr'], self.source.data['v2halffacet']
-        hfline = Span(location=-1 * v2halffacet[0], dimension='width', line_color='green', line_width=3)
+        hline = Span(location=0, dimension="width", line_color="black", line_width=0.7)
+        time_arr, v2halffacet = (
+            self.source.data["time_arr"],
+            self.source.data["v2halffacet"],
+        )
+        hfline = Span(
+            location=-1 * v2halffacet[0],
+            dimension="width",
+            line_color="green",
+            line_width=3,
+        )
         plot.renderers.extend([hline, hfline])
-        hflabel = Label(x=time_arr[-1], y=-1 * v2halffacet[0], y_units='data', text='-V2 half-facet value')
+        hflabel = Label(
+            x=time_arr[-1],
+            y=-1 * v2halffacet[0],
+            y_units="data",
+            text="-V2 half-facet value",
+        )
         plot.add_layout(hflabel)
 
         # add tooltips
         hover = HoverTool()
-        hover.tooltips = [('File name', '@filename'),
-                          ('Visit ID', '@visit_id'),
-                          ('Detector', '@detector'),
-                          ('Filter', '@tafilter'),
-                          ('Readout', '@readout'),
-                          ('Date-Obs', '@date_obs'),
-                          ('Subarray', '@subarray'),
-                          ('LS roll offset', '@lsrolloffset'),
-                          ('LS V2 offset', '@lsv2offset'),
-                          ('LS V3 offset', '@lsv3offset'),
-                          ('--------', '----------------')]
+        hover.tooltips = [
+            ("File name", "@filename"),
+            ("Visit ID", "@visit_id"),
+            ("Detector", "@detector"),
+            ("Filter", "@tafilter"),
+            ("Readout", "@readout"),
+            ("Date-Obs", "@date_obs"),
+            ("Subarray", "@subarray"),
+            ("LS roll offset", "@lsrolloffset"),
+            ("LS V2 offset", "@lsv2offset"),
+            ("LS V3 offset", "@lsv3offset"),
+            ("--------", "----------------"),
+        ]
         plot.add_tools(hover)
 
         # add shared selection tools
@@ -414,7 +694,7 @@ def plt_v2offset_time(self):
 
         return plot
 
     def plt_v3offset_time(self):
-        """ Plot the residual V3 versus time
+        """Plot the residual V3 versus time
 
         Parameters
         ----------
         None
@@ -423,33 +703,60 @@ def plt_v3offset_time(self):
         plot: bokeh plot object
         """
         # create a new bokeh plot
-        plot = figure(title="MSATA Least Squares V3 Offset vs Time", x_axis_label='Time',
-                      y_axis_label='Least Squares Residual V3 Offset', x_axis_type='datetime')
-        plot.circle(x='time_arr', y='lsv3offset', source=self.source,
-                    color="blue", size=7, fill_alpha=0.3, view=self.date_view)
+        plot = figure(
+            title="MSATA Least Squares V3 Offset vs Time",
+            x_axis_label="Time",
+            y_axis_label="Least Squares Residual V3 Offset",
+            x_axis_type="datetime",
+        )
+        plot.scatter(
+            marker="circle",
+            x="time_arr",
+            y="lsv3offset",
+            source=self.source,
+            color="blue",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
         plot.y_range = Range1d(-0.5, 0.5)
 
         # mark origin line
-        hline = Span(location=0, dimension='width', line_color='black', line_width=0.7)
-        time_arr, v3halffacet = self.source.data['time_arr'], self.source.data['v3halffacet']
-        hfline = Span(location=-1 * v3halffacet[0], dimension='width', line_color='green', line_width=3)
+        hline = Span(location=0, dimension="width", line_color="black", line_width=0.7)
+        time_arr, v3halffacet = (
+            self.source.data["time_arr"],
+            self.source.data["v3halffacet"],
+        )
+        hfline = Span(
+            location=-1 * v3halffacet[0],
+            dimension="width",
+            line_color="green",
+            line_width=3,
+        )
         plot.renderers.extend([hline, hfline])
-        hflabel = Label(x=time_arr[-1], y=-1 * v3halffacet[0], y_units='data', text='-V3 half-facet value')
+        hflabel = Label(
+            x=time_arr[-1],
+            y=-1 * v3halffacet[0],
+            y_units="data",
+            text="-V3 half-facet value",
+        )
         plot.add_layout(hflabel)
 
         # add tooltips
         hover = HoverTool()
-        hover.tooltips = [('File name', '@filename'),
-                          ('Visit ID', '@visit_id'),
-                          ('Detector', '@detector'),
-                          ('Filter', '@tafilter'),
-                          ('Readout', '@readout'),
-                          ('Date-Obs', '@date_obs'),
-                          ('Subarray', '@subarray'),
-                          ('LS roll offset', '@lsrolloffset'),
-                          ('LS V2 offset', '@lsv2offset'),
-                          ('LS V3 offset', '@lsv3offset'),
-                          ('--------', '----------------')]
+        hover.tooltips = [
+            ("File name", "@filename"),
+            ("Visit ID", "@visit_id"),
+            ("Detector", "@detector"),
+            ("Filter", "@tafilter"),
+            ("Readout", "@readout"),
+            ("Date-Obs", "@date_obs"),
+            ("Subarray", "@subarray"),
+            ("LS roll offset", "@lsrolloffset"),
+            ("LS V2 offset", "@lsv2offset"),
+            ("LS V3 offset", "@lsv3offset"),
+            ("--------", "----------------"),
+        ]
         plot.add_tools(hover)
 
         # add shared selection tools
@@ -458,7 +765,7 @@ def plt_v3offset_time(self):
 
         return plot
 
     def plt_lsv2v3offsetsigma(self):
-        """ Plot the residual Least Squares V2 and V3 sigma offsets
+        """Plot the residual Least Squares V2 and V3 sigma offsets
 
         Parameters
         ----------
         None
@@ -467,34 +774,46 @@ def plt_lsv2v3offsetsigma(self):
         plot: bokeh plot object
         """
         # create a new bokeh plot
-        plot = figure(title="MSATA Least Squares Residual V2-V3 Sigma Offsets",
-                      x_axis_label='Least Squares Residual V2 Sigma Offset',
-                      y_axis_label='Least Squares Residual V3 Sigma Offset')
-        plot.circle(x='lsv2sigma', y='lsv3sigma', source=self.source,
-                    color="blue", size=7, fill_alpha=0.3, view=self.date_view)
+        plot = figure(
+            title="MSATA Least Squares Residual V2-V3 Sigma Offsets",
+            x_axis_label="Least Squares Residual V2 Sigma Offset",
+            y_axis_label="Least Squares Residual V3 Sigma Offset",
+        )
+        plot.scatter(
+            marker="circle",
+            x="lsv2sigma",
+            y="lsv3sigma",
+            source=self.source,
+            color="blue",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
         plot.x_range = Range1d(-0.1, 0.1)
         plot.y_range = Range1d(-0.1, 0.1)
 
         # mark origin lines
-        vline = Span(location=0, dimension='height', line_color='black', line_width=0.7)
-        hline = Span(location=0, dimension='width', line_color='black', line_width=0.7)
+        vline = Span(location=0, dimension="height", line_color="black", line_width=0.7)
+        hline = Span(location=0, dimension="width", line_color="black", line_width=0.7)
         plot.renderers.extend([vline, hline])
 
         # add tooltips
         hover = HoverTool()
-        hover.tooltips = [('File name', '@filename'),
-                          ('Visit ID', '@visit_id'),
-                          ('Detector', '@detector'),
-                          ('Filter', '@tafilter'),
-                          ('Readout', '@readout'),
-                          ('Date-Obs', '@date_obs'),
-                          ('Subarray', '@subarray'),
-                          ('LS roll offset', '@lsrolloffset'),
-                          ('LS V2 offset', '@lsv2offset'),
-                          ('LS V2 sigma', '@lsv2sigma'),
-                          ('LS V3 offset', '@lsv3offset'),
-                          ('LS V3 sigma', '@lsv3sigma'),
-                          ('--------', '----------------')]
+        hover.tooltips = [
+            ("File name", "@filename"),
+            ("Visit ID", "@visit_id"),
+            ("Detector", "@detector"),
+            ("Filter", "@tafilter"),
+            ("Readout", "@readout"),
+            ("Date-Obs", "@date_obs"),
+            ("Subarray", "@subarray"),
+            ("LS roll offset", "@lsrolloffset"),
+            ("LS V2 offset", "@lsv2offset"),
+            ("LS V2 sigma", "@lsv2sigma"),
+            ("LS V3 offset", "@lsv3offset"),
+            ("LS V3 sigma", "@lsv3sigma"),
+            ("--------", "----------------"),
+        ]
         plot.add_tools(hover)
 
         # add shared selection tools
@@ -503,7 +822,7 @@ def plt_lsv2v3offsetsigma(self):
 
         return plot
 
     def plt_res_offsets_corrected(self):
-        """ Plot the residual Least Squares V2 and V3 offsets corrected by the half-facet
+        """Plot the residual Least Squares V2 and V3 offsets corrected by the half-facet
 
         Parameters
         ----------
         None
@@ -511,11 +830,17 @@ def plt_res_offsets_corrected(self):
         -------
         plot: bokeh plot object
         """
-        lsv2offset, lsv3offset = self.source.data['lsv2offset'], self.source.data['lsv3offset']
-        v2halffacet, v3halffacet = self.source.data['v2halffacet'], self.source.data['v3halffacet']
+        lsv2offset, lsv3offset = (
+            self.source.data["lsv2offset"],
+            self.source.data["lsv3offset"],
+        )
+        v2halffacet, v3halffacet = (
+            self.source.data["v2halffacet"],
+            self.source.data["v3halffacet"],
+        )
 
         # check if this column exists in the data already, else create it
-        if 'v2_half_fac_corr' not in self.source.data:
+        if "v2_half_fac_corr" not in self.source.data:
             v2_half_fac_corr, v3_half_fac_corr = [], []
             for idx, v2hf in enumerate(v2halffacet):
                 v3hf = v3halffacet[idx]
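The loop body that fills v2_half_fac_corr and v3_half_fac_corr sits in unchanged context that this diff elides; going by the axis labels ("Offset + half-facet"), the correction is presumably a plain shift. A sketch under that assumption only:

    # assumed correction: shift each least-squares offset by its
    # half-facet value (the elided loop body may differ)
    v2_half_fac_corr, v3_half_fac_corr = [], []
    for v2off, v3off, v2hf, v3hf in zip(lsv2offset, lsv3offset,
                                        v2halffacet, v3halffacet):
        v2_half_fac_corr.append(v2off + v2hf)
        v3_half_fac_corr.append(v3off + v3hf)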
angle_units="deg", + angle=0, + line_color="purple", + line_width=3, + ) + plot.ray( + x=xstart, + y=ystart - ray_length / 2.0, + length=ray_length, + angle_units="deg", + angle=90, + line_color="purple", + line_width=3, + ) + hflabel = Label( + x=xstart / 3.0, y=ystart, y_units="data", text="-V2, -V3 half-facets values" + ) plot.add_layout(hflabel) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS roll offset', '@lsrolloffset'), - ('LS V2 offset', '@lsv2offset'), - ('LS V3 offset', '@lsv3offset'), - ('V2 half-facet', '@v2halffacet'), - ('V3 half-facet', '@v3halffacet'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS roll offset", "@lsrolloffset"), + ("LS V2 offset", "@lsv2offset"), + ("LS V3 offset", "@lsv3offset"), + ("V2 half-facet", "@v2halffacet"), + ("V3 half-facet", "@v3halffacet"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -577,28 +930,42 @@ def plt_v2offsigma_time(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="MSATA Least Squares V2 Sigma Offset vs Time", x_axis_label='Time', - y_axis_label='Least Squares Residual V2 Sigma Offset', x_axis_type='datetime') - plot.circle(x='time_arr', y='lsv2sigma', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="MSATA Least Squares V2 Sigma Offset vs Time", + x_axis_label="Time", + y_axis_label="Least Squares Residual V2 Sigma Offset", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="lsv2sigma", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-0.1, 0.1) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS V2 offset', '@lsv2offset'), - ('LS V2 sigma', '@lsv2sigma'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS V2 offset", "@lsv2offset"), + ("LS V2 sigma", "@lsv2sigma"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -614,31 +981,43 @@ def plt_v3offsigma_time(self): p: bokeh plot object """ # create a new bokeh plot - plot = figure(title="MSATA Least Squares V3 Sigma Offset vs Time", - x_axis_label='Time', - y_axis_label='Least Squares Residual V3 Sigma Offset', - x_axis_type='datetime') - plot.circle(x='time_arr', y='lsv3sigma', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="MSATA Least Squares V3 Sigma Offset vs Time", + x_axis_label="Time", + 
y_axis_label="Least Squares Residual V3 Sigma Offset", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="lsv3sigma", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-0.1, 0.1) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS roll offset', '@lsrolloffset'), - ('LS V3 offset', '@lsv3offset'), - ('LS V3 sigma', '@lsv3sigma'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS roll offset", "@lsrolloffset"), + ("LS V3 offset", "@lsv3offset"), + ("LS V3 sigma", "@lsv3sigma"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -647,7 +1026,7 @@ def plt_v3offsigma_time(self): return plot def plt_roll_offset(self): - """ Plot the residual Least Squares roll Offset versus time + """Plot the residual Least Squares roll Offset versus time Parameters ---------- None @@ -656,36 +1035,54 @@ def plt_roll_offset(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="MSATA Least Squares Roll Offset vs Time", x_axis_label='Time', - y_axis_label='Least Squares Residual Roll Offset', x_axis_type='datetime') - plot.circle(x='time_arr', y='lsrolloffset', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="MSATA Least Squares Roll Offset vs Time", + x_axis_label="Time", + y_axis_label="Least Squares Residual Roll Offset", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="lsrolloffset", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-600.0, 600.0) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) # Maximum accepted roll line and label - time_arr = self.source.data['time_arr'] - arlinepos = Span(location=120, dimension='width', line_color='green', line_width=3) - arlineneg = Span(location=-120, dimension='width', line_color='green', line_width=3) - arlabel = Label(x=time_arr[-1], y=125, y_units='data', text='Max accepted roll') + time_arr = self.source.data["time_arr"] + arlinepos = Span( + location=120, dimension="width", line_color="green", line_width=3 + ) + arlineneg = Span( + location=-120, dimension="width", line_color="green", line_width=3 + ) + arlabel = Label(x=time_arr[-1], y=125, y_units="data", text="Max accepted roll") plot.add_layout(arlabel) plot.renderers.extend([hline, arlinepos, arlineneg]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS roll offset', '@lsrolloffset'), - ('LS V2 offset', '@lsv2offset'), 
- ('LS V3 offset', '@lsv3offset'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS roll offset", "@lsrolloffset"), + ("LS V2 offset", "@lsv2offset"), + ("LS V3 offset", "@lsv3offset"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -694,7 +1091,7 @@ def plt_roll_offset(self): return plot def plt_lsoffsetmag(self): - """ Plot the residual Least Squares Total Slew Magnitude Offset versus time + """Plot the residual Least Squares Total Slew Magnitude Offset versus time Parameters ---------- None @@ -703,30 +1100,44 @@ def plt_lsoffsetmag(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="MSATA Least Squares Total Magnitude of the Linear V2, V3 Offset Slew vs Time", x_axis_label='Time', - y_axis_label='sqrt((V2_off)**2 + (V3_off)**2)', x_axis_type='datetime') - plot.circle(x='time_arr', y='lsoffsetmag', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="MSATA Least Squares Total Magnitude of the Linear V2, V3 Offset Slew vs Time", + x_axis_label="Time", + y_axis_label="sqrt((V2_off)**2 + (V3_off)**2)", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="lsoffsetmag", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-0.5, 0.5) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('Detector', '@detector'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Subarray', '@subarray'), - ('LS roll offset', '@lsrolloffset'), - ('LS slew mag offset', '@lsoffsetmag'), - ('LS V2 offset', '@lsv2offset'), - ('LS V3 offset', '@lsv3offset'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("Detector", "@detector"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Subarray", "@subarray"), + ("LS roll offset", "@lsrolloffset"), + ("LS slew mag offset", "@lsoffsetmag"), + ("LS V2 offset", "@lsv2offset"), + ("LS V3 offset", "@lsv3offset"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -735,7 +1146,7 @@ def plt_lsoffsetmag(self): return plot def plt_tot_number_of_stars(self): - """ Plot the total number of stars used versus time + """Plot the total number of stars used versus time Parameters ---------- None @@ -744,18 +1155,18 @@ def plt_tot_number_of_stars(self): plot: bokeh plot object """ # get the number of stars per array - visit_id = self.source.data['visit_id'] - reference_star_number = self.source.data['reference_star_number'] + visit_id = self.source.data["visit_id"] + reference_star_number = self.source.data["reference_star_number"] # check if this column exists in the data already, else create it - if 'tot_number_of_stars' not in self.source.data: + if "tot_number_of_stars" not in self.source.data: # create the list of color per visit and tot_number_of_stars colors_list, tot_number_of_stars = [], [] 
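plt_lsoffsetmag plots the lsoffsetmag column, which comes straight from the OFFSTMAG header keyword per keywds2extract; the y-axis label spells out the quantity, so it can be recomputed as a cross-check. A sketch, with lsv2offset and lsv3offset assumed to be the corresponding columns of the same data source:

    import numpy as np

    # sqrt((V2_off)**2 + (V3_off)**2), per the y-axis label
    recomputed_mag = np.sqrt(np.asarray(lsv2offset) ** 2
                             + np.asarray(lsv3offset) ** 2)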
@@ -735,7 +1146,7 @@ def plt_lsoffsetmag(self):
 
         return plot
 
     def plt_tot_number_of_stars(self):
-        """ Plot the total number of stars used versus time
+        """Plot the total number of stars used versus time
 
         Parameters
         ----------
         None
@@ -744,18 +1155,18 @@ def plt_tot_number_of_stars(self):
         plot: bokeh plot object
         """
         # get the number of stars per array
-        visit_id = self.source.data['visit_id']
-        reference_star_number = self.source.data['reference_star_number']
+        visit_id = self.source.data["visit_id"]
+        reference_star_number = self.source.data["reference_star_number"]
 
         # check if this column exists in the data already, else create it
-        if 'tot_number_of_stars' not in self.source.data:
+        if "tot_number_of_stars" not in self.source.data:
             # create the list of color per visit and tot_number_of_stars
             colors_list, tot_number_of_stars = [], []
             color_dict = {}
             for i, vid in enumerate(visit_id):
                 tot_stars = len(reference_star_number[i])
                 tot_number_of_stars.append(tot_stars)
-                ci = '#%06X' % randint(0, 0xFFFFFF)
+                ci = "#%06X" % randint(0, 0xFFFFFF)
                 if vid not in color_dict:
                     color_dict[vid] = ci
                 colors_list.append(color_dict[vid])
@@ -765,35 +1176,57 @@ def plt_tot_number_of_stars(self):
             self.source.data["colors_list"] = colors_list
 
         # create a new bokeh plot
-        plot = figure(title="Total Number of Measurements vs Time", x_axis_label='Time',
-                      y_axis_label='Total number of measurements', x_axis_type='datetime')
-        plot.circle(x='time_arr', y='tot_number_of_stars', source=self.source,
-                    color='colors_list', size=7, fill_alpha=0.3, view=self.date_view)
-        plot.triangle(x='time_arr', y='stars_in_fit', source=self.source,
-                      color='black', size=7, fill_alpha=0.3, view=self.date_view)
+        plot = figure(
+            title="Total Number of Measurements vs Time",
+            x_axis_label="Time",
+            y_axis_label="Total number of measurements",
+            x_axis_type="datetime",
+        )
+        plot.scatter(
+            marker="circle",
+            x="time_arr",
+            y="tot_number_of_stars",
+            source=self.source,
+            color="colors_list",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
+        plot.scatter(
+            marker="triangle",
+            x="time_arr",
+            y="stars_in_fit",
+            source=self.source,
+            color="black",
+            size=7,
+            fill_alpha=0.3,
+            view=self.date_view,
+        )
         plot.y_range = Range1d(0.0, 40.0)
 
         # add tooltips
         hover = HoverTool()
-        hover.tooltips = [('File name', '@filename'),
-                          ('Visit ID', '@visit_id'),
-                          ('Detector', '@detector'),
-                          ('Filter', '@tafilter'),
-                          ('Readout', '@readout'),
-                          ('Date-Obs', '@date_obs'),
-                          ('Subarray', '@subarray'),
-                          ('Stars in fit', '@stars_in_fit'),
-                          ('LS roll offset', '@lsrolloffset'),
-                          ('LS slew mag offset', '@lsoffsetmag'),
-                          ('LS V2 offset', '@lsv2offset'),
-                          ('LS V3 offset', '@lsv3offset'),
-                          ('--------', '----------------')]
+        hover.tooltips = [
+            ("File name", "@filename"),
+            ("Visit ID", "@visit_id"),
+            ("Detector", "@detector"),
+            ("Filter", "@tafilter"),
+            ("Readout", "@readout"),
+            ("Date-Obs", "@date_obs"),
+            ("Subarray", "@subarray"),
+            ("Stars in fit", "@stars_in_fit"),
+            ("LS roll offset", "@lsrolloffset"),
+            ("LS slew mag offset", "@lsoffsetmag"),
+            ("LS V2 offset", "@lsv2offset"),
+            ("LS V3 offset", "@lsv3offset"),
+            ("--------", "----------------"),
+        ]
        plot.add_tools(hover)
 
         return plot
 
     def plt_mags_time(self):
-        """ Plot the star magnitudes versus time
+        """Plot the star magnitudes versus time
 
         Parameters
         ----------
         source: bokeh data object for plotting
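Each visit gets one random hex color, cached in color_dict so that repeated rows of the same visit reuse it. The same idea as a small helper (visit_color is a hypothetical name; the "#%06X" / randint pattern is the monitor's own):

    from random import randint

    color_dict = {}

    def visit_color(visit_id):
        # one "#RRGGBB" color per visit, stable across rows
        if visit_id not in color_dict:
            color_dict[visit_id] = "#%06X" % randint(0, 0xFFFFFF)
        return color_dict[visit_id]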
self.source.data["lsf_removed_y"] + planned_v2 = self.source.data["planned_v2"] + planned_v3 = self.source.data["planned_v3"] + reference_star_number = self.source.data["reference_star_number"] + box_peak_value = self.source.data["box_peak_value"] + date_obs, time_arr = self.source.data["date_obs"], self.source.data["time_arr"] + colors_list = self.source.data["colors_list"] + detector_list = self.source.data["detector"] + filename = self.source.data["filename"] # create the structure matching the number of visits and reference stars new_colors_list, vid, dobs, tarr, star_no, status = [], [], [], [], [], [] @@ -827,8 +1260,8 @@ def plt_mags_time(self): c.append(colors_list[i]) dt.append(detector_list[i]) fn.append(filename[i]) - if 'not_removed' in lsf_removed_status[i][j]: - s.append('SUCCESS') + if "not_removed" in lsf_removed_status[i][j]: + s.append("SUCCESS") x.append(planned_v2[i][j]) y.append(planned_v3[i][j]) else: @@ -848,35 +1281,62 @@ def plt_mags_time(self): fnames.extend(fn) # now create the mini ColumnDataSource for this particular plot - mini_source = {'vid': vid, 'star_no': star_no, 'status': status, - 'dobs': dobs, 'time_arr': tarr, 'det': det, 'fname': fnames, - 'peaks': peaks, 'colors_list': new_colors_list, - 'stars_v2': stars_v2, 'stars_v3': stars_v3} + mini_source = { + "vid": vid, + "star_no": star_no, + "status": status, + "dobs": dobs, + "time_arr": tarr, + "det": det, + "fname": fnames, + "peaks": peaks, + "colors_list": new_colors_list, + "stars_v2": stars_v2, + "stars_v3": stars_v3, + } mini_source = ColumnDataSource(data=mini_source) # hook up the date range slider to this source as well - callback = CustomJS(args=dict(s=mini_source), code=""" + callback = CustomJS( + args=dict(s=mini_source), + code=""" s.change.emit(); - """) - self.date_range.js_on_change('value', callback) - mini_view = CDSView(source=mini_source, filters=[self.date_filter]) + """, + ) + self.date_range.js_on_change("value", callback) + mini_view = CDSView(filter=self.date_filter) # create the bokeh plot - plot = figure(title="MSATA Counts vs Time", x_axis_label='Time', - y_axis_label='box_peak [Counts]', - x_axis_type='datetime') - plot.circle(x='time_arr', y='peaks', source=mini_source, - color='colors_list', size=7, fill_alpha=0.3, view=mini_view) + plot = figure( + title="MSATA Counts vs Time", + x_axis_label="Time", + y_axis_label="box_peak [Counts]", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="peaks", + source=mini_source, + color="colors_list", + size=7, + fill_alpha=0.3, + view=mini_view, + ) # add count saturation warning lines loc1, loc2, loc3 = 45000.0, 50000.0, 60000.0 - hline1 = Span(location=loc1, dimension='width', line_color='green', line_width=3) - hline2 = Span(location=loc2, dimension='width', line_color='yellow', line_width=3) - hline3 = Span(location=loc3, dimension='width', line_color='red', line_width=3) + hline1 = Span( + location=loc1, dimension="width", line_color="green", line_width=3 + ) + hline2 = Span( + location=loc2, dimension="width", line_color="yellow", line_width=3 + ) + hline3 = Span(location=loc3, dimension="width", line_color="red", line_width=3) plot.renderers.extend([hline1, hline2, hline3]) - label1 = Label(x=time_arr[-1], y=loc1, y_units='data', text='45000 counts') - label2 = Label(x=time_arr[-1], y=loc2, y_units='data', text='50000 counts') - label3 = Label(x=time_arr[-1], y=loc3, y_units='data', text='60000 counts') + label1 = Label(x=time_arr[-1], y=loc1, y_units="data", text="45000 counts") + label2 = 
Label(x=time_arr[-1], y=loc2, y_units="data", text="50000 counts") + label3 = Label(x=time_arr[-1], y=loc3, y_units="data", text="60000 counts") plot.add_layout(label1) plot.add_layout(label2) plot.add_layout(label3) @@ -884,16 +1344,18 @@ def plt_mags_time(self): # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@fname'), - ('Visit ID', '@vid'), - ('Detector', '@det'), - ('Star No.', '@star_no'), - ('LS Status', '@status'), - ('Date-Obs', '@dobs'), - ('Box peak', '@peaks'), - ('Measured V2', '@stars_v2'), - ('Measured V3', '@stars_v3'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@fname"), + ("Visit ID", "@vid"), + ("Detector", "@det"), + ("Star No.", "@star_no"), + ("LS Status", "@status"), + ("Date-Obs", "@dobs"), + ("Box peak", "@peaks"), + ("Measured V2", "@stars_v2"), + ("Measured V3", "@stars_v3"), + ("--------", "----------------"), + ] plot.add_tools(hover) return plot @@ -902,8 +1364,8 @@ def setup_date_range(self): """Set up a date range filter, defaulting to the last week of data.""" end_date = datetime.now(tz=timezone.utc) one_week_ago = end_date.date() - timedelta(days=7) - first_data_point = np.min(self.source.data['time_arr']).date() - last_data_point = np.max(self.source.data['time_arr']).date() + first_data_point = np.min(self.source.data["time_arr"]).date() + last_data_point = np.max(self.source.data["time_arr"]).date() if last_data_point < one_week_ago: # keep at least one point in the plot if there was # no TA data this week @@ -913,21 +1375,30 @@ def setup_date_range(self): # allowed range is from the first ever data point to today self.date_range = DateRangeSlider( - title="Date range displayed", start=first_data_point, - end=end_date, value=(start_date, end_date), step=1) - - callback = CustomJS(args=dict(s=self.source), code=""" + title="Date range displayed", + start=first_data_point, + end=end_date, + value=(start_date, end_date), + step=1, + ) + + callback = CustomJS( + args=dict(s=self.source), + code=""" s.change.emit(); - """) - self.date_range.js_on_change('value', callback) + """, + ) + self.date_range.js_on_change("value", callback) - self.date_filter = CustomJSFilter(args=dict(slider=self.date_range), code=""" + self.date_filter = CustomJSFilter( + args=dict(slider=self.date_range), + code=""" var indices = []; var start = slider.value[0]; var end = slider.value[1]; for (var i=0; i < source.get_length(); i++) { - if (source.data['time_arr'][i] >= start + if (source.data['time_arr'][i] >= start && source.data['time_arr'][i] <= end) { indices.push(true); } else { @@ -935,18 +1406,20 @@ def setup_date_range(self): } } return indices; - """) - self.date_view = CDSView(source=self.source, filters=[self.date_filter]) + """, + ) + self.date_view = CDSView(filter=self.date_filter) + + def mk_plt_layout(self, plot_data): + """Create the bokeh plot layout - def mk_plt_layout(self): - """Create the bokeh plot layout""" - self.source = ColumnDataSource(data=self.msata_data) + Parameters + ---------- + plot_data : pandas.DateFrame + Pandas data frame of data to plot. 
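The CDSView changes above track the Bokeh 3 API: a view no longer takes a source or a list of filters, just a single filter. The date-range filtering recipe in isolation (date_range is assumed to be the DateRangeSlider built in setup_date_range):

    from bokeh.models import CDSView, CustomJSFilter

    date_filter = CustomJSFilter(args=dict(slider=date_range), code="""
        var indices = [];
        var start = slider.value[0];
        var end = slider.value[1];
        for (var i = 0; i < source.get_length(); i++) {
            indices.push(source.data['time_arr'][i] >= start
                         && source.data['time_arr'][i] <= end);
        }
        return indices;
    """)
    view = CDSView(filter=date_filter)  # Bokeh 3: single filter, no source kwarg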
+ """ - # make sure all arrays are lists in order to later be able to read the data - # from the html file - for item in self.source.data: - if not isinstance(self.source.data[item], (str, float, int, list)): - self.source.data[item] = self.source.data[item].tolist() + self.source = ColumnDataSource(data=plot_data) # add a time array to the data source self.add_time_column() @@ -958,7 +1431,6 @@ def mk_plt_layout(self): self.setup_date_range() # set the output html file name and create the plot grid - output_file(self.output_file_name) p1 = self.plt_status() p2 = self.plt_residual_offsets() p3 = self.plt_res_offsets_corrected() @@ -973,20 +1445,37 @@ def mk_plt_layout(self): p11 = self.plt_mags_time() # make grid - grid = gridplot([p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12], - ncols=2, merge_tools=False) + grid = gridplot( + [p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12], + ncols=2, + merge_tools=False, + ) box_layout = layout(children=[self.date_range, grid]) - save(box_layout) - # return the needed components for embedding the results in the MSATA html template - script, div = components(box_layout) - return script, div + self.script, self.div = components(box_layout) def identify_tables(self): """Determine which database tables to use for a run of the TA monitor.""" mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] - self.query_table = eval('{}TAQueryHistory'.format(mixed_case_name)) - self.stats_table = eval('{}TAStats'.format(mixed_case_name)) + self.query_table = eval("{}TaQueryHistory".format(mixed_case_name)) + self.stats_table = eval("{}MsataStats".format(mixed_case_name)) + + def file_exists_in_database(self, filename): + """Checks if an entry for filename exists in the MSATA stats + database. + + Parameters + ---------- + filename : str + The full path to the uncal filename. + + Returns + ------- + file_exists : bool + ``True`` if filename exists in the MSATA stats database. + """ + results = self.stats_table.objects.filter(filename__iexact=filename).values() + return len(results) != 0 def most_recent_search(self): """Query the query history database and return the information @@ -999,60 +1488,27 @@ def most_recent_search(self): Date (in MJD) of the ending range of the previous MAST query where the msata monitor was run. """ - query = session.query(self.query_table).filter(and_(self.query_table.aperture == self.aperture, - self.query_table.run_monitor == True)).order_by(self.query_table.end_time_mjd).all() - dates = np.zeros(0) - for instance in query: - dates = np.append(dates, instance.end_time_mjd) + filters = {"aperture__iexact": self.aperture, "run_monitor": True} + + record = ( + self.query_table.objects.filter(**filters).order_by("-end_time_mjd").first() + ) - query_count = len(dates) - if query_count == 0: + if record is None: query_result = self.query_very_beginning - logging.info(('\tNo query history for {}. Beginning search date will be set to {}.' - .format(self.aperture, self.query_very_beginning))) + logging.info( + ( + "\tNo query history for {}. Beginning search date will be set to {}.".format( + self.aperture, self.query_very_beginning + ) + ) + ) else: - query_result = np.max(dates) + query_result = record.end_time_mjd return query_result - def get_data_from_html(self, html_file): - """ - This function gets the data from the Bokeh html file created with - the NIRSpec TA monitor script. 
-    def get_data_from_html(self, html_file):
-        """
-        This function gets the data from the Bokeh html file created with
-        the NIRSpec TA monitor script.
-
-        Parameters
-        ----------
-        html_file: str
-            File created by the monitor script
-
-        Returns
-        -------
-        prev_data_dict: dict
-            Dictionary containing all data used in the plots
-        """
-
-        # open the html file and get the contents
-        htmlFileToBeOpened = open(html_file, "r")
-        contents = htmlFileToBeOpened.read()
-        soup = BeautifulSoup(contents, 'html.parser')
-
-        # now read as python dictionary and search for the data
-        prev_data_dict = {}
-        html_data = json.loads(soup.find('script', type='application/json').string)
-        for key, val in html_data.items():
-            if 'roots' in val:  # this is a dictionary
-                if 'references' in val['roots']:
-                    for item in val['roots']['references']:  # this is a list
-                        # each item of the list is a dictionary
-                        for item_key, item_val in item.items():
-                            if 'data' in item_val:
-                                # finally the data dictionary!
-                                for data_key, data_val in item_val['data'].items():
-                                    prev_data_dict[data_key] = data_val
-        # set to None if dictionary is empty
-        if not bool(prev_data_dict):
-            prev_data_dict = None
-        return prev_data_dict
-
     def construct_expected_data(self, keywd_dict, tot_number_of_stars):
         """This function creates the list to append to the dictionary key in the expected format.
 
         Parameters
@@ -1070,71 +1526,26 @@ def construct_expected_data(self, keywd_dict, tot_number_of_stars):
         val = -999
         list4dict = []
         # create either the list or return the right type of value
-        if keywd_dict['loc'] != 'ta_table':  # these cases should be singe values per observation
-            if keywd_dict['type'] == float:
+        if (
+            keywd_dict["loc"] != "ta_table"
+        ):  # these cases should be single values per observation
+            if keywd_dict["type"] == float:
                 val = float(val)
-            if keywd_dict['type'] == str:
+            if keywd_dict["type"] == str:
                 val = str(val)
             list4dict = val
         else:
-            for tns in tot_number_of_stars: # elements the list of lists should have
+            for tns in tot_number_of_stars:  # elements the list of lists should have
                 list2append = []
-                for _ in range(tns): # elements each sublist should have
-                    if keywd_dict['type'] == float:
+                for _ in range(tns):  # elements each sublist should have
+                    if keywd_dict["type"] == float:
                         val = float(val)
-                    if keywd_dict['type'] == str:
+                    if keywd_dict["type"] == str:
                         val = str(val)
                     list2append.append(val)
                 list4dict.append(list2append)
         return list4dict
 
-    def prev_data2expected_format(self, prev_data_dict):
-        """Add all the necessary columns to match expected format to combine previous
-        and new data.
-
-        Parameters
-        ----------
-        prev_data_dict: dictionary
-            Dictionary containing all data used in the Bokeh html file plots
-
-        Returns
-        -------
-        prev_data: pandas dataframe
-            Contains all expected columns to be combined with the new data
-        latest_prev_obs: str
-            Date of the latest observation in the previously plotted data
-        """
-        # remember that the time array created is in milliseconds, removing to get time object
-        time_in_millis = max(prev_data_dict['time_arr'])
-        latest_prev_obs = Time(time_in_millis / 1000., format='unix')
-        latest_prev_obs = latest_prev_obs.mjd
-        prev_data_expected_cols = {}
-        tot_number_of_stars = prev_data_dict['tot_number_of_stars']
-        for file_keywd, keywd_dict in self.keywds2extract.items():
-            key = keywd_dict['name']
-            if key in prev_data_dict:
-                # case when all the info of all visits and ref stars is in the same list
-                if len(prev_data_dict[key]) > len(tot_number_of_stars):
-                    correct_arrangement = []
-                    correct_start_idx, correct_end_idx = 0, tot_number_of_stars[0]
-                    for idx, tns in enumerate(tot_number_of_stars):
-                        list2append = prev_data_dict[key][correct_start_idx: correct_end_idx]
-                        correct_arrangement.append(list2append)
-                        correct_start_idx = correct_end_idx
-                        correct_end_idx += tns
-                    prev_data_expected_cols[key] = correct_arrangement
-                # case when the html stored thing is just an object but does not have data
-                elif len(prev_data_dict[key]) < len(tot_number_of_stars):
-                    list4dict = self.construct_expected_data(keywd_dict, tot_number_of_stars)
-                    prev_data_expected_cols[key] = list4dict
-                # case when nothing special to do
-                else:
-                    prev_data_expected_cols[key] = prev_data_dict[key]
-            else:
-                list4dict = self.construct_expected_data(keywd_dict, tot_number_of_stars)
-                prev_data_expected_cols[key] = list4dict
-        # now convert to a panda dataframe to be combined with the new data
-        prev_data = pd.DataFrame(prev_data_expected_cols)
-        return prev_data, latest_prev_obs
-
     def pull_filenames(self, file_info):
         """Extract filenames from the list of file information returned from
         query_mast.
@@ -1151,10 +1562,10 @@ def pull_filenames(self, file_info):
         """
         files = []
         for list_element in file_info:
-            if 'filename' in list_element:
-                files.append(list_element['filename'])
-            elif 'root_name' in list_element:
-                files.append(list_element['root_name'])
+            if "filename" in list_element:
+                files.append(list_element["filename"])
+            elif "root_name" in list_element:
+                files.append(list_element["root_name"])
         return files
 
     def get_uncal_names(self, file_list):
@@ -1170,13 +1581,13 @@ def get_uncal_names(self, file_list):
         """
         good_files = []
         for filename in file_list:
-            if filename.endswith('.fits'):
+            if filename.endswith(".fits"):
                 # MAST names look like: jw01133003001_02101_00001_nrs2_cal.fits
-                suffix2replace = filename.split('_')[-1]
-                filename = filename.replace(suffix2replace, 'uncal.fits')
+                suffix2replace = filename.split("_")[-1]
+                filename = filename.replace(suffix2replace, "uncal.fits")
             else:
                 # rootnames look like: jw01133003001_02101_00001_nrs2
-                filename += '_uncal.fits'
+                filename += "_uncal.fits"
             if filename not in good_files:
                 good_files.append(filename)
         return good_files
@@ -1194,20 +1605,30 @@ def update_ta_success_txtfile(self):
         # check if previous file exsists and read the data from it
         if os.path.isfile(output_success_ta_txtfile):
             # now rename the previous file, for backup
-            os.rename(output_success_ta_txtfile, os.path.join(self.output_dir, "prev_msata_success.txt"))
+            os.rename(
+                output_success_ta_txtfile,
+                os.path.join(self.output_dir, "prev_msata_success.txt"),
+            )
 
         # get the new data
         ta_success, ta_inprogress, ta_failure = [], [], []
-        filenames, ta_status = self.msata_data.loc[:,'filename'], self.msata_data.loc[:,'ta_status']
+        filenames, ta_status = (
+            self.msata_data.loc[:, "filename"],
+            self.msata_data.loc[:, "ta_status"],
+        )
         for fname, ta_stat in zip(filenames, ta_status):
             # select the appropriate list to append to
-            if ta_stat == 'SUCCESSFUL':
+            if ta_stat == "SUCCESSFUL":
                 ta_success.append(fname)
-            elif ta_stat == 'IN_PROGRESS':
+            elif ta_stat == "IN_PROGRESS":
                 ta_inprogress.append(fname)
             else:
                 ta_failure.append(fname)
 
         # find which one is the longest list (to make sure the other lists have the same length)
-        successes, inprogress, failures = len(ta_success), len(ta_inprogress), len(ta_failure)
+        successes, inprogress, failures = (
+            len(ta_success),
+            len(ta_inprogress),
+            len(ta_failure),
+        )
         longest_list = None
         if successes >= inprogress:
             longest_list = successes
@@ -1222,178 +1643,212 @@ def update_ta_success_txtfile(self):
             for _ in range(remaining_items):
                 ta_list.append("")
 
         # write the new output file
-        with open(output_success_ta_txtfile, 'w+') as txt:
+        with open(output_success_ta_txtfile, "w+") as txt:
             txt.write("# MSATA successes and failure file names \n")
-            filehdr1 = "# {} Total successful and {} total failed MSATA ".format(successes, failures)
-            filehdr2 = "# {:<50} {:<50} {:<50}".format("Successes", "In_Progress", "Failures")
+            filehdr1 = "# {} Total successful and {} total failed MSATA ".format(
+                successes, failures
+            )
+            filehdr2 = "# {:<50} {:<50} {:<50}".format(
+                "Successes", "In_Progress", "Failures"
+            )
            txt.write(filehdr1 + "\n")
            txt.write(filehdr2 + "\n")
            for idx, suc in enumerate(ta_success):
-                line = "{:<50} {:<50} {:<50}".format(suc, ta_inprogress[idx], ta_failure[idx])
+                line = "{:<50} {:<50} {:<50}".format(
+                    suc, ta_inprogress[idx], ta_failure[idx]
+                )
                txt.write(line + "\n")
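A usage sketch for get_uncal_names, treating the method as standalone: both a MAST filename and a bare rootname collapse to the same uncal name, and the `if filename not in good_files` check drops the duplicate.

    get_uncal_names([
        "jw01133003001_02101_00001_nrs2_cal.fits",
        "jw01133003001_02101_00001_nrs2",
    ])
    # -> ['jw01133003001_02101_00001_nrs2_uncal.fits']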
+ def add_msata_data(self): + """Method to add MSATA data to stats database""" + # self.msata_data is a pandas dataframe. When creating the django model + # to store all of the MSATA data, this data was previously extracted and stored + # into a dataframe. To avoid rewriting self.get_msata_data(), it is easier to + # iterate over the rows of the returned dataframe and access the metadata this + # way. + for _, row in self.msata_data.iterrows(): + stats_db_entry = { + "filename": row["filename"], + "date_obs": self.add_timezone(row["date_obs"]), + "visit_id": row["visit_id"], + "tafilter": row["tafilter"], + "detector": row["detector"], + "readout": row["readout"], + "subarray": row["subarray"], + "num_refstars": row["num_refstars"], + "ta_status": row["ta_status"], + "v2halffacet": row["v2halffacet"], + "v3halffacet": row["v3halffacet"], + "v2msactr": row["v2msactr"], + "v3msactr": row["v3msactr"], + "lsv2offset": row["lsv2offset"], + "lsv3offset": row["lsv3offset"], + "lsoffsetmag": row["lsoffsetmag"], + "lsrolloffset": row["lsrolloffset"], + "lsv2sigma": row["lsv2sigma"], + "lsv3sigma": row["lsv3sigma"], + "lsiterations": row["lsiterations"], + "guidestarid": row["guidestarid"], + "guidestarx": row["guidestarx"], + "guidestary": row["guidestary"], + "guidestarroll": row["guidestarroll"], + "samx": row["samx"], + "samy": row["samy"], + "samroll": row["samroll"], + "box_peak_value": list(row["box_peak_value"]), + "reference_star_mag": list(row["reference_star_mag"]), + "convergence_status": list(row["convergence_status"]), + "reference_star_number": list(row["reference_star_number"]), + "lsf_removed_status": list(row["lsf_removed_status"]), + "lsf_removed_reason": list(row["lsf_removed_reason"]), + "lsf_removed_x": list(row["lsf_removed_x"]), + "lsf_removed_y": list(row["lsf_removed_y"]), + "planned_v2": list(row["planned_v2"]), + "planned_v3": list(row["planned_v3"]), + "stars_in_fit": row["stars_in_fit"], + "entry_date": datetime.now(tz=timezone.utc), + } + + entry = self.stats_table(**stats_db_entry) + entry.save() + + logging.info("\tNew entry added to MSATA stats database table") + + logging.info("\tUpdated the MSATA statistics table") + + def plots_for_app(self): + """Utility function to access div and script objects for + embedding bokeh in JWQL application. """ - self.output_dir = os.path.join(get_config()['outputs'], 'msata_monitor') - ensure_dir_exists(self.output_dir) - - self.output_file_name = os.path.join(self.output_dir, "msata_layout.html") - if not os.path.isfile(self.output_file_name): - return 'No MSATA data available', '', '' - - # open the html file and get the contents - with open(self.output_file_name, "r") as html_file: - contents = html_file.read() - - soup = BeautifulSoup(contents, 'html.parser').body - - # find the script elements - script1 = str(soup.find('script', type='text/javascript')) - script2 = str(soup.find('script', type='application/json')) - - # find the div element - div = str(soup.find('div', class_='bk-root')) - return div, script1, script2 + # Query results and convert into pandas df. + self.query_results = pd.DataFrame( + list(NIRSpecMsataStats.objects.all().values()) + ) + # Generate plot + self.mk_plt_layout(self.query_results) @log_fail @log_info def run(self): """The main method. 
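The add_msata_data() method added above persists each DataFrame row as one Django model entry, casting array-valued columns to plain lists. A trimmed sketch of the same pattern; the model and the fields shown are hypothetical stand-ins:

```python
# Sketch: persist DataFrame rows as Django model entries, mirroring
# add_msata_data(). The model and its fields are hypothetical.
from datetime import datetime, timezone

import pandas as pd


def save_rows(df: pd.DataFrame, model):
    for _, row in df.iterrows():
        entry = model(
            filename=row["filename"],
            # array-valued columns are cast to plain lists for storage
            planned_v2=list(row["planned_v2"]),
            entry_date=datetime.now(tz=timezone.utc),
        )
        entry.save()  # one INSERT per row
```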
See module docstrings for further details.""" - logging.info('Begin logging for msata_monitor') - - # define MSATA variables - self.instrument = "nirspec" - self.aperture = "NRS_FULL_MSA" + logging.info("Begin logging for msata_monitor") # Identify which database tables to use self.identify_tables() - # Get the output directory and setup a directory to store the data - self.output_dir = os.path.join(get_config()['outputs'], 'msata_monitor') - ensure_dir_exists(self.output_dir) - # Set up directory to store the data - ensure_dir_exists(os.path.join(self.output_dir, 'data')) - self.data_dir = os.path.join(self.output_dir, - 'data/{}_{}'.format(self.instrument.lower(), - self.aperture.lower())) - ensure_dir_exists(self.data_dir) - - # Locate the record of most recent MAST search; use this time + # Locate the record of most recent time the monitor was run self.query_start = self.most_recent_search() - # get the data of the plots previously created and set the query start date - self.prev_data = None - self.output_file_name = os.path.join(self.output_dir, "msata_layout.html") - logging.info('\tNew output plot file will be written as: {}'.format(self.output_file_name)) - if os.path.isfile(self.output_file_name): - prev_data_dict = self.get_data_from_html(self.output_file_name) - self.prev_data, self.query_start = self.prev_data2expected_format(prev_data_dict) - logging.info('\tPrevious data read from html file: {}'.format(self.output_file_name)) - # move this plot to a previous version - shutil.copyfile(self.output_file_name, os.path.join(self.output_dir, "prev_msata_layout.html")) - # fail save - start from the beginning if there is no html file - else: - self.query_start = self.query_very_beginning - logging.info('\tPrevious output html file not found. 
Starting MAST query from Jan 28, 2022 == First JWST images (MIRI)') # Use the current time as the end time for MAST query self.query_end = Time.now().mjd - logging.info('\tQuery times: {} {}'.format(self.query_start, self.query_end)) - - # Query for data using the aperture and the time of the - # most recent previous search as the starting time + logging.info("\tQuery times: {} {}".format(self.query_start, self.query_end)) - # via MAST: - # new_entries = monitor_utils.mast_query_ta( - # self.instrument, self.aperture, self.query_start, self.query_end) - - # via django model: - new_entries = monitor_utils.model_query_ta( - self.instrument, self.aperture, self.query_start, self.query_end) + # Obtain all entries with instrument/aperture combinations: + new_entries = monitor_utils.mast_query_ta( + self.instrument, self.aperture, self.query_start, self.query_end + ) msata_entries = len(new_entries) - logging.info('\tQuery has returned {} MSATA files for {}, {}.'.format(msata_entries, self.instrument, self.aperture)) + logging.info( + "\tQuery has returned {} MSATA files for {}, {}.".format( + msata_entries, self.instrument, self.aperture + ) + ) # Filter new entries to only keep uncal files new_entries = self.pull_filenames(new_entries) new_entries = self.get_uncal_names(new_entries) msata_entries = len(new_entries) - logging.info('\tThere are {} uncal TA files to run the MSATA monitor.'.format(msata_entries)) + logging.info( + "\tThere are {} uncal TA files to run the MSATA monitor.".format( + msata_entries + ) + ) - # Get full paths to the files + # Check if filenames from the RootFileInfo model are in data storage new_filenames = [] for filename_of_interest in new_entries: - if (self.prev_data is not None - and filename_of_interest in self.prev_data['filename'].values): - logging.warning('\t\tFile {} already in previous data. Skipping.'.format(filename_of_interest)) + if self.file_exists_in_database(filename_of_interest): + logging.warning( + "\t\tFile {} in database already, passing.".format( + filename_of_interest + ) + ) continue - try: - new_filenames.append(filesystem_path(filename_of_interest)) - logging.warning('\tFile {} included for processing.'.format(filename_of_interest)) - except FileNotFoundError: - logging.warning('\t\tUnable to locate {} in filesystem. Not including in processing.'.format(filename_of_interest)) - + else: + try: + new_filenames.append(filesystem_path(filename_of_interest)) + logging.warning( + "\t\tFile {} included for processing.".format( + filename_of_interest + ) + ) + except FileNotFoundError: + logging.warning( + "\t\tUnable to locate {} in filesystem. Not including in processing.".format( + filename_of_interest + ) + ) + + # If there are no new files, monitor is skipped if len(new_filenames) == 0: - logging.warning('\t\t ** Unable to locate any file in filesystem. Nothing to process. ** ') - - # Run the monitor on any new files - logging.info('\tMSATA monitor found {} new uncal files.'.format(len(new_filenames))) - self.script, self.div, self.msata_data = None, None, None - monitor_run = False - if len(new_filenames) > 0: # new data was found - # get the data - self.new_msata_data, no_ta_ext_msgs = self.get_msata_data(new_filenames) + logging.info( + "\t\t ** Unable to locate any file in filesystem. Nothing to process. ** " + ) + logging.info("\tMSATA monitor skipped.
No MSATA data found.") + monitor_run = False + else: + # Run the monitor on any new files + # self.msata_data is a pandas dataframe + self.msata_data, no_ta_ext_msgs = self.get_msata_data(new_filenames) + logging.info( + "\tMSATA monitor found {} new uncal files.".format(len(new_filenames)) + ) + if len(no_ta_ext_msgs) >= 1: for item in no_ta_ext_msgs: logging.info(item) - if self.new_msata_data is not None: - # concatenate with previous data - if self.prev_data is not None: - self.msata_data = pd.concat([self.prev_data, self.new_msata_data]) - logging.info('\tData from previous html output file and new data concatenated.') - else: - self.msata_data = self.new_msata_data - logging.info('\tOnly new data was found - no previous html file.') else: - logging.info('\tMSATA monitor skipped. No MSATA data found.') - # make sure to return the old data if no new data is found - elif self.prev_data is not None: - self.msata_data = self.prev_data - logging.info('\tNo new data found. Using data from previous html output file.') - # make the plots if there is data - if self.msata_data is not None: - self.script, self.div = self.mk_plt_layout() + logging.info("\tNo TA Ext Msgs Found") + + # Add MSATA data to stats table. + self.add_msata_data() + + # Once data is added to database table and plots are made, the + # monitor has run successfully. monitor_run = True - logging.info('\tOutput html plot file created: {}'.format(self.output_file_name)) - msata_files_used4plots = len(self.msata_data['visit_id']) - logging.info('\t{} MSATA files were used to make plots.'.format(msata_files_used4plots)) + + msata_files_used4plots = len(self.msata_data["visit_id"]) + logging.info( + "\t{} MSATA files were used to make plots.".format( + msata_files_used4plots + ) + ) # update the list of successful and failed TAs self.update_ta_success_txtfile() - logging.info('\tMSATA status file was updated') - else: - logging.info('\tMSATA monitor skipped.') + logging.info("\tMSATA status file was updated") # Update the query history - new_entry = {'instrument': 'nirspec', - 'aperture': self.aperture, - 'start_time_mjd': self.query_start, - 'end_time_mjd': self.query_end, - 'entries_found': msata_entries, - 'files_found': len(new_filenames), - 'run_monitor': monitor_run, - 'entry_date': datetime.now()} - with engine.begin() as connection: - connection.execute(self.query_table.__table__.insert(), new_entry) - logging.info('\tUpdated the query history table') - - logging.info('MSATA Monitor completed successfully.') - - -if __name__ == '__main__': - - module = os.path.basename(__file__).strip('.py') + new_entry = { + "instrument": "nirspec", + "aperture": self.aperture, + "start_time_mjd": self.query_start, + "end_time_mjd": self.query_end, + "entries_found": msata_entries, + "files_found": len(new_filenames), + "run_monitor": monitor_run, + "entry_date": datetime.now(tz=timezone.utc), + } + + entry = self.query_table(**new_entry) + entry.save() + logging.info("\tUpdated the query history table") + + logging.info("MSATA Monitor completed successfully.") + + +if __name__ == "__main__": + module = os.path.basename(__file__).strip(".py") start_time, log_file = monitor_utils.initialize_instrument_monitor(module) monitor = MSATA() diff --git a/jwql/instrument_monitors/nirspec_monitors/ta_monitors/wata_monitor.py b/jwql/instrument_monitors/nirspec_monitors/ta_monitors/wata_monitor.py index f248a1d09..ebcb3a269 100755 --- a/jwql/instrument_monitors/nirspec_monitors/ta_monitors/wata_monitor.py +++
b/jwql/instrument_monitors/nirspec_monitors/ta_monitors/wata_monitor.py @@ -7,7 +7,7 @@ # Sep 2022 - Vr. 1.1: Modified ColumnDataSource so that data could be recovered # from an html file of a previous run of the monitor and # included the code to read and format the data from the html file - +# Apr 2024 - Vr. 1.2: Removed html webscraping and now store data in django models """ This module contains the code for the NIRSpec Wide Aperture Target @@ -21,6 +21,7 @@ ______ - Maria Pena-Guerrero - Melanie Clarke + - Mees Fix Use --- @@ -29,13 +30,11 @@ """ - # general imports -import json import os import logging -import shutil from datetime import datetime, timezone, timedelta +from dateutil import parser import numpy as np import pandas as pd @@ -45,24 +44,45 @@ from bokeh.io import output_file from bokeh.layouts import gridplot, layout from bokeh.models import ( - ColumnDataSource, Range1d, CustomJS, CustomJSFilter, CDSView, - Span, Label, DateRangeSlider) + ColumnDataSource, + Range1d, + CustomJS, + CustomJSFilter, + CDSView, + Span, + Label, + DateRangeSlider, +) from bokeh.models.tools import HoverTool, BoxSelectTool from bokeh.plotting import figure, save -from bs4 import BeautifulSoup -from sqlalchemy.sql.expression import and_ # jwql imports +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.logging_functions import log_info, log_fail from jwql.utils import monitor_utils from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE -from jwql.database.database_interface import session, engine -from jwql.database.database_interface import NIRSpecTAQueryHistory, NIRSpecTAStats -from jwql.utils.utils import ensure_dir_exists, filesystem_path, get_config, filename_parser +from jwql.utils.utils import ( + ensure_dir_exists, + filesystem_path, + get_config, +) + + +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + from jwql.website.apps.jwql.monitor_models.ta import ( + NIRSpecWataStats, + NIRSpecTaQueryHistory, + ) # noqa: E402 (module level import not at top of file) -class WATA(): - """ Class for executing the NIRSpec WATA monitor. + +class WATA: + """Class for executing the NIRSpec WATA monitor. This class will search for new WATA current files in the file systems for NIRSpec and will run the monitor on these files. 
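The import-time guard added above is what lets these monitors use the Django ORM from a standalone script: django.setup() must run before any model import. A condensed sketch of that bootstrap; the settings-module path matches the diff, but treating the guard flag as a plain environment-derived boolean is an assumption here:

```python
# Sketch: bootstrap Django before importing ORM models in a standalone
# script, mirroring the guard added above. ON_GITHUB_ACTIONS is assumed
# to be a simple boolean derived from the environment.
import os

ON_GITHUB_ACTIONS = bool(os.environ.get("GITHUB_ACTIONS"))

if not ON_GITHUB_ACTIONS:
    import django

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings")
    django.setup()  # must precede any model import
    # from jwql.website.apps.jwql.monitor_models.ta import NIRSpecWataStats
```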
The monitor will @@ -89,48 +109,67 @@ class WATA(): """ def __init__(self): - """ Initialize an instance of the WATA class """ + """Initialize an instance of the WATA class""" + # define WATA variables + self.instrument = "nirspec" + self.aperture = "NRS_S1600A1_SLIT" + # Very beginning of intake of images: Jan 28, 2022 == First JWST images (MIRI) self.query_very_beginning = 59607.0 # structure to define required keywords to extract and where they live - self.keywds2extract = {'FILENAME': {'loc': 'main_hdr', 'alt_key': None, 'name': 'filename', 'type': str}, - 'DATE-BEG': {'loc': 'main_hdr', 'alt_key': None, 'name': 'date_obs'}, - 'OBS_ID': {'loc': 'main_hdr', 'alt_key': 'OBSID', 'name': 'visit_id'}, - 'FILTER': {'loc': 'main_hdr', 'alt_key': 'FWA_POS', 'name': 'tafilter'}, - 'READOUT': {'loc': 'main_hdr', 'alt_key': 'READPATT', 'name': 'readout'}, - 'TASTATUS': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'ta_status'}, - 'STAT_RSN': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'status_reason'}, - 'REFSTNAM': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_name'}, - 'REFSTRA': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_ra'}, - 'REFSTDEC': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_dec'}, - 'REFSTMAG': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_mag'}, - 'REFSTCAT': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_catalog'}, - 'V2_PLAND': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'planned_v2'}, - 'V3_PLAND': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'planned_v3'}, - 'EXTCOLST': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'stamp_start_col'}, - 'EXTROWST': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'stamp_start_row'}, - 'TA_DTCTR': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'star_detector'}, - 'BOXPKVAL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'max_val_box'}, - 'BOXPKCOL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'max_val_box_col'}, - 'BOXPKROW': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'max_val_box_row'}, - 'TA_ITERS': {'loc': 'ta_hdr', 'alt_key': 'CENITERS', 'name': 'iterations'}, - 'CORR_COL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'corr_col'}, - 'CORR_ROW': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'corr_row'}, - 'IMCENCOL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'stamp_final_col'}, - 'IMCENROW': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'stamp_final_row'}, - 'DTCENCOL': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'detector_final_col'}, - 'DTCENROW': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'detector_final_row'}, - 'SCIXCNTR': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'final_sci_x'}, - 'SCIYCNTR': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'final_sci_y'}, - 'TARGETV2': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'measured_v2'}, - 'TARGETV3': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'measured_v3'}, - 'V2_REF': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'ref_v2'}, - 'V3_REF': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'ref_v3'}, - 'V2_RESID': {'loc': 'ta_hdr', 'alt_key': 'V2_OFFST', 'name': 'v2_offset'}, - 'V3_RESID': {'loc': 'ta_hdr', 'alt_key': 'V3_OFFST', 'name': 'v3_offset'}, - 'SAM_X': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'sam_x'}, - 'SAM_Y': {'loc': 'ta_hdr', 'alt_key': None, 'name': 'sam_y'}} + self.keywds2extract = { + "FILENAME": { + "loc": "main_hdr", + "alt_key": None, + "name": "filename", + "type": str, + }, + "DATE-BEG": {"loc": "main_hdr", "alt_key": None, "name": "date_obs"}, + "OBS_ID": {"loc": "main_hdr", "alt_key": "OBSID", "name": "visit_id"}, + "FILTER": {"loc": "main_hdr", "alt_key": "FWA_POS", "name": "tafilter"}, + "READOUT": 
{"loc": "main_hdr", "alt_key": "READPATT", "name": "readout"}, + "TASTATUS": {"loc": "ta_hdr", "alt_key": None, "name": "ta_status"}, + "STAT_RSN": {"loc": "ta_hdr", "alt_key": None, "name": "status_reason"}, + "REFSTNAM": {"loc": "ta_hdr", "alt_key": None, "name": "star_name"}, + "REFSTRA": {"loc": "ta_hdr", "alt_key": None, "name": "star_ra"}, + "REFSTDEC": {"loc": "ta_hdr", "alt_key": None, "name": "star_dec"}, + "REFSTMAG": {"loc": "ta_hdr", "alt_key": None, "name": "star_mag"}, + "REFSTCAT": {"loc": "ta_hdr", "alt_key": None, "name": "star_catalog"}, + "V2_PLAND": {"loc": "ta_hdr", "alt_key": None, "name": "planned_v2"}, + "V3_PLAND": {"loc": "ta_hdr", "alt_key": None, "name": "planned_v3"}, + "EXTCOLST": {"loc": "ta_hdr", "alt_key": None, "name": "stamp_start_col"}, + "EXTROWST": {"loc": "ta_hdr", "alt_key": None, "name": "stamp_start_row"}, + "TA_DTCTR": {"loc": "ta_hdr", "alt_key": None, "name": "star_detector"}, + "BOXPKVAL": {"loc": "ta_hdr", "alt_key": None, "name": "max_val_box"}, + "BOXPKCOL": {"loc": "ta_hdr", "alt_key": None, "name": "max_val_box_col"}, + "BOXPKROW": {"loc": "ta_hdr", "alt_key": None, "name": "max_val_box_row"}, + "TA_ITERS": {"loc": "ta_hdr", "alt_key": "CENITERS", "name": "iterations"}, + "CORR_COL": {"loc": "ta_hdr", "alt_key": None, "name": "corr_col"}, + "CORR_ROW": {"loc": "ta_hdr", "alt_key": None, "name": "corr_row"}, + "IMCENCOL": {"loc": "ta_hdr", "alt_key": None, "name": "stamp_final_col"}, + "IMCENROW": {"loc": "ta_hdr", "alt_key": None, "name": "stamp_final_row"}, + "DTCENCOL": { + "loc": "ta_hdr", + "alt_key": None, + "name": "detector_final_col", + }, + "DTCENROW": { + "loc": "ta_hdr", + "alt_key": None, + "name": "detector_final_row", + }, + "SCIXCNTR": {"loc": "ta_hdr", "alt_key": None, "name": "final_sci_x"}, + "SCIYCNTR": {"loc": "ta_hdr", "alt_key": None, "name": "final_sci_y"}, + "TARGETV2": {"loc": "ta_hdr", "alt_key": None, "name": "measured_v2"}, + "TARGETV3": {"loc": "ta_hdr", "alt_key": None, "name": "measured_v3"}, + "V2_REF": {"loc": "ta_hdr", "alt_key": None, "name": "ref_v2"}, + "V3_REF": {"loc": "ta_hdr", "alt_key": None, "name": "ref_v3"}, + "V2_RESID": {"loc": "ta_hdr", "alt_key": "V2_OFFST", "name": "v2_offset"}, + "V3_RESID": {"loc": "ta_hdr", "alt_key": "V3_OFFST", "name": "v3_offset"}, + "SAM_X": {"loc": "ta_hdr", "alt_key": None, "name": "sam_x"}, + "SAM_Y": {"loc": "ta_hdr", "alt_key": None, "name": "sam_y"}, + } # initialize attributes to be set later self.source = None @@ -139,7 +178,7 @@ def __init__(self): self.date_view = None def get_tainfo_from_fits(self, fits_file): - """ Get the TA information from the fits file + """Get the TA information from the fits file Parameters ---------- fits_file: str @@ -154,22 +193,22 @@ def get_tainfo_from_fits(self, fits_file): with fits.open(fits_file) as ff: # make sure this is a WATA file for hdu in ff: - if 'TARG_ACQ' in hdu.name: + if "TARG_ACQ" in hdu.name: wata = True break if not wata: return None main_hdr = ff[0].header try: - ta_hdr = ff['TARG_ACQ'].header + ta_hdr = ff["TARG_ACQ"].header except KeyError: - no_ta_ext_msg = 'No TARG_ACQ extension in file '+fits_file + no_ta_ext_msg = "No TARG_ACQ extension in file " + fits_file return no_ta_ext_msg wata_info = [main_hdr, ta_hdr] return wata_info def get_wata_data(self, new_filenames): - """ Create the data array for the WATA input files + """Create the data array for the WATA input files Parameters ---------- new_filenames: list @@ -191,18 +230,18 @@ def get_wata_data(self, new_filenames): continue main_hdr, ta_hdr = 
wata_info for key, key_dict in self.keywds2extract.items(): - key_name = key_dict['name'] + key_name = key_dict["name"] if key_name not in wata_dict: wata_dict[key_name] = [] ext = main_hdr - if key_dict['loc'] == 'ta_hdr': + if key_dict["loc"] == "ta_hdr": ext = ta_hdr try: val = ext[key] - if key == 'filename': + if key == "filename": val = fits_file except KeyError: - val = ext[key_dict['alt_key']] + val = ext[key_dict["alt_key"]] wata_dict[key_name].append(val) # create the pandas dataframe wata_df = pd.DataFrame(wata_dict) @@ -210,19 +249,12 @@ def get_wata_data(self, new_filenames): def add_time_column(self): """Add time column to data source, to be used by all plots.""" - date_obs = self.source.data['date_obs'] - if 'time_arr' not in self.source.data: - time_arr = [] - for do_str in date_obs: - # convert time string into an array of time (this is in UT) - t = datetime.fromisoformat(do_str) - time_arr.append(t) - - # add to the bokeh data structure - self.source.data["time_arr"] = time_arr + date_obs = self.source.data["date_obs"].astype(str) + time_arr = [self.add_timezone(do_str) for do_str in date_obs] + self.source.data["time_arr"] = time_arr def plt_status(self): - """ Plot the WATA status (passed = 0 or failed = 1). + """Plot the WATA status (passed = 0 or failed = 1). Parameters ---------- None @@ -230,41 +262,55 @@ def plt_status(self): ------- plot: bokeh plot object """ - ta_status = self.source.data['ta_status'] + ta_status = self.source.data["ta_status"] # check if this column exists in the data already, else create it - if 'bool_status' not in self.source.data: + if "bool_status" not in self.source.data: # bokeh does not like to plot strings, turn into binary type bool_status, status_colors = [], [] for tas in ta_status: - if 'unsuccessful' not in tas.lower(): + if "unsuccessful" not in tas.lower(): bool_status.append(1) - status_colors.append('blue') + status_colors.append("blue") else: bool_status.append(0) - status_colors.append('red') + status_colors.append("red") # add these to the bokeh data structure self.source.data["ta_status_bool"] = bool_status self.source.data["status_colors"] = status_colors # create a new bokeh plot - plot = figure(title="WATA Status [Success=1, Fail=0]", x_axis_label='Time', - y_axis_label='WATA Status', x_axis_type='datetime',) + plot = figure( + title="WATA Status [Success=1, Fail=0]", + x_axis_label="Time", + y_axis_label="WATA Status", + x_axis_type="datetime", + ) plot.y_range = Range1d(-0.5, 1.5) - plot.circle(x='time_arr', y='ta_status_bool', source=self.source, - color='status_colors', size=7, fill_alpha=0.3, view=self.date_view) + plot.scatter( + marker="circle", + x="time_arr", + y="ta_status_bool", + source=self.source, + color="status_colors", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) # make tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Magnitude', '@star_mag'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Magnitude", "@star_mag"), + ("--------", "----------------"), + ] plot.add_tools(hover) @@ -274,7 +320,7 @@ def plt_status(self): return plot def plt_residual_offsets(self): - """ Plot the residual V2 and V3 offsets + """Plot the residual V2 and V3 
offsets Parameters ---------- None @@ -283,28 +329,41 @@ def plt_residual_offsets(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="WATA Residual V2-V3 Offsets", x_axis_label='Residual V2 Offset', - y_axis_label='Residual V3 Offset') - plot.circle(x='v2_offset', y='v3_offset', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="WATA Residual V2-V3 Offsets", + x_axis_label="Residual V2 Offset", + y_axis_label="Residual V3 Offset", + ) + plot.scatter( + marker="circle", + x="v2_offset", + y="v3_offset", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.x_range = Range1d(-0.5, 0.5) plot.y_range = Range1d(-0.5, 0.5) # mark origin lines - vline = Span(location=0, dimension='height', line_color='black', line_width=0.7) - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + vline = Span(location=0, dimension="height", line_color="black", line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([vline, hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Magnitude', '@star_mag'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Magnitude", "@star_mag"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -313,7 +372,7 @@ def plt_residual_offsets(self): return plot def plt_v2offset_time(self): - """ Plot the residual V2 versus time + """Plot the residual V2 versus time Parameters ---------- None @@ -322,26 +381,40 @@ def plt_v2offset_time(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="WATA V2 Offset vs Time", x_axis_label='Time', - y_axis_label='Residual V2 Offset', x_axis_type='datetime') - plot.circle(x='time_arr', y='v2_offset', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="WATA V2 Offset vs Time", + x_axis_label="Time", + y_axis_label="Residual V2 Offset", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="v2_offset", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-0.5, 0.5) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Magnitude', '@star_mag'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Magnitude", "@star_mag"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -350,7 +423,7 @@ def plt_v2offset_time(self): return plot def plt_v3offset_time(self): 
- """ Plot the residual V3 versus time + """Plot the residual V3 versus time Parameters ---------- None @@ -359,26 +432,40 @@ def plt_v3offset_time(self): plot: bokeh plot object """ # create a new bokeh plot - plot = figure(title="WATA V3 Offset vs Time", x_axis_label='Time', - y_axis_label='Residual V3 Offset', x_axis_type='datetime') - plot.circle(x='time_arr', y='v3_offset', source=self.source, - color="blue", size=7, fill_alpha=0.3, view=self.date_view) + plot = figure( + title="WATA V3 Offset vs Time", + x_axis_label="Time", + y_axis_label="Residual V3 Offset", + x_axis_type="datetime", + ) + plot.scatter( + marker="triangle", + x="time_arr", + y="v3_offset", + source=self.source, + color="blue", + size=7, + fill_alpha=0.3, + view=self.date_view, + ) plot.y_range = Range1d(-0.5, 0.5) # mark origin line - hline = Span(location=0, dimension='width', line_color='black', line_width=0.7) + hline = Span(location=0, dimension="width", line_color="black", line_width=0.7) plot.renderers.extend([hline]) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Magnitude', '@star_mag'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Magnitude", "@star_mag"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -387,7 +474,7 @@ def plt_v3offset_time(self): return plot def plt_mag_time(self): - """ Plot the star magnitude versus time + """Plot the star magnitude versus time Parameters ---------- None @@ -396,39 +483,45 @@ def plt_mag_time(self): plot: bokeh plot object """ # calculate the pseudo magnitudes - max_val_box, time_arr = self.source.data['max_val_box'], self.source.data['time_arr'] + max_val_box, time_arr = ( + self.source.data["max_val_box"], + self.source.data["time_arr"], + ) # check if this column exists in the data already, else create it if "nrsrapid_f140x" not in self.source.data: # create the arrays per filter and readout pattern nrsrapid_f140x, nrsrapid_f110w, nrsrapid_clear = [], [], [] nrsrapidd6_f140x, nrsrapidd6_f110w, nrsrapidd6_clear = [], [], [] - filter_used, readout = self.source.data['tafilter'], self.source.data['readout'] + filter_used, readout = ( + self.source.data["tafilter"], + self.source.data["readout"], + ) for i, val in enumerate(max_val_box): - if '140' in filter_used[i]: - if readout[i].lower() == 'nrsrapid': + if "140" in filter_used[i]: + if readout[i].lower() == "nrsrapid": nrsrapid_f140x.append(val) nrsrapid_f110w.append(np.NaN) nrsrapid_clear.append(np.NaN) nrsrapidd6_f140x.append(np.NaN) nrsrapidd6_f110w.append(np.NaN) nrsrapidd6_clear.append(np.NaN) - elif readout[i].lower() == 'nrsrapidd6': + elif readout[i].lower() == "nrsrapidd6": nrsrapid_f140x.append(np.NaN) nrsrapid_f110w.append(np.NaN) nrsrapid_clear.append(np.NaN) nrsrapidd6_f140x.append(val) nrsrapidd6_f110w.append(np.NaN) nrsrapidd6_clear.append(np.NaN) - elif '110' in filter_used[i]: - if readout[i].lower() == 'nrsrapid': + elif "110" in filter_used[i]: + if readout[i].lower() == "nrsrapid": nrsrapid_f140x.append(np.NaN) nrsrapid_f110w.append(val) nrsrapid_clear.append(np.NaN) nrsrapidd6_f140x.append(np.NaN) nrsrapidd6_f110w.append(np.NaN) nrsrapidd6_clear.append(np.NaN) - elif 
readout[i].lower() == 'nrsrapidd6': + elif readout[i].lower() == "nrsrapidd6": nrsrapid_f140x.append(np.NaN) nrsrapid_f110w.append(np.NaN) nrsrapid_clear.append(np.NaN) @@ -436,14 +529,14 @@ def plt_mag_time(self): nrsrapidd6_f110w.append(val) nrsrapidd6_clear.append(np.NaN) else: - if readout[i].lower() == 'nrsrapid': + if readout[i].lower() == "nrsrapid": nrsrapid_f140x.append(np.NaN) nrsrapid_f110w.append(np.NaN) nrsrapid_clear.append(val) nrsrapidd6_f140x.append(np.NaN) nrsrapidd6_f110w.append(np.NaN) nrsrapidd6_clear.append(np.NaN) - elif readout[i].lower() == 'nrsrapidd6': + elif readout[i].lower() == "nrsrapidd6": nrsrapid_f140x.append(np.NaN) nrsrapid_f110w.append(np.NaN) nrsrapid_clear.append(np.NaN) @@ -460,31 +553,87 @@ def plt_mag_time(self): self.source.data["nrsrapidd6_clear"] = nrsrapidd6_clear # create a new bokeh plot - plot = figure(title="WATA Counts vs Time", x_axis_label='Time', - y_axis_label='box_peak [Counts]', x_axis_type='datetime') - plot.circle(x='time_arr', y='nrsrapid_f140x', source=self.source, - color="purple", size=7, fill_alpha=0.4, view=self.date_view) - plot.circle(x='time_arr', y='nrsrapidd6_f140x', source=self.source, - color="purple", size=12, fill_alpha=0.4, view=self.date_view) - plot.triangle(x='time_arr', y='nrsrapid_f110w', source=self.source, - color="orange", size=8, fill_alpha=0.4, view=self.date_view) - plot.triangle(x='time_arr', y='nrsrapidd6_f110w', source=self.source, - color="orange", size=13, fill_alpha=0.4, view=self.date_view) - plot.square(x='time_arr', y='nrsrapid_clear', source=self.source, - color="gray", size=7, fill_alpha=0.4, view=self.date_view) - plot.square(x='time_arr', y='nrsrapidd6_clear', source=self.source, - color="gray", size=12, fill_alpha=0.4, view=self.date_view) + plot = figure( + title="WATA Counts vs Time", + x_axis_label="Time", + y_axis_label="box_peak [Counts]", + x_axis_type="datetime", + ) + plot.scatter( + marker="circle", + x="time_arr", + y="nrsrapid_f140x", + source=self.source, + color="purple", + size=7, + fill_alpha=0.4, + view=self.date_view, + ) + plot.scatter( + marker="circle", + x="time_arr", + y="nrsrapidd6_f140x", + source=self.source, + color="purple", + size=12, + fill_alpha=0.4, + view=self.date_view, + ) + plot.scatter( + marker="triangle", + x="time_arr", + y="nrsrapid_f110w", + source=self.source, + color="orange", + size=8, + fill_alpha=0.4, + view=self.date_view, + ) + plot.scatter( + marker="triangle", + x="time_arr", + y="nrsrapidd6_f110w", + source=self.source, + color="orange", + size=13, + fill_alpha=0.4, + view=self.date_view, + ) + plot.scatter( + marker="square", + x="time_arr", + y="nrsrapid_clear", + source=self.source, + color="gray", + size=7, + fill_alpha=0.4, + view=self.date_view, + ) + plot.scatter( + marker="square", + x="time_arr", + y="nrsrapidd6_clear", + source=self.source, + color="gray", + size=12, + fill_alpha=0.4, + view=self.date_view, + ) # add count saturation warning lines loc1, loc2, loc3 = 45000.0, 50000.0, 60000.0 - hline1 = Span(location=loc1, dimension='width', line_color='green', line_width=3) - hline2 = Span(location=loc2, dimension='width', line_color='yellow', line_width=3) - hline3 = Span(location=loc3, dimension='width', line_color='red', line_width=3) + hline1 = Span( + location=loc1, dimension="width", line_color="green", line_width=3 + ) + hline2 = Span( + location=loc2, dimension="width", line_color="yellow", line_width=3 + ) + hline3 = Span(location=loc3, dimension="width", line_color="red", line_width=3) plot.renderers.extend([hline1, 
hline2, hline3]) - label1 = Label(x=time_arr[-1], y=loc1, y_units='data', text='45000 counts') - label2 = Label(x=time_arr[-1], y=loc2, y_units='data', text='50000 counts') - label3 = Label(x=time_arr[-1], y=loc3, y_units='data', text='60000 counts') + label1 = Label(x=time_arr[-1], y=loc1, y_units="data", text="45000 counts") + label2 = Label(x=time_arr[-1], y=loc2, y_units="data", text="50000 counts") + label3 = Label(x=time_arr[-1], y=loc3, y_units="data", text="60000 counts") plot.add_layout(label1) plot.add_layout(label2) plot.add_layout(label3) @@ -492,14 +641,16 @@ def plt_mag_time(self): # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', '@date_obs'), - ('Box peak', '@max_val_box'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Box peak", "@max_val_box"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -508,7 +659,7 @@ def plt_mag_time(self): return plot def get_unsuccessful_ta(self, arr_name): - """ Find unsuccessful TAs in this set (to be plotted in red) + """Find unsuccessful TAs in this set (to be plotted in red) Parameters ---------- arr_name: str, name of the array of interest @@ -529,7 +680,7 @@ def get_unsuccessful_ta(self, arr_name): return new_list_failed, new_list_else def plt_centroid(self): - """ Plot the WATA centroid + """Plot the WATA centroid Parameters ---------- None @@ -539,8 +690,8 @@ def plt_centroid(self): """ # get the failed TAs to plot in red if "corr_col_failed" not in self.source.data: - corr_col_failed, corr_col_not_failed = self.get_unsuccessful_ta('corr_col') - corr_row_failed, corr_row_not_failed = self.get_unsuccessful_ta('corr_row') + corr_col_failed, corr_col_not_failed = self.get_unsuccessful_ta("corr_col") + corr_row_failed, corr_row_not_failed = self.get_unsuccessful_ta("corr_row") # add these to the bokeh data structure self.source.data["corr_col_failed"] = corr_col_failed @@ -549,32 +700,48 @@ def plt_centroid(self): self.source.data["corr_row_not_failed"] = corr_row_not_failed # create a new bokeh plot - plot = figure(title="WATA Centroid", x_axis_label='Column', - y_axis_label='Row') + plot = figure(title="WATA Centroid", x_axis_label="Column", y_axis_label="Row") limits = [10, 25] plot.x_range = Range1d(limits[0], limits[1]) plot.y_range = Range1d(limits[0], limits[1]) - plot.circle(x='corr_col_not_failed', y='corr_row_not_failed', source=self.source, - color="blue", size=7, fill_alpha=0.5, view=self.date_view) - plot.circle(x='corr_col_failed', y='corr_row_failed', source=self.source, - color="red", size=7, fill_alpha=0.5, view=self.date_view) + plot.scatter( + marker="circle", + x="corr_col_not_failed", + y="corr_row_not_failed", + source=self.source, + color="blue", + size=7, + fill_alpha=0.5, + view=self.date_view, + ) + plot.scatter( + x="corr_col_failed", + y="corr_row_failed", + source=self.source, + color="red", + size=7, + fill_alpha=0.5, + view=self.date_view, + ) plot.x_range = Range1d(0.0, 32.0) plot.y_range = Range1d(0.0, 32.0) # add tooltips hover = HoverTool() - hover.tooltips = [('File name', '@filename'), - ('Visit ID', '@visit_id'), - ('TA status', '@ta_status'), - ('Filter', '@tafilter'), - ('Readout', '@readout'), - ('Date-Obs', 
'@date_obs'), - ('Magnitude', '@star_mag'), - ('Box Centr Col', '@corr_col'), - ('Box Centr Row', '@corr_row'), - ('Det Centr Col', '@detector_final_col'), - ('Det Centr Row', '@detector_final_row'), - ('--------', '----------------')] + hover.tooltips = [ + ("File name", "@filename"), + ("Visit ID", "@visit_id"), + ("TA status", "@ta_status"), + ("Filter", "@tafilter"), + ("Readout", "@readout"), + ("Date-Obs", "@date_obs"), + ("Magnitude", "@star_mag"), + ("Box Centr Col", "@corr_col"), + ("Box Centr Row", "@corr_row"), + ("Det Centr Col", "@detector_final_col"), + ("Det Centr Row", "@detector_final_row"), + ("--------", "----------------"), + ] plot.add_tools(hover) # add shared selection tools @@ -586,8 +753,8 @@ def setup_date_range(self): """Set up a date range filter, defaulting to the last week of data.""" end_date = datetime.now(tz=timezone.utc) one_week_ago = end_date.date() - timedelta(days=7) - first_data_point = np.min(self.source.data['time_arr']).date() - last_data_point = np.max(self.source.data['time_arr']).date() + first_data_point = np.min(self.source.data["time_arr"]).date() + last_data_point = np.max(self.source.data["time_arr"]).date() if last_data_point < one_week_ago: # keep at least one point in the plot if there was # no TA data this week @@ -597,21 +764,30 @@ def setup_date_range(self): # allowed range is from the first ever data point to today self.date_range = DateRangeSlider( - title="Date range displayed", start=first_data_point, - end=end_date, value=(start_date, end_date), step=1) - - callback = CustomJS(args=dict(s=self.source), code=""" + title="Date range displayed", + start=first_data_point, + end=end_date, + value=(start_date, end_date), + step=1, + ) + + callback = CustomJS( + args=dict(s=self.source), + code=""" s.change.emit(); - """) - self.date_range.js_on_change('value', callback) + """, + ) + self.date_range.js_on_change("value", callback) - filt = CustomJSFilter(args=dict(slider=self.date_range), code=""" + filt = CustomJSFilter( + args=dict(slider=self.date_range), + code=""" var indices = []; var start = slider.value[0]; var end = slider.value[1]; for (var i=0; i < source.get_length(); i++) { - if (source.data['time_arr'][i] >= start + if (source.data['time_arr'][i] >= start && source.data['time_arr'][i] <= end) { indices.push(true); } else { @@ -619,18 +795,20 @@ def setup_date_range(self): } } return indices; - """) - self.date_view = CDSView(source=self.source, filters=[filt]) + """, + ) + self.date_view = CDSView(filter=filt) - def mk_plt_layout(self): - """Create the bokeh plot layout""" - self.source = ColumnDataSource(data=self.wata_data) + def mk_plt_layout(self, plot_data): + """Create the bokeh plot layout - # make sure all arrays are lists in order to later be able to read the data - # from the html file - for item in self.source.data: - if not isinstance(self.source.data[item], (str, float, int, list)): - self.source.data[item] = self.source.data[item].tolist() + Parameters + ---------- + plot_data : pandas.DataFrame + Dataframe of data to plot in bokeh + """ + + self.source = ColumnDataSource(data=plot_data) # add a time array to the data source self.add_time_column() @@ -642,7 +820,6 @@ def mk_plt_layout(self): self.setup_date_range() # set the output html file name and create the plot grid - output_file(self.output_file_name) p1 = self.plt_status() p2 = self.plt_residual_offsets() p3 = self.plt_v2offset_time() @@ -653,17 +830,31 @@ def mk_plt_layout(self): # make grid grid = gridplot([p1, p2, p3, p4, p5, p6], ncols=2, 
merge_tools=False) box_layout = layout(children=[self.date_range, grid]) - save(box_layout) - # return the needed components for embeding the results in the WATA html template - script, div = components(box_layout) - return script, div + self.script, self.div = components(box_layout) + + def file_exists_in_database(self, filename): + """Checks if an entry for filename exists in the wata stats + database. + + Parameters + ---------- + filename : str + The full path to the uncal filename. + + Returns + ------- + file_exists : bool + ``True`` if filename exists in the WATA stats database. + """ + results = self.stats_table.objects.filter(filename__iexact=filename).values() + return len(results) != 0 def identify_tables(self): """Determine which database tables to use for a run of the TA monitor.""" mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] - self.query_table = eval('{}TAQueryHistory'.format(mixed_case_name)) - self.stats_table = eval('{}TAStats'.format(mixed_case_name)) + self.query_table = eval("{}TaQueryHistory".format(mixed_case_name)) + self.stats_table = eval("{}WataStats".format(mixed_case_name)) def most_recent_search(self): """Query the query history database and return the information @@ -676,103 +867,26 @@ def most_recent_search(self): Date (in MJD) of the ending range of the previous MAST query where the wata monitor was run. """ - query = session.query(self.query_table).filter(and_(self.query_table.aperture == self.aperture, - self.query_table.run_monitor == True)).order_by(self.query_table.end_time_mjd).all() + filters = {"aperture__iexact": self.aperture, "run_monitor": True} - dates = np.zeros(0) - for instance in query: - dates = np.append(dates, instance.end_time_mjd) + record = ( + self.query_table.objects.filter(**filters).order_by("-end_time_mjd").first() + ) - query_count = len(dates) - if query_count == 0: + if record is None: query_result = self.query_very_beginning - logging.info(('\tNo query history for {}. Beginning search date will be set to {}.'.format(self.aperture, self.query_very_beginning))) + logging.info( + ( + "\tNo query history for {}. Beginning search date will be set to {}.".format( + self.aperture, self.query_very_beginning + ) + ) + ) else: - query_result = np.max(dates) + query_result = record.end_time_mjd return query_result - def get_expected_data(self, keywd_dict, tot_number_of_stars): - """This function gets the value append to the dictionary key in the expected format. - Parameters - ---------- - keywd_dict: dictonary - Dictionary corresponding to the file keyword - tot_number_of_stars: integer - Number of stars in the observation - Returns - ------- - val4dict: value - Value appended to the data structure; either string, float or integer - """ - # set the value to add - val = -999 - # return the right type of value - if keywd_dict['type'] == float: - val = float(val) - if keywd_dict['type'] == str: - val = str(val) - val4dict = val - return val4dict - - def get_data_from_html(self, html_file): - """ - This function gets the data from the Bokeh html file created with - the NIRSpec TA monitor script. 
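The rewritten most_recent_search() above swaps the old SQLAlchemy scan (collect every end_time_mjd, take the max) for a single ordered ORM query. A sketch of that idiom; the model argument is a hypothetical stand-in with the fields used in the diff:

```python
# Sketch: one ordered query replaces the old collect-then-max scan in
# most_recent_search(). `model` is any query-history model exposing
# aperture, run_monitor, and end_time_mjd fields.
def most_recent_end_time(model, aperture, fallback_mjd=59607.0):
    record = (
        model.objects.filter(aperture__iexact=aperture, run_monitor=True)
        .order_by("-end_time_mjd")  # newest first
        .first()  # None when no prior run exists
    )
    return fallback_mjd if record is None else record.end_time_mjd
```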
- Parameters - ---------- - html_file: str - File created by the monitor script - Returns - ------- - prev_data: pandas dataframe - Contains all expected columns to be combined with the new data - latest_prev_obs: str - Date of the latest observation in the previously plotted data - """ - - # open the html file and get the contents - htmlFileToBeOpened = open(html_file, "r") - contents = htmlFileToBeOpened.read() - soup = BeautifulSoup(contents, 'html.parser') - - # now read as python dictionary and search for the data - prev_data_dict = {} - html_data = json.loads(soup.find('script', type='application/json').string) - for key, val in html_data.items(): - if 'roots' in val: # this is a dictionary - if 'references' in val['roots']: - for item in val['roots']['references']: # this is a list - # each item of the list is a dictionary - for item_key, item_val in item.items(): - if 'data' in item_val: - # finally the data dictionary! - for data_key, data_val in item_val['data'].items(): - prev_data_dict[data_key] = data_val - # find the latest observation date - time_in_millis = max(prev_data_dict['time_arr']) - latest_prev_obs = Time(time_in_millis / 1000., format='unix') - latest_prev_obs = latest_prev_obs.mjd - # put data in expected format - prev_data_expected_cols = {} - visit_ids = prev_data_dict['visit_id'] - for file_keywd, keywd_dict in self.keywds2extract.items(): - key = keywd_dict['name'] - if key in prev_data_dict: - # case when the html stored thing is just an object but does not have data - if len(prev_data_dict[key]) < len(visit_ids): - list4dict = self.get_expected_data(keywd_dict, visit_ids) - prev_data_expected_cols[key] = list4dict - # case when nothing special to do - else: - prev_data_expected_cols[key] = prev_data_dict[key] - else: - list4dict = self.get_expected_data(keywd_dict, visit_ids) - prev_data_expected_cols[key] = list4dict - # now convert to a panda dataframe to be combined with the new data - prev_data = pd.DataFrame(prev_data_expected_cols) - return prev_data, latest_prev_obs - def pull_filenames(self, file_info): """Extract filenames from the list of file information returned from query_mast. 
@@ -789,10 +903,10 @@ def pull_filenames(self, file_info): """ files = [] for list_element in file_info: - if 'filename' in list_element: - files.append(list_element['filename']) - elif 'root_name' in list_element: - files.append(list_element['root_name']) + if "filename" in list_element: + files.append(list_element["filename"]) + elif "root_name" in list_element: + files.append(list_element["root_name"]) return files def get_uncal_names(self, file_list): @@ -808,13 +922,13 @@ def get_uncal_names(self, file_list): """ good_files = [] for filename in file_list: - if filename.endswith('.fits'): + if filename.endswith(".fits"): # MAST names look like: jw01133003001_02101_00001_nrs2_cal.fits - suffix2replace = filename.split('_')[-1] - filename = filename.replace(suffix2replace, 'uncal.fits') + suffix2replace = filename.split("_")[-1] + filename = filename.replace(suffix2replace, "uncal.fits") else: # rootnames look like: jw01133003001_02101_00001_nrs2 - filename += '_uncal.fits' + filename += "_uncal.fits" if filename not in good_files: good_files.append(filename) return good_files @@ -832,13 +946,19 @@ def update_ta_success_txtfile(self): # check if previous file exists and read the data from it if os.path.isfile(output_success_ta_txtfile): # now rename the previous file, for backup - os.rename(output_success_ta_txtfile, os.path.join(self.output_dir, "prev_wata_success.txt")) + os.rename( + output_success_ta_txtfile, + os.path.join(self.output_dir, "prev_wata_success.txt"), + ) # get the new data ta_success, ta_failure = [], [] - filenames, ta_status = self.wata_data.loc[:,'filename'], self.wata_data.loc[:,'ta_status'] + filenames, ta_status = ( + self.wata_data.loc[:, "filename"], + self.wata_data.loc[:, "ta_status"], + ) for fname, ta_stat in zip(filenames, ta_status): # select the appropriate list to append to - if ta_stat == 'SUCCESSFUL': + if ta_stat == "SUCCESSFUL": ta_success.append(fname) else: ta_failure.append(fname) @@ -856,9 +976,11 @@ def update_ta_success_txtfile(self): for _ in range(remaining_items): ta_list.append("") # write the new output file - with open(output_success_ta_txtfile, 'w+') as txt: + with open(output_success_ta_txtfile, "w+") as txt: txt.write("# WATA successes and failure file names \n") - filehdr1 = "# {} Total successful and {} total failed WATA ".format(successes, failures) + filehdr1 = "# {} Total successful and {} total failed WATA ".format( + successes, failures + ) filehdr2 = "# {:<50} {:<50}".format("Successes", "Failures") txt.write(filehdr1 + "\n") txt.write(filehdr2 + "\n") @@ -866,167 +988,196 @@ def update_ta_success_txtfile(self): line = "{:<50} {:<50}".format(suc, ta_failure[idx]) txt.write(line + "\n") - def read_existing_html(self): + def add_timezone(self, date_str): + """Method to bypass timezone warning from Django""" + dt_timezone = parser.parse(date_str).replace(tzinfo=timezone.utc) + return dt_timezone + + def add_wata_data(self): + """Method to add WATA data to stats database""" + # self.wata_data is a pandas dataframe. When creating the django model + # to store all of the WATA data, this data was previously extracted and stored + # into a dataframe. To avoid rewriting self.get_wata_data(), it is easier to + # iterate over the rows of the returned dataframe and access the metadata this + # way. 
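The add_timezone() helper introduced above pins the naive DATE-BEG header string to UTC so Django's DateTimeField does not emit naive-datetime warnings. A runnable sketch of the same conversion:

```python
# Sketch: make a naive DATE-BEG header string timezone-aware, as
# add_timezone() does.
from datetime import timezone

from dateutil import parser


def to_utc(date_str):
    # parse the ISO-ish string, then pin it to UTC
    return parser.parse(date_str).replace(tzinfo=timezone.utc)


print(to_utc("2024-04-01T12:34:56.789"))
# -> 2024-04-01 12:34:56.789000+00:00
```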
+ for _, row in self.wata_data.iterrows(): + stats_db_entry = { + "filename": row["filename"], + "date_obs": self.add_timezone(row["date_obs"]), + "visit_id": row["visit_id"], + "tafilter": row["tafilter"], + "readout": row["readout"], + "ta_status": row["ta_status"], + "star_name": row["star_name"], + "star_ra": row["star_ra"], + "star_dec": row["star_dec"], + "star_mag": row["star_mag"], + "star_catalog": row["star_catalog"], + "planned_v2": row["planned_v2"], + "planned_v3": row["planned_v3"], + "stamp_start_col": row["stamp_start_col"], + "stamp_start_row": row["stamp_start_row"], + "star_detector": row["star_detector"], + "max_val_box": row["max_val_box"], + "max_val_box_col": row["max_val_box_col"], + "max_val_box_row": row["max_val_box_row"], + "iterations": row["iterations"], + "corr_col": row["corr_col"], + "corr_row": row["corr_row"], + "stamp_final_col": row["stamp_final_col"], + "stamp_final_row": row["stamp_final_row"], + "detector_final_col": row["detector_final_col"], + "detector_final_row": row["detector_final_row"], + "final_sci_x": row["final_sci_x"], + "final_sci_y": row["final_sci_y"], + "measured_v2": row["measured_v2"], + "measured_v3": row["measured_v3"], + "ref_v2": row["ref_v2"], + "ref_v3": row["ref_v3"], + "v2_offset": row["v2_offset"], + "v3_offset": row["v3_offset"], + "sam_x": row["sam_x"], + "sam_y": row["sam_y"], + "entry_date": datetime.now(tz=timezone.utc), + } + + entry = self.stats_table(**stats_db_entry) + entry.save() + + logging.info("\tNew entry added to WATA stats database table") + + logging.info("\tUpdated the WATA statistics table") + + def plots_for_app(self): + """Utility function to access div and script objects for + embedding bokeh in JWQL application. """ - This function gets the data from the Bokeh html file created with - the NIRSpec TA monitor script. - """ - self.output_dir = os.path.join(get_config()['outputs'], 'wata_monitor') - ensure_dir_exists(self.output_dir) - - self.output_file_name = os.path.join(self.output_dir, "wata_layout.html") - if not os.path.isfile(self.output_file_name): - return 'No WATA data available', '', '' - - # open the html file and get the contents - with open(self.output_file_name, "r") as html_file: - contents = html_file.read() - - soup = BeautifulSoup(contents, 'html.parser').body - - # find the script elements - script1 = str(soup.find('script', type='text/javascript')) - script2 = str(soup.find('script', type='application/json')) - - # find the div element - div = str(soup.find('div', class_='bk-root')) - return div, script1, script2 + # Query results and convert into pandas df. + self.query_results = pd.DataFrame( + list(NIRSpecWataStats.objects.all().values()) + ) + # Generate plot + self.mk_plt_layout(self.query_results) @log_fail @log_info def run(self): """The main method. 
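The plots_for_app() method above relies on a Django queryset's .values() yielding one dict per row, which pandas consumes directly. A sketch of that queryset-to-DataFrame step; the model name is hypothetical:

```python
# Sketch: materialize a Django queryset as a pandas DataFrame for
# plotting, as plots_for_app() does. The model passed in is hypothetical.
import pandas as pd


def stats_as_dataframe(model):
    # .values() yields one dict per row; list() forces evaluation
    return pd.DataFrame(list(model.objects.all().values()))

# df = stats_as_dataframe(ExampleStats); then hand df to mk_plt_layout()
```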
See module docstrings for further details.""" - logging.info('Begin logging for wata_monitor') - - # define WATA variables - self.instrument = "nirspec" - self.aperture = "NRS_S1600A1_SLIT" + logging.info("Begin logging for wata_monitor") # Identify which database tables to use self.identify_tables() - # Get the output directory and setup a directory to store the data - self.output_dir = os.path.join(get_config()['outputs'], 'wata_monitor') - ensure_dir_exists(self.output_dir) - # Set up directories for the copied data - ensure_dir_exists(os.path.join(self.output_dir, 'data')) - self.data_dir = os.path.join(self.output_dir, - 'data/{}_{}'.format(self.instrument.lower(), - self.aperture.lower())) - ensure_dir_exists(self.data_dir) - - # Locate the record of most recent MAST search; use this time + # Locate the record of most recent time the monitor was run self.query_start = self.most_recent_search() - # get the data of the plots previously created and set the query start date - self.prev_data = None - self.output_file_name = os.path.join(self.output_dir, "wata_layout.html") - logging.info('\tNew output plot file will be written as: {}'.format(self.output_file_name)) - if os.path.isfile(self.output_file_name): - self.prev_data, self.query_start = self.get_data_from_html(self.output_file_name) - logging.info('\tPrevious data read from html file: {}'.format(self.output_file_name)) - # copy this plot to a previous version - shutil.copyfile(self.output_file_name, os.path.join(self.output_dir, "prev_wata_layout.html")) - # fail save - start from the beginning if there is no html file - else: - self.query_start = self.query_very_beginning - logging.info('\tPrevious output html file not found. Starting MAST query from Jan 28, 2022 == First JWST images (MIRI)') # Use the current time as the end time for MAST query self.query_end = Time.now().mjd - logging.info('\tQuery times: {} {}'.format(self.query_start, self.query_end)) - - # Query for data using the aperture and the time of the - # most recent previous search as the starting time - - # via MAST: - # new_entries = monitor_utils.mast_query_ta( - # self.instrument, self.aperture, self.query_start, self.query_end) + logging.info("\tQuery times: {} {}".format(self.query_start, self.query_end)) - # via django model: + # Obtain all entries with instrument/aperture combinations: new_entries = monitor_utils.model_query_ta( - self.instrument, self.aperture, self.query_start, self.query_end) + self.instrument, self.aperture, self.query_start, self.query_end + ) wata_entries = len(new_entries) - logging.info('\tQuery has returned {} WATA files for {}, {}.'.format(wata_entries, self.instrument, self.aperture)) + logging.info( + "\tQuery has returned {} WATA files for {}, {}.".format( + wata_entries, self.instrument, self.aperture + ) + ) # Filter new entries to only keep uncal files new_entries = self.pull_filenames(new_entries) new_entries = self.get_uncal_names(new_entries) wata_entries = len(new_entries) - logging.info('\tThere are {} uncal TA files to run the WATA monitor.'.format(wata_entries)) + logging.info( + "\tThere are {} uncal TA files to run the WATA monitor.".format( + wata_entries + ) + ) - # Get full paths to the files + # Check if filenames from the RootFileInfo model are in data storage new_filenames = [] for filename_of_interest in new_entries: - if (self.prev_data is not None - and filename_of_interest in self.prev_data['filename'].values): - logging.warning('\t\tFile {} already in previous data.
Skipping.'.format(filename_of_interest)) + if self.file_exists_in_database(filename_of_interest): + logging.warning( + "\t\tFile {} already in database, skipping.".format( + filename_of_interest + ) + ) continue - try: - new_filenames.append(filesystem_path(filename_of_interest)) - logging.warning('\tFile {} included for processing.'.format(filename_of_interest)) - except FileNotFoundError: - logging.warning('\t\tUnable to locate {} in filesystem. Not including in processing.'.format(filename_of_interest)) - + else: + try: + new_filenames.append(filesystem_path(filename_of_interest)) + logging.warning( + "\t\tFile {} included for processing.".format( + filename_of_interest + ) + ) + except FileNotFoundError: + logging.warning( + "\t\tUnable to locate {} in filesystem. Not including in processing.".format( + filename_of_interest + ) + ) + + # If there are no new files, monitor is skipped if len(new_filenames) == 0: - logging.warning('\t\t ** Unable to locate any file in filesystem. Nothing to process. ** ') - - # Run the monitor on any new files - self.script, self.div, self.wata_data = None, None, None - monitor_run = False - if len(new_filenames) > 0: # new data was found - # get the data - self.new_wata_data, no_ta_ext_msgs = self.get_wata_data(new_filenames) + logging.info( + "\t\t ** Unable to locate any file in filesystem. Nothing to process. ** " + ) + logging.info("\tWATA monitor skipped. No WATA data found.") + monitor_run = False + else: + # Run the monitor on any new files + # self.wata_data is a pandas dataframe + self.wata_data, no_ta_ext_msgs = self.get_wata_data(new_filenames) + + # Log msgs from TA files. if len(no_ta_ext_msgs) >= 1: for item in no_ta_ext_msgs: logging.info(item) - if self.new_wata_data is not None: - # concatenate with previous data - if self.prev_data is not None: - self.wata_data = pd.concat([self.prev_data, self.new_wata_data]) - logging.info('\tData from previous html output file and new data concatenated.') - else: - self.wata_data = self.new_wata_data - logging.info('\tOnly new data was found - no previous html file.') else: - logging.info('\tWATA monitor skipped. No WATA data found.') - # make sure to return the old data if no new data is found - elif self.prev_data is not None: - self.wata_data = self.prev_data - logging.info('\tNo new data found. Using data from previous html output file.') - # do the plots if there is any data - if self.wata_data is not None: - self.script, self.div = self.mk_plt_layout() - monitor_run = True - logging.info('\tOutput html plot file created: {}'.format(self.output_file_name)) - wata_files_used4plots = len(self.wata_data['visit_id']) - logging.info('\t{} WATA files were used to make plots.'.format(wata_files_used4plots)) - # update the list of successful and failed TAs - self.update_ta_success_txtfile() - logging.info('\tWATA status file was updated') - else: - logging.info('\tWATA monitor skipped.') - - # Update the query history - new_entry = {'instrument': self.instrument, - 'aperture': self.aperture, - 'start_time_mjd': self.query_start, - 'end_time_mjd': self.query_end, - 'entries_found': wata_entries, - 'files_found': len(new_filenames), - 'run_monitor': monitor_run, - 'entry_date': datetime.now()} + logging.info("\t No TA Ext Msgs Found") - with engine.begin() as connection: - connection.execute(self.query_table.__table__.insert(), new_entry) - logging.info('\tUpdated the query history table') + # Add WATA data to stats table. 
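The loop above reduces to a dedup-then-locate pattern: consult the stats table first, and only fall back to a filesystem lookup for files not yet ingested. A minimal runnable sketch of that pattern, with seen and locate() as hypothetical stand-ins for file_exists_in_database() and filesystem_path():

    # Hypothetical stand-ins: 'seen' plays the stats table, locate() the filesystem.
    seen = {"jw01234001001_02101_00001_nrs1_uncal.fits"}

    def locate(name):
        if name == "jw09999001001_02101_00001_nrs1_uncal.fits":
            raise FileNotFoundError(name)  # not on disk yet
        return "/filesystem/public/" + name

    new_filenames = []
    for name in ["jw01234001001_02101_00001_nrs1_uncal.fits",
                 "jw05555001001_02101_00001_nrs1_uncal.fits",
                 "jw09999001001_02101_00001_nrs1_uncal.fits"]:
        if name in seen:
            continue  # already processed on a previous run
        try:
            new_filenames.append(locate(name))
        except FileNotFoundError:
            pass  # skip files that cannot be located
    print(len(new_filenames))  # 1: one file is deduplicated, one is missing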
+ self.add_wata_data() - logging.info('WATA Monitor completed successfully.') + # Once data is added to database table and plots are made, the + # monitor has run successfully. + monitor_run = True + wata_files_used4plots = len(self.wata_data["visit_id"]) + logging.info( + "\t{} WATA files were used to make plots.".format(wata_files_used4plots) + ) + # update the list of successful and failed TAs + self.update_ta_success_txtfile() -if __name__ == '__main__': + logging.info("\tWATA status file was updated") - module = os.path.basename(__file__).strip('.py') + # Update the query history + new_entry = { + "instrument": self.instrument, + "aperture": self.aperture, + "start_time_mjd": self.query_start, + "end_time_mjd": self.query_end, + "entries_found": wata_entries, + "files_found": len(new_filenames), + "run_monitor": monitor_run, + "entry_date": datetime.now(tz=timezone.utc), + } + + entry = self.query_table(**new_entry) + entry.save() + logging.info("\tUpdated the query history table") + + logging.info("WATA Monitor completed successfully.") + + +if __name__ == "__main__": + module = os.path.basename(__file__).strip(".py") start_time, log_file = monitor_utils.initialize_instrument_monitor(module) monitor = WATA() diff --git a/jwql/jwql_monitors/generate_preview_images.py b/jwql/jwql_monitors/generate_preview_images.py index 544683457..cb897368e 100755 --- a/jwql/jwql_monitors/generate_preview_images.py +++ b/jwql/jwql_monitors/generate_preview_images.py @@ -212,6 +212,13 @@ def check_existence(file_list, outdir): # for the appropriately named jpg of the mosaic, which depends # on the specific detectors in the file_list file_parts = filename_parser(file_list[0]) + + # If filename_parser() does not recognize the filename, return False + if not file_parts['recognized_filename']: + logging.warning((f'While running check_existence() for a preview image for {file_list[0]}, ' + 'filename_parser() failed to recognize the file pattern.')) + return False + if file_parts['detector'].upper() in NIRCAM_SHORTWAVE_DETECTORS: mosaic_str = "NRC_SW*_MOSAIC_" elif file_parts['detector'].upper() in NIRCAM_LONGWAVE_DETECTORS: @@ -253,7 +260,14 @@ def create_dummy_filename(filelist): modules = [] for filename in filelist: indir, infile = os.path.split(filename) - det_string = filename_parser(infile)['detector'] + parsed_filename = filename_parser(infile) + if parsed_filename['recognized_filename']: + det_string = parsed_filename['detector'] + else: + # If filename_parser() does not recognize the file, skip it + logging.warning((f'While using {infile} to create a dummy filename in create_dummy_filename(), the ' + 'filename parser failed.')) + continue det_string_list.append(det_string) modules.append(det_string[3].upper()) @@ -307,7 +321,14 @@ def create_mosaic(filenames): else: diff_im = image.data data.append(diff_im) - detector.append(filename_parser(filename)['detector'].upper()) + file_info = filename_parser(filename) + if file_info['recognized_filename']: + detector.append(file_info['detector'].upper()) + else: + # If filename_parser() does not recognize the file, skip it. + logging.warning((f'While running create_mosaic() using {filename}, ' + 'filename_parser() failed to recognize the file pattern.')) + pass data_lower_left.append((image.xstart, image.ystart)) # Make sure SW and LW data are not being mixed. 
Create the @@ -438,6 +459,7 @@ def define_options(parser=None, usage=None, conflict_handler='resolve'): parser = argparse.ArgumentParser(usage=usage, conflict_handler=conflict_handler) parser.add_argument('--overwrite', action='store_true', default=None, help='If set, existing preview images will be re-created and overwritten.') + parser.add_argument('-p', '--programs', nargs='+', type=int, help='List of program IDs to generate preview images for. If omitted, all programs will be done.') return parser @@ -540,7 +562,7 @@ def get_base_output_name(filename_dict): @log_fail @log_info -def generate_preview_images(overwrite): +def generate_preview_images(overwrite, programs=None): """The main function of the ``generate_preview_image`` module. See module docstring for further details. @@ -548,12 +570,38 @@ ---------- overwrite : bool If True, any existing preview images and thumbnails are overwritten + + programs : list + List of program ID numbers (e.g. 1068, 01220) for which to generate preview + images. If None (the default), preview images are generated for all programs. """ + # Get a list of programs to create preview images for. First, generate a list of all + # possible programs. We can compare any user inputs to this list, and if there are no + # user inputs, then use this entire list. + all_programs = [os.path.basename(item) for item in glob.glob(os.path.join(SETTINGS['filesystem'], 'public', 'jw*'))] + all_programs.extend([os.path.basename(item) for item in glob.glob(os.path.join(SETTINGS['filesystem'], 'proprietary', 'jw*'))]) + + if programs is None: + program_list = all_programs + else: + if not isinstance(programs, list): + raise ValueError(f'programs must be a list. In this call, it is {type(programs)}') + program_list = [] + for prog in programs: + jwprog = f'jw{str(prog).zfill(5)}' + if jwprog in all_programs: + program_list.append(jwprog) + else: + logging.info(f'Program {prog} not present in filesystem. Excluding.') + + if len(program_list) > 0: + program_list = sorted(program_list, reverse=True) + else: + no_prog_message = 'Empty list of programs. No preview images to be made.' 
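The normalization above maps user-supplied program IDs onto the jwNNNNN directory names found on disk before any preview images are generated. A minimal standalone sketch of that mapping, with a hypothetical directory list standing in for the glob results:

    # Hypothetical programs present on disk, as globbed from the filesystem
    all_programs = ["jw01068", "jw01220", "jw02589"]

    def to_jwprog(prog):
        # Zero-pad the program ID and add the 'jw' prefix, e.g. 1068 -> 'jw01068'
        return f"jw{str(prog).zfill(5)}"

    requested = [1068, "01220", 99999]
    program_list = [to_jwprog(p) for p in requested if to_jwprog(p) in all_programs]
    print(program_list)  # ['jw01068', 'jw01220']; 99999 is not on disk, so it is excluded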
+ logging.info(no_prog_message) + raise ValueError(no_prog_message) # Process programs in parallel - program_list = [os.path.basename(item) for item in glob.glob(os.path.join(SETTINGS['filesystem'], 'public', 'jw*'))] - program_list.extend([os.path.basename(item) for item in glob.glob(os.path.join(SETTINGS['filesystem'], 'proprietary', 'jw*'))]) - program_list = list(set(program_list)) pool = multiprocessing.Pool(processes=int(SETTINGS['cores'])) program_list = [(element, overwrite) for element in program_list] results = pool.starmap(process_program, program_list) @@ -623,10 +671,10 @@ def group_filenames(filenames): subgroup = [] # Generate string to be matched with other filenames - try: - filename_dict = filename_parser(os.path.basename(filename)) - except ValueError: - logging.warning('Could not parse filename for {}'.format(filename)) + filename_dict = filename_parser(os.path.basename(filename)) + if not filename_dict['recognized_filename']: + logging.warning((f'While running generate_preview_images.group_filenames() on {filename}, the ' + 'filename_parser() failed to recognize the file pattern.')) break # If the filename was already involved in a match, then skip @@ -704,7 +752,16 @@ def process_program(program, overwrite): filenames = [filename for filename in filenames if os.path.splitext(filename.split('_')[-1])[0] not in IGNORED_SUFFIXES] # Remove guiding files, as these are not currently visible in JWQL anyway - filenames = [filename for filename in filenames if 'guider_mode' not in filename_parser(filename)] + filtered_filenames = [] + for filename in filenames: + parsed = filename_parser(filename) + if parsed['recognized_filename']: + if 'guider_mode' not in parsed and 'detector' in parsed: + filtered_filenames.append(filename) + else: + logging.warning((f'While running generate_preview_images.process_program() on {filename}, the ' + 'filename_parser() failed to recognize the file pattern.')) + filenames = filtered_filenames logging.info('Found {} filenames'.format(len(filenames))) logging.info('') @@ -718,10 +775,14 @@ def process_program(program, overwrite): logging.debug(f'Working on {filename}') # Determine the save location - try: - identifier = 'jw{}'.format(filename_parser(filename)['program_id']) - except ValueError: + parsed = filename_parser(filename) + if parsed['recognized_filename']: + identifier = 'jw{}'.format(parsed['program_id']) + else: + # In this case, the filename_parser failed to recognize the filename identifier = os.path.basename(filename).split('.fits')[0] + logging.warning((f'While running generate_preview_images.process_program() on filtered filename {filename}, the ' + 'filename_parser() failed to recognize the file pattern.')) preview_output_directory = os.path.join(SETTINGS['preview_image_filesystem'], identifier) thumbnail_output_directory = os.path.join(SETTINGS['thumbnail_filesystem'], identifier) @@ -819,7 +880,7 @@ def update_listfile(filename, file_list, filetype): @lock_module -def protected_code(overwrite): +def protected_code(overwrite, programs): """Protected code ensures only 1 instance of module will run at any given time Parameters @@ -830,11 +891,11 @@ def protected_code(overwrite): module = os.path.basename(__file__).strip('.py') start_time, log_file = initialize_instrument_monitor(module) - generate_preview_images(overwrite) + generate_preview_images(overwrite, programs=programs) update_monitor_table(module, start_time, log_file) if __name__ == '__main__': parser = define_options() args = parser.parse_args() - 
protected_code(args.overwrite) + protected_code(args.overwrite, args.programs) diff --git a/jwql/jwql_monitors/monitor_filesystem.py b/jwql/jwql_monitors/monitor_filesystem.py index 2c1f4d379..27364f888 100755 --- a/jwql/jwql_monitors/monitor_filesystem.py +++ b/jwql/jwql_monitors/monitor_filesystem.py @@ -75,6 +75,7 @@ THUMBNAILS = SETTINGS['thumbnail_filesystem'] LOGS = SETTINGS['log_dir'] + def files_per_filter(): """Querying MAST (rather than looping through the filesystem), determine how many files use each filter for each instrument. Note that thiw function takes @@ -94,7 +95,7 @@ def files_per_filter(): for fname in FILTERS_PER_INSTRUMENT[instrument]: # note that this does not include pupil wheel-based filters obs = Observations.query_criteria(filters=fname, instrument_name=JWST_INSTRUMENT_NAMES_MIXEDCASE[instrument]) batch_size = 5 - batches = [obs[i:i+batch_size] for i in range(0, len(obs), batch_size)] + batches = [obs[i:i + batch_size] for i in range(0, len(obs), batch_size)] obs_table = [Observations.get_product_list(batch) for batch in batches] products = unique(vstack(obs_table), keys='productFilename') @@ -137,9 +138,12 @@ if filename.endswith(".fits"): # Parse out filename information - try: - filename_dict = filename_parser(filename) - except ValueError: + filename_dict = filename_parser(filename) + if filename_dict['recognized_filename']: + filename_type = filename_dict['filename_type'] + else: + logging.warning((f'While running gather_statistics() on the filesystem, {filename} ' + 'caused filename_parser() to fail.')) break # For MSA files, which do not have traditional suffixes, set the diff --git a/jwql/pull_jwql_branch.sh b/jwql/pull_jwql_branch.sh index 95a1c94b4..6c4bcbf94 100644 --- a/jwql/pull_jwql_branch.sh +++ b/jwql/pull_jwql_branch.sh @@ -62,11 +62,12 @@ echo "Reset: $reset"; echo "Notify: $notify $recipient"; # 1. Pull updated code from GitHub deployment branch (keep second checkout in case its already defined for some weird reason) -git checkout -b $branch_name --track origin/$branch_name +git fetch upstream +git checkout -b $branch_name --track upstream/$branch_name git checkout $branch_name -git fetch origin $branch_name -git pull origin $branch_name -git fetch origin --tags +git fetch upstream $branch_name +git pull upstream $branch_name +git fetch upstream --tags # 2. Bring the service down if [ "$reset" = true ]; then @@ -76,8 +77,9 @@ fi # 3. Install jwql pip install -e .. -# 4. Merge Any Migrations -python ./website/manage.py migrate +# 4. Merge Any Migrations that exist in either database (router.py will sort where they go) +python ./website/manage.py migrate jwql +python ./website/manage.py migrate jwql --database=monitors # 5. 
Bring the service back up if [ "$reset" = true ]; then diff --git a/jwql/shared_tasks/run_pipeline.py b/jwql/shared_tasks/run_pipeline.py index ffc197310..a29de1a74 100755 --- a/jwql/shared_tasks/run_pipeline.py +++ b/jwql/shared_tasks/run_pipeline.py @@ -359,6 +359,9 @@ def run_save_jump(input_file, short_name, work_directory, instrument, ramp_fit=T pipe_type = args.pipe outputs = args.outputs step_args = args.step_args + # ***** FUTURE FIX ***** + # This needs to be removed once it's possible to do multi-core pipelines again + args.max_cores = "none" status_file = os.path.join(working_path, short_name + "_status.txt") with open(status_file, 'w') as out_file: diff --git a/jwql/tests/test_data_containers.py b/jwql/tests/test_data_containers.py index 7c4f68401..4b1f1c4ba 100644 --- a/jwql/tests/test_data_containers.py +++ b/jwql/tests/test_data_containers.py @@ -31,7 +31,7 @@ import pandas as pd import pytest -from jwql.utils.constants import ON_GITHUB_ACTIONS +from jwql.utils.constants import ON_GITHUB_ACTIONS, DEFAULT_MODEL_CHARFIELD os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") @@ -45,7 +45,7 @@ from jwql.utils.utils import get_config # noqa: E402 (module level import not at top of file) from jwql.website.apps.jwql.models import RootFileInfo - + @pytest.mark.skipif(ON_GITHUB_ACTIONS, reason='Requires access to django models.') def test_build_table(): tab = data_containers.build_table('filesystem_general') @@ -199,7 +199,6 @@ def test_get_all_proposals(): (['uncal', 'rate', 'o001_crf', 'o006_crfints', 'bad'], {'bad'})), (False, ['rate', 'uncal', 'bad', 'o006_crfints', 'o001_crf'], ['uncal', 'rate', 'o001_crf', 'o006_crfints', 'bad'])]) - def test_get_available_suffixes(untracked, input_suffixes, expected): result = data_containers.get_available_suffixes( input_suffixes, return_untracked=untracked) @@ -339,6 +338,7 @@ def test_get_anomaly_form_post_group(mocker): assert update_mock.call_count == 2 """ + @pytest.mark.skipif(ON_GITHUB_ACTIONS, reason='Requires access to django models.') def test_get_dashboard_components(): request = MockPostRequest() @@ -607,42 +607,42 @@ def test_mast_query_by_rootname(): instrument = 'NIRCam' rootname1 = 'jw02767002001_02103_00005_nrcb4' dict_stuff = data_containers.mast_query_by_rootname(instrument, rootname1) - defaults = dict(filter=dict_stuff.get('filter', ''), - detector=dict_stuff.get('detector', ''), - exp_type=dict_stuff.get('exp_type', ''), - read_pat=dict_stuff.get('readpatt', ''), - grating=dict_stuff.get('grating', ''), + defaults = dict(filter=dict_stuff.get('filter', DEFAULT_MODEL_CHARFIELD), + detector=dict_stuff.get('detector', DEFAULT_MODEL_CHARFIELD), + exp_type=dict_stuff.get('exp_type', DEFAULT_MODEL_CHARFIELD), + read_pat=dict_stuff.get('readpatt', DEFAULT_MODEL_CHARFIELD), + grating=dict_stuff.get('grating', DEFAULT_MODEL_CHARFIELD), patt_num=dict_stuff.get('patt_num', 0), - aperture=dict_stuff.get('apername', ''), - subarray=dict_stuff.get('subarray', ''), - pupil=dict_stuff.get('pupil', '')) + aperture=dict_stuff.get('apername', DEFAULT_MODEL_CHARFIELD), + subarray=dict_stuff.get('subarray', DEFAULT_MODEL_CHARFIELD), + pupil=dict_stuff.get('pupil', DEFAULT_MODEL_CHARFIELD)) assert isinstance(defaults, dict) rootname2 = 'jw02084001001_04103_00001-seg003_nrca3' dict_stuff = data_containers.mast_query_by_rootname(instrument, rootname2) - defaults = dict(filter=dict_stuff.get('filter', ''), - detector=dict_stuff.get('detector', ''), - exp_type=dict_stuff.get('exp_type', ''), - 
read_pat=dict_stuff.get('readpatt', ''), - grating=dict_stuff.get('grating', ''), + defaults = dict(filter=dict_stuff.get('filter', DEFAULT_MODEL_CHARFIELD), + detector=dict_stuff.get('detector', DEFAULT_MODEL_CHARFIELD), + exp_type=dict_stuff.get('exp_type', DEFAULT_MODEL_CHARFIELD), + read_pat=dict_stuff.get('readpatt', DEFAULT_MODEL_CHARFIELD), + grating=dict_stuff.get('grating', DEFAULT_MODEL_CHARFIELD), patt_num=dict_stuff.get('patt_num', 0), - aperture=dict_stuff.get('apername', ''), - subarray=dict_stuff.get('subarray', ''), - pupil=dict_stuff.get('pupil', '')) + aperture=dict_stuff.get('apername', DEFAULT_MODEL_CHARFIELD), + subarray=dict_stuff.get('subarray', DEFAULT_MODEL_CHARFIELD), + pupil=dict_stuff.get('pupil', DEFAULT_MODEL_CHARFIELD)) assert isinstance(defaults, dict) instrument2 = 'FGS' rootname3 = 'jw01029003001_06201_00001_guider2' dict_stuff = data_containers.mast_query_by_rootname(instrument2, rootname3) - defaults = dict(filter=dict_stuff.get('filter', ''), - detector=dict_stuff.get('detector', ''), - exp_type=dict_stuff.get('exp_type', ''), - read_pat=dict_stuff.get('readpatt', ''), - grating=dict_stuff.get('grating', ''), + defaults = dict(filter=dict_stuff.get('filter', DEFAULT_MODEL_CHARFIELD), + detector=dict_stuff.get('detector', DEFAULT_MODEL_CHARFIELD), + exp_type=dict_stuff.get('exp_type', DEFAULT_MODEL_CHARFIELD), + read_pat=dict_stuff.get('readpatt', DEFAULT_MODEL_CHARFIELD), + grating=dict_stuff.get('grating', DEFAULT_MODEL_CHARFIELD), patt_num=dict_stuff.get('patt_num', 0), - aperture=dict_stuff.get('apername', ''), - subarray=dict_stuff.get('subarray', ''), - pupil=dict_stuff.get('pupil', '')) + aperture=dict_stuff.get('apername', DEFAULT_MODEL_CHARFIELD), + subarray=dict_stuff.get('subarray', DEFAULT_MODEL_CHARFIELD), + pupil=dict_stuff.get('pupil', DEFAULT_MODEL_CHARFIELD)) assert isinstance(defaults, dict) diff --git a/jwql/tests/test_nrs_ta_plotting.py b/jwql/tests/test_nrs_ta_plotting.py new file mode 100644 index 000000000..5ee04a6ff --- /dev/null +++ b/jwql/tests/test_nrs_ta_plotting.py @@ -0,0 +1,167 @@ +"""Test NRS TA (WATA & MSATA) plotting bokeh routines. 
+ +Author +______ + - Mees Fix +""" + +import datetime +import pandas as pd + + +from jwql.instrument_monitors.nirspec_monitors.ta_monitors.msata_monitor import MSATA +from jwql.instrument_monitors.nirspec_monitors.ta_monitors.wata_monitor import WATA + + +def test_nrs_msata_bokeh(): + test_row = { + "id": 1, + "filename": "filename", + "date_obs": datetime.datetime( + 1990, 11, 15, 20, 28, 59, 8000, tzinfo=datetime.timezone.utc + ), + "visit_id": "visit_id", + "tafilter": "tafilter", + "detector": "detector", + "readout": "readout", + "subarray": "subarray", + "num_refstars": 1, + "ta_status": "ta_status", + "v2halffacet": 1.0, + "v3halffacet": 1.0, + "v2msactr": 1.0, + "v3msactr": 1.0, + "lsv2offset": 1.0, + "lsv3offset": 1.0, + "lsoffsetmag": 1.0, + "lsrolloffset": 1.0, + "lsv2sigma": 1.0, + "lsv3sigma": 1.0, + "lsiterations": 1, + "guidestarid": 1, + "guidestarx": 1.0, + "guidestary": 1.0, + "guidestarroll": 1.0, + "samx": 1.0, + "samy": 1.0, + "samroll": 1.0, + "box_peak_value": [ + 1.0, + 1.0, + ], + "reference_star_mag": [ + 1.0, + 1.0, + ], + "convergence_status": [ + "convergence_status", + "convergence_status", + ], + "reference_star_number": [ + 1, + 1, + ], + "lsf_removed_status": [ + "lsf_removed_status", + "lsf_removed_status", + ], + "lsf_removed_reason": [ + "lsf_removed_reason", + "lsf_removed_reason", + ], + "lsf_removed_x": [ + 1.0, + 1.0, + ], + "lsf_removed_y": [ + 1.0, + 1.0, + ], + "planned_v2": [ + 1.0, + 1.0, + ], + "planned_v3": [ + 1.0, + 1.0, + ], + "stars_in_fit": 1, + "entry_date": datetime.datetime( + 1990, 11, 15, 20, 28, 59, 8000, tzinfo=datetime.timezone.utc + ), + } + + df = pd.DataFrame([test_row]) + monitor = MSATA() + monitor.output_file_name = "msata_output.html" + monitor.mk_plt_layout(df) + + +def test_nrs_wata_bokeh(): + test_row = { + "id": 1, + "filename": "filename", + "date_obs": datetime.datetime( + 1990, 11, 15, 20, 28, 59, 8000, tzinfo=datetime.timezone.utc + ), + "visit_id": "visit_id", + "tafilter": "tafilter", + "readout": "readout", + "ta_status": "ta_status", + "star_name": 1, + "star_ra": 1.0, + "star_dec": 1.0, + "star_mag": 1.0, + "star_catalog": 1, + "planned_v2": 1.0, + "planned_v3": 1.0, + "stamp_start_col": 1, + "stamp_start_row": 1, + "star_detector": "star_detector", + "max_val_box": 1.0, + "max_val_box_col": 1, + "max_val_box_row": 1, + "iterations": 1, + "corr_col": 1, + "corr_row": 1, + "stamp_final_col": 1.0, + "stamp_final_row": 1.0, + "detector_final_col": 1.0, + "detector_final_row": 1.0, + "final_sci_x": 1.0, + "final_sci_y": 1.0, + "measured_v2": 1.0, + "measured_v3": 1.0, + "ref_v2": 1.0, + "ref_v3": 1.0, + "v2_offset": 1.0, + "v3_offset": 1.0, + "sam_x": 1.0, + "sam_y": 1.0, + "entry_date": datetime.datetime( + 1990, 11, 15, 20, 28, 59, 8000, tzinfo=datetime.timezone.utc + ), + "nrsrapid_f140x": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + "nrsrapid_f110w": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + "nrsrapid_clear": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + "nrsrapidd6_f140x": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + "nrsrapidd6_f110w": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + "nrsrapidd6_clear": [ + 1.0 + ], # Not in DB but added to column source data in algorithm, adding here + } + + df = pd.DataFrame([test_row]) + monitor = WATA() + monitor.output_file_name = "wata_output.html" + 
monitor.mk_plt_layout(df) diff --git a/jwql/tests/test_utils.py b/jwql/tests/test_utils.py index ecc34790b..121a0dbec 100644 --- a/jwql/tests/test_utils.py +++ b/jwql/tests/test_utils.py @@ -42,6 +42,7 @@ 'observation': '001', 'parallel_seq_id': '1', 'program_id': '90002', + 'recognized_filename': True, 'suffix': 'rateints', 'visit': '001', 'visit_group': '02', @@ -58,6 +59,7 @@ 'observation': '001', 'parallel_seq_id': '1', 'program_id': '00327', + 'recognized_filename': True, 'suffix': 'rate', 'visit': '001', 'visit_group': '02', @@ -74,6 +76,7 @@ 'observation': '001', 'parallel_seq_id': '1', 'program_id': '00327', + 'recognized_filename': True, 'visit': '001', 'visit_group': '02', 'file_root': 'jw00327001001_02101_00002_nrca1', @@ -85,6 +88,7 @@ 'instrument': 'nirspec', 'observation': '008', 'program_id': '01118', + 'recognized_filename': True, 'visit': '001', 'detector': 'Unknown', 'file_root': 'jw01118008001_01_msa', @@ -101,6 +105,7 @@ 'observation': '002', 'parallel_seq_id': '1', 'program_id': '94015', + 'recognized_filename': True, 'suffix': 'crf', 'visit': '002', 'visit_group': '02', @@ -118,6 +123,7 @@ 'observation': '001', 'parallel_seq_id': '1', 'program_id': '90001', + 'recognized_filename': True, 'visit': '003', 'visit_group': '02', 'file_root': 'jw90001001003_02101_00001_nis', @@ -130,6 +136,7 @@ 'instrument': 'miri', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'suffix': 'i2d', 'target_id': 't001', 'detector': 'Unknown', @@ -143,6 +150,7 @@ 'instrument': 'miri', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'suffix': 'i2d', 'target_id': 't001', 'detector': 'Unknown', @@ -156,6 +164,7 @@ 'instrument': 'miri', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'source_id': 's00001', 'suffix': 'i2d', 'detector': 'Unknown', @@ -170,6 +179,7 @@ 'epoch': '1', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'suffix': 'i2d', 'target_id': 't001', 'detector': 'Unknown', @@ -184,6 +194,7 @@ 'epoch': '1', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'source_id': 's00001', 'suffix': 'i2d', 'detector': 'Unknown', @@ -197,6 +208,7 @@ 'instrument': 'miri', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'target_id': 't001', 'detector': 'Unknown', 'file_root': 'jw80600-o009_t001_miri_f1130w', @@ -209,6 +221,7 @@ 'instrument': 'miri', 'optical_elements': 'f1130w', 'program_id': '80600', + 'recognized_filename': True, 'source_id': 's00001', 'detector': 'Unknown', 'file_root': 'jw80600-o009_s00001_miri_f1130w', @@ -224,6 +237,7 @@ 'observation': '003', 'parallel_seq_id': '1', 'program_id': '00733', + 'recognized_filename': True, 'segment': '001', 'suffix': 'rate', 'visit': '001', @@ -242,6 +256,7 @@ 'observation': '003', 'parallel_seq_id': '1', 'program_id': '00733', + 'recognized_filename': True, 'segment': '001', 'suffix': 'crfints', 'visit': '001', @@ -259,6 +274,7 @@ 'observation': '003', 'parallel_seq_id': '1', 'program_id': '00733', + 'recognized_filename': True, 'segment': '001', 'visit': '001', 'visit_group': '02', @@ -274,6 +290,7 @@ 'instrument': 'fgs', 'observation': '011', 'program_id': '00729', + 'recognized_filename': True, 'suffix': 'image_cal', 'visit': '001', 'detector': 'Unknown', @@ -289,6 +306,7 @@ 'instrument': 'fgs', 'observation': '011', 'program_id': '00729', + 'recognized_filename': True, 'suffix': 'image_cal', 'visit': '001', 'detector': 'Unknown', @@ 
-304,6 +322,7 @@ 'instrument': 'fgs', 'observation': '001', 'program_id': '00327', + 'recognized_filename': True, 'visit': '001', 'detector': 'Unknown', 'file_root': 'jw00327001001_gs-id_2', @@ -318,6 +337,7 @@ 'instrument': 'fgs', 'observation': '001', 'program_id': '00327', + 'recognized_filename': True, 'visit': '001', 'detector': 'Unknown', 'file_root': 'jw00327001001_gs-id_12', @@ -332,6 +352,7 @@ 'instrument': 'fgs', 'observation': '048', 'program_id': '86600', + 'recognized_filename': True, 'suffix': 'stream', 'visit': '001', 'detector': 'Unknown', @@ -347,6 +368,7 @@ 'instrument': 'fgs', 'observation': '011', 'program_id': '00729', + 'recognized_filename': True, 'visit': '001', 'detector': 'Unknown', 'file_root': 'jw00729011001_gs-acq2_2019155024808', @@ -361,6 +383,7 @@ 'instrument': 'fgs', 'observation': '005', 'program_id': '01118', + 'recognized_filename': True, 'segment': '002', 'suffix': 'uncal', 'visit': '001', @@ -371,6 +394,7 @@ # Test msa file ('jw02560013001_01_msa.fits', {'program_id': '02560', + 'recognized_filename': True, 'observation': '013', 'visit': '001', 'filename_type': 'stage_2_msa', @@ -430,43 +454,15 @@ def test_filename_parser(filename, solution): assert filename_parser(filename) == solution -@pytest.mark.skipif(ON_GITHUB_ACTIONS, reason='Requires access to central storage.') -def test_filename_parser_whole_filesystem(): - """Test the filename_parser on all files currently in the filesystem.""" - # Get all files - filesystem_dir = get_config()['filesystem'] - all_files = [] - for dir_name, _, file_list in os.walk(filesystem_dir): - for file in file_list: - if 'public' in file or 'proprietary' in file: - if file.endswith('.fits'): - all_files.append(os.path.join(dir_name, file)) - - # Run the filename_parser on all files - bad_filenames = [] - for filepath in all_files: - try: - filename_parser(filepath) - except ValueError: - bad_filenames.append(os.path.basename(filepath)) - - # Determine if the test failed - fail = bad_filenames != [] - failure_msg = '{} files could not be successfully parsed: \n - {}'.\ - format(len(bad_filenames), '\n - '.join(bad_filenames)) - - # Check which ones failed - assert not fail, failure_msg - - def test_filename_parser_non_jwst(): """Attempt to generate a file parameter dictionary from a file that is not formatted in the JWST naming convention. Ensure the appropriate error is raised. 
""" - with pytest.raises(ValueError): - filename = 'not_a_jwst_file.fits' - filename_parser(filename) + filename = 'not_a_jwst_file.fits' + filename_dict = filename_parser(filename) + assert 'recognized_filename' in filename_dict + assert filename_dict['recognized_filename'] is False @pytest.mark.skipif(ON_GITHUB_ACTIONS, reason='Requires access to central storage.') @@ -508,7 +504,7 @@ def test_validate_config(): "admin_account": "", "auth_mast": "", "connection_string": "", - "database": { + "databases": { "engine": "", "name": "", "user": "", @@ -516,12 +512,32 @@ def test_validate_config(): "host": "", "port": "" }, + "django_databases": { + "default": { + "ENGINE": "", + "NAME": "", + "USER": "", + "PASSWORD": "", + "HOST": "", + "PORT": "" + }, + "monitors": { + "ENGINE": "", + "NAME": "", + "USER": "", + "PASSWORD": "", + "HOST": "", + "PORT": "" + } + }, + "django_debug": "", "jwql_dir": "", "jwql_version": "", "server_type": "", "log_dir": "", "mast_token": "", "outputs": "", + "working": "", "preview_image_filesystem": "", "filesystem": "", "setup_file": "", diff --git a/jwql/utils/constants.py b/jwql/utils/constants.py index 5a588f439..37d390a74 100644 --- a/jwql/utils/constants.py +++ b/jwql/utils/constants.py @@ -364,6 +364,10 @@ "wfscmb", ] +# Default Model Values +DEFAULT_MODEL_CHARFIELD = "empty" +DEFAULT_MODEL_COMMENT = "" + # Filename Component Lengths FILE_AC_CAR_ID_LEN = 4 FILE_AC_O_ID_LEN = 3 @@ -467,7 +471,6 @@ "F170LP", "F290LP", "OPAQUE", - "P750L", ], } @@ -700,6 +703,7 @@ "nircam_dark_dark_current", "nircam_dark_pixel_stats", "nircam_dark_query_history", "niriss_dark_dark_current", "niriss_dark_pixel_stats", "niriss_dark_query_history", "nirspec_dark_dark_current", "nirspec_dark_pixel_stats", "nirspec_dark_query_history", + "nirspec_grating_query_history", "fgs_edb_blocks_stats", "fgs_edb_daily_stats", "fgs_edb_every_change_stats", "fgs_edb_time_interval_stats", "fgs_edb_time_stats", "miri_edb_blocks_stats", "miri_edb_daily_stats", "miri_edb_every_change_stats", "miri_edb_time_interval_stats", "miri_edb_time_stats", "nircam_edb_blocks_stats", "nircam_edb_daily_stats", "nircam_edb_every_change_stats", "nircam_edb_time_interval_stats", "nircam_edb_time_stats", @@ -712,7 +716,7 @@ "niriss_readnoise_query_history", "niriss_readnoise_stats", "nirspec_readnoise_query_history", "nirspec_readnoise_stats", "miri_ta_query_history", "miri_ta_stats", - "nirspec_ta_query_history", "nirspec_ta_stats" + "nirspec_ta_query_history", "nirspec_ta_stats", "nirspec_wata_stats", "nirspec_msata_stats" ] # Suffix for msa files @@ -782,7 +786,7 @@ ] # Possible suffix types for AMI files -NIRISS_AMI_SUFFIX_TYPES = ["amiavg", "aminorm", "ami", "psf-amiavg"] +NIRISS_AMI_SUFFIX_TYPES = ["amiavg", "aminorm", "ami", "psf-amiavg", "psf-ami-oi", "ami-oi", "aminorm-oi", "amimulti-oi", "amilg"] # Determine if the code is being run as part of CI checking on github ON_GITHUB_ACTIONS = '/home/runner' in os.path.expanduser('~') or '/Users/runner' in os.path.expanduser('~') @@ -1032,3 +1036,28 @@ class QueryConfigKeys: + WFSC_SUFFIX_TYPES + MSA_SUFFIX ) + +# Model.Charfield Max Length Constants +MAX_LEN_AMPLIFIER = 40 +MAX_LEN_APERTURE = 40 +MAX_LEN_DEPENDENCY_VALUE = 40 +MAX_LEN_DETECTOR = 40 +MAX_LEN_DIFF_IMAGE = 1000 +MAX_LEN_FILENAME = 1000 +MAX_LEN_FILTER = 7 +MAX_LEN_GENERIC_TEXT = 100 +MAX_LEN_GRATING = 40 +MAX_LEN_INSTRUMENT = 7 +MAX_LEN_MNEMONIC = 40 +MAX_LEN_NGROUPS = 10 +MAX_LEN_NINTS = 10 +MAX_LEN_OBS = 3 +MAX_LEN_PATH = 1000 +MAX_LEN_PROPOSAL = 5 +MAX_LEN_PUPIL = 40 +MAX_LEN_READPATTERN = 
40 +MAX_LEN_SUBARRAY = 40 +MAX_LEN_TIME = 50 +MAX_LEN_TYPE = 40 +MAX_LEN_USER = 50 +MAX_LEN_VISIT = 30 diff --git a/jwql/utils/instrument_properties.py b/jwql/utils/instrument_properties.py index 88acb5465..3b2358794 100644 --- a/jwql/utils/instrument_properties.py +++ b/jwql/utils/instrument_properties.py @@ -268,7 +268,7 @@ def get_obstime(filename): time = h[0].header['TIME-OBS'] year, month, day = [int(element) for element in date.split('-')] hour, minute, second = [float(element) for element in time.split(':')] - return datetime.datetime(year, month, day, int(hour), int(minute), int(second)) + return datetime.datetime(year, month, day, int(hour), int(minute), int(second), tzinfo=datetime.timezone.utc) def mean_time(times): diff --git a/jwql/utils/interactive_preview_image.py b/jwql/utils/interactive_preview_image.py index 1ffd2ab7b..1845e0d91 100644 --- a/jwql/utils/interactive_preview_image.py +++ b/jwql/utils/interactive_preview_image.py @@ -554,7 +554,7 @@ def add_interactive_controls(self, images, color_bars): # JS callbacks for client side controls # set alternate image visibility when scale selection changes - scale_group.js_on_click(CustomJS(args={'i1': images[0], 'c1': color_bars[0], + scale_group.js_on_change('active', CustomJS(args={'i1': images[0], 'c1': color_bars[0], 'i2': images[1], 'c2': color_bars[1]}, code=""" if (i1.visible == true) { @@ -594,10 +594,10 @@ def add_interactive_controls(self, images, color_bars): limit_high.js_link('value', color_bars[i].color_mapper, 'high') # reset boxes to preset range on button click - reset.js_on_click(limit_reset) + reset.js_on_event('button_click', limit_reset) # also reset when swapping limit style - scale_group.js_on_click(limit_reset) + scale_group.js_on_change('active', limit_reset) # return widgets spacer = Spacer(height=20) diff --git a/jwql/utils/logging_functions.py b/jwql/utils/logging_functions.py index abd82a45e..c20bfce3d 100644 --- a/jwql/utils/logging_functions.py +++ b/jwql/utils/logging_functions.py @@ -57,6 +57,7 @@ def my_main_function(): import getpass import importlib import logging +import logging.config import os import pwd import socket @@ -76,8 +77,40 @@ def my_main_function(): from jwql.utils.utils import get_config, ensure_dir_exists +def filter_maker(level): + """ + This creates a logging filter that takes in a log level name (with DEBUG being + the lowest level and CRITICAL the highest), and returns a filter that passes a + logged message if and only if its level is at or below the filter level. + + The filter is needed because the logging system is designed so that it outputs + messages of LogLevel *or higher*, because the assumption is you want to know if + something happens that's more serious than what you're looking at. + + In this case, though, we're dividing printed-out log messages between the built-in + STDOUT and STDERR output streams, and we have assigned ERROR and above to go to + STDERR, while INFO and above go to STDOUT. So, by default, anything at ERROR or at + CRITICAL would go to *both* STDOUT and STDERR. This function lets you add a filter + that returns false for anything with a level above WARNING, so that STDOUT won't + duplicate those messages. + """ + level = getattr(logging, level) + + def filter(record): + return record.levelno <= level + + return filter + + def configure_logging(module): - """Configure the log file with a standard logging format. + """ + Configure the log file with a standard logging format. 
The format in question is + set up as follows: + + - DEBUG messages are ignored + - INFO and WARNING messages go to both the log file and sys.stdout + - ERROR and CRITICAL messages go to both the log file and sys.stderr + - existing loggers are disabled before this configuration is applied Parameters ---------- @@ -98,15 +131,14 @@ def configure_logging(module): # Determine log file location log_file = make_log_file(module) - # Make sure no other root lhandlers exist before configuring the logger - for handler in logging.root.handlers[:]: - logging.root.removeHandler(handler) + # Get the logging configuration dictionary + logging_config = get_config()['logging'] + + # Set the log file to the file that we got above + logging_config["handlers"]["file"]["filename"] = log_file - # Create the log file and set the permissions - logging.basicConfig(filename=log_file, - format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%m/%d/%Y %H:%M:%S %p', - level=logging.INFO) + # Configure the logging system and set permissions for the file + logging.config.dictConfig(logging_config) print('Log file initialized to {}'.format(log_file)) set_permissions(log_file) @@ -243,7 +275,7 @@ def wrapped(*args, **kwargs): # nosec comment added to ignore bandit security check try: - environment = subprocess.check_output('conda env export', universal_newlines=True, shell=True) # nosec + environment = subprocess.check_output('conda env export', universal_newlines=True, shell=True) # nosec logging.info('Environment:') for line in environment.split('\n'): logging.info(line) diff --git a/jwql/utils/monitor_template.py b/jwql/utils/monitor_template.py index 9e36ccc0f..b2567a35b 100644 --- a/jwql/utils/monitor_template.py +++ b/jwql/utils/monitor_template.py @@ -106,11 +106,14 @@ def monitor_template_main(): # Example of locating a dataset in the filesystem filesystem = SETTINGS['filesystem'] - dataset = os.path.join(filesystem, - 'public', - 'jw{}'.format(filename_dict['program_id']), - 'jw{}{}{}'.format(filename_dict['program_id'], filename_dict['observation'], filename_dict['visit']), - filename_of_interest) + if filename_dict['recognized_filename']: + dataset = os.path.join(filesystem, + 'public', + 'jw{}'.format(filename_dict['program_id']), + 'jw{}{}{}'.format(filename_dict['program_id'], filename_dict['observation'], filename_dict['visit']), + filename_of_interest) + else: + raise KeyError(f'Filename {filename_of_interest} not recognized by filename_parser() in monitor_template_main') # Example of reading in dataset using jwst.datamodels im = datamodels.open(dataset) diff --git a/jwql/utils/monitor_utils.py b/jwql/utils/monitor_utils.py index 536ac1ad4..8eb4c8a16 100644 --- a/jwql/utils/monitor_utils.py +++ b/jwql/utils/monitor_utils.py @@ -27,7 +27,6 @@ from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.logging_functions import configure_logging, get_log_status from jwql.utils import mast_utils -from jwql.utils.utils import filename_parser # Increase the limit on the number of entries that can be returned by diff --git a/jwql/utils/organize_filesystem.py b/jwql/utils/organize_filesystem.py index b4dcf483e..ee1e2ce4a 100644 --- a/jwql/utils/organize_filesystem.py +++ b/jwql/utils/organize_filesystem.py @@ -52,12 +52,16 @@ def organize_filesystem(): filename_dict = filename_parser(src) # Build destination path for those filenames that can be parsed - try: + if filename_dict['recognized_filename']: destination_directory = os.path.join( SETTINGS['filesystem'], 
'jw{}'.format(filename_dict['program_id']), 'jw{}{}{}'.format(filename_dict['program_id'], filename_dict['observation'], filename_dict['visit'])) - except KeyError: # Some filenames do not have a program_id/observation/visit structure + else: + # Some filenames do not have a program_id/observation/visit structure + # Files that are not recognized by filename_parser will also end up here. + logging.warning((f'While running organize_filesystem(), {src} was not ' + 'recognized by the filename_parser().')) break # Build complete destination location @@ -92,11 +96,15 @@ def revert_filesystem(): filename_dict = filename_parser(src) # Build destination path for those filenames that can be parsed - try: + if filename_dict['recognized_filename']: destination_directory = os.path.join( SETTINGS['old_filesystem'], 'jw{}'.format(filename_dict['program_id'])) - except KeyError: # Some filenames do not have a program_id/observation/visit structure + else: + # Some filenames do not have a program_id/observation/visit structure + # Filenames not recognized by filename_parser() will also end up here. + logging.warning((f'While running revert_filesystem(), {src} was not ' + 'recognized by the filename_parser().')) break # Build complete destination location diff --git a/jwql/utils/preview_image.py b/jwql/utils/preview_image.py index 088996ab9..ce9a9d607 100755 --- a/jwql/utils/preview_image.py +++ b/jwql/utils/preview_image.py @@ -490,16 +490,54 @@ def make_figure(self, image, integration_number, min_value, max_value, dig = 2 format_string = "%.{}f".format(dig) tlabelstr = [format_string % number for number in tlabelflt] - cbar = self.fig.colorbar(cax, ticks=tickvals) # This seems to correctly remove the ticks and labels we want to remove. It gives a warning that # it doesn't work on log scales, which we don't care about. So let's ignore that warning. warnings.filterwarnings("ignore", message="AutoMinorLocator does not work with logarithmic scale") - cbar.ax.yaxis.set_minor_locator(AutoMinorLocator(n=0)) - cbar.ax.set_yticklabels(tlabelstr) - cbar.ax.tick_params(labelsize=maxsize * 5. / 4) - cbar.ax.set_ylabel(self.units, labelpad=10, rotation=270) + xyratio = xsize / ysize + if xyratio < 1.6: + # For apertures that are taller than they are wide, square, or that are wider than + # they are tall but still reasonably close to square, put the colorbar on the right + # side of the image. + + # Some magic numbers arrived at through testing aspect ratios for all apertures + if xyratio > 0.4: + cb_width = 0.05 + else: + cb_width = 0.05 * 0.4 / xyratio + + upper_x_anchor = 0.02 + if xyratio < 0.1: + upper_x_anchor = 0.12 + + cbax = self.fig.add_axes([ax.get_position().x1 + upper_x_anchor, + ax.get_position().y0, + cb_width, + ax.get_position().height + ]) + cbar = self.fig.colorbar(cax, cax=cbax, ticks=tickvals, orientation='vertical') + cbar.ax.yaxis.set_minor_locator(AutoMinorLocator(n=0)) + cbar.ax.set_yticklabels(tlabelstr) + cbar.ax.set_ylabel(self.units, labelpad=7, rotation=270) + else: + # For apertures that are significantly wider than they are tall, put the colorbar + # under the image. + + # Again, some magic numbers controlling the positioning and height of the + # colorbar, based on testing. + lower_y_anchor = 0. 
- (xyratio / 14.5) + cb_height = 0.07 * (np.log2(xyratio) - 1) + + cbax = self.fig.add_axes([ax.get_position().x0, + ax.get_position().y0 + lower_y_anchor, + ax.get_position().width, + cb_height]) + cbar = self.fig.colorbar(cax, cax=cbax, ticks=tickvals, orientation='horizontal') + cbar.ax.xaxis.set_minor_locator(AutoMinorLocator(n=0)) + cbar.ax.set_xticklabels(tlabelstr) + cbar.ax.set_xlabel(self.units, labelpad=7, rotation=0) + ax.set_xlabel('Pixels', fontsize=maxsize * 5. / 4) ax.set_ylabel('Pixels', fontsize=maxsize * 5. / 4) ax.tick_params(labelsize=maxsize) diff --git a/jwql/utils/utils.py b/jwql/utils/utils.py index b5d2e5a46..d96388d9a 100644 --- a/jwql/utils/utils.py +++ b/jwql/utils/utils.py @@ -31,6 +31,7 @@ import glob import itertools import json +import logging import pyvo as vo import os import re @@ -173,6 +174,22 @@ def create_png_from_fits(filename, outdir): """ if os.path.isfile(filename): image = fits.getdata(filename) + + # If the input file is a rateints/calints file, it will have 3 dimensions. + # If it is a file containing all groups, prior to ramp-fitting, it will have + # 4 dimensions. In this case, grab the appropriate 2D image to work with. For + # a 3D case, get the first integration. For a 4D case, get the last group + # (which will have the highest SNR). + ndim = len(image.shape) + if ndim == 2: + pass + elif ndim == 3: + image = image[0, :, :] + elif ndim == 4: + image = image[0, -1, :, :] + else: + raise ValueError(f'File {filename} has an unsupported number of dimensions: {ndim}.') + ny, nx = image.shape img_mn, img_med, img_dev = sigma_clipped_stats(image[4: ny - 4, 4: nx - 4]) @@ -514,7 +531,8 @@ def filename_parser(filename): time_series, time_series_2c, guider, - guider_segment] + guider_segment + ] filename_type_names = [ 'stage_1_and_2', @@ -552,6 +570,9 @@ # Convert the regex match to a dictionary filename_dict = jwst_file.groupdict() + # Add an entry indicating that the filename was successfully parsed + filename_dict['recognized_filename'] = True + # Add the filename type to that dict filename_dict['filename_type'] = name_match @@ -579,11 +600,9 @@ # Raise error if unable to parse the filename except AttributeError: - jdox_url = 'https://jwst-docs.stsci.edu/understanding-jwst-data-files/jwst-data-file-naming-conventions' - raise ValueError( - 'Provided file {} does not follow JWST naming conventions. ' - 'See {} for further information.'.format(filename, jdox_url) - ) + filename_dict = {'recognized_filename': False} + logging.exception((f'\nFile: {filename} was not recognized by filename_parser(). Update parser or ' + 'constants.py if it should be recognized.\n')) return filename_dict @@ -636,7 +655,7 @@ def filesystem_path(filename, check_existence=True, search=None): elif os.path.isfile(full_path_proprietary): full_path = full_path_proprietary else: - raise FileNotFoundError('{} is not in the predicted location: {}'.format(filename, full_path)) + raise FileNotFoundError('{} is not in the expected location: {}'.format(filename, full_path)) return full_path diff --git a/jwql/website/apps/jwql/archive_database_update.py b/jwql/website/apps/jwql/archive_database_update.py index 071df24a8..748cde5dc 100755 --- a/jwql/website/apps/jwql/archive_database_update.py +++ b/jwql/website/apps/jwql/archive_database_update.py @@ -26,7 +26,7 @@ Use the '--fill_empty' argument to provide a model and field. 
Updates ALL fields for any model with empty/null/0 specified field $ python archive_database_update.py --fill_empty rootfileinfo expstart WARNING: Not all fields will be populated by all model objects. This will result in updates that may not be necessary. - While this will not disturb the data, it has the potential to increase run time. + While this will not disturb the data, it has the potential to increase run time. Select the field that is most pertinent to the models you need updated to minimize run time Use the 'update' argument to update every rootfileinfo data model with the most complete information from MAST @@ -49,6 +49,7 @@ from django.apps import apps from jwql.utils.protect_module import lock_module +from jwql.utils.constants import DEFAULT_MODEL_CHARFIELD # These lines are needed in order to use the Django models in a standalone # script (as opposed to code run as a result of a webpage request). If these @@ -116,9 +117,14 @@ def get_updates(update_database): for rootname in all_rootnames: filename_dict = filename_parser(rootname) - # Weed out file types that are not supported by generate_preview_images - if 'stage_3' not in filename_dict['filename_type']: - rootnames.append(rootname) + if filename_dict['recognized_filename']: + # Weed out file types that are not supported by generate_preview_images + if 'stage_3' not in filename_dict['filename_type']: + rootnames.append(rootname) + else: + logging.warning((f'While running get_updates() to update the RootfileInfo tables, {rootname} ' + 'was not recognized by the filename_parser().')) + pass if len(filenames) > 0: @@ -160,6 +166,16 @@ def get_updates(update_database): create_archived_proposals_context(inst) +@log_info +@log_fail +def cleanup_past_runs(): + logging.info("Starting cleanup_past_runs") + rootfileinfo_field_set = ["filter", "detector", "exp_type", "read_patt", "grating", "read_patt_num", "aperture", "subarray", "pupil", "expstart"] + # Consume iterator created in map with list in order to make it run + list(map(lambda x: fill_empty_model("rootfileinfo", x), rootfileinfo_field_set)) + logging.info("Finished cleanup_past_runs") + + def get_all_possible_filenames_for_proposal(instrument, proposal_num): """Wrapper around a MAST query for filenames from a given instrument/proposal @@ -332,15 +348,15 @@ def update_database_table(update, instrument, prop, obs, thumbnail, obsfiles, ty # Updating defaults only on update or creation to prevent call to mast_query_by_rootname on every file name. 
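The defaults construction just below (and its twin in fill_empty_rootfileinfo()) relies on dict.get() fallbacks so that fields missing from the MAST result land as the DEFAULT_MODEL_CHARFIELD sentinel rather than as empty strings. Note that dict.get() only falls back when a key is absent, not when it is present with a None value, which is why a final is-None check is also added further down. A minimal sketch with a hypothetical MAST result:

    DEFAULT_MODEL_CHARFIELD = "empty"  # sentinel value from jwql.utils.constants

    # Hypothetical MAST result that is missing 'grating' and 'pupil'
    defaults_dict = {"filter": "F200W", "detector": "NRCB4", "patt_num": None}

    defaults = dict(filter=defaults_dict.get("filter", DEFAULT_MODEL_CHARFIELD),
                    detector=defaults_dict.get("detector", DEFAULT_MODEL_CHARFIELD),
                    grating=defaults_dict.get("grating", DEFAULT_MODEL_CHARFIELD),
                    pupil=defaults_dict.get("pupil", DEFAULT_MODEL_CHARFIELD))
    print(defaults["grating"])  # 'empty'; a later fill_empty_model() pass can find and refresh it
    print(defaults_dict.get("patt_num", 1))  # None, not 1: .get() does not replace present-but-None values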
defaults_dict = mast_query_by_rootname(instrument, file) - defaults = dict(filter=defaults_dict.get('filter', ''), - detector=defaults_dict.get('detector', ''), - exp_type=defaults_dict.get('exp_type', ''), - read_patt=defaults_dict.get('readpatt', ''), - grating=defaults_dict.get('grating', ''), - read_patt_num=defaults_dict.get('patt_num', 0), - aperture=defaults_dict.get('apername', ''), - subarray=defaults_dict.get('subarray', ''), - pupil=defaults_dict.get('pupil', ''), + defaults = dict(filter=defaults_dict.get('filter', DEFAULT_MODEL_CHARFIELD), + detector=defaults_dict.get('detector', DEFAULT_MODEL_CHARFIELD), + exp_type=defaults_dict.get('exp_type', DEFAULT_MODEL_CHARFIELD), + read_patt=defaults_dict.get('readpatt', DEFAULT_MODEL_CHARFIELD), + grating=defaults_dict.get('grating', DEFAULT_MODEL_CHARFIELD), + read_patt_num=defaults_dict.get('patt_num', 1), + aperture=defaults_dict.get('apername', DEFAULT_MODEL_CHARFIELD), + subarray=defaults_dict.get('subarray', DEFAULT_MODEL_CHARFIELD), + pupil=defaults_dict.get('pupil', DEFAULT_MODEL_CHARFIELD), expstart=defaults_dict.get('expstart', 0.0)) for key, value in defaults.items(): @@ -369,10 +385,14 @@ def fill_empty_model(model_name, model_field): ''' + is_proposal = (model_name == "proposal") + is_rootfileinfo = (model_name == "rootfileinfo") + rootfile_info_fields_default_ok = ["filter", "grating", "pupil"] + model_field_null = model_field + "__isnull" model_field_empty = model_field + "__exact" - model = apps.get_model('jwql', model_name) + model = apps.get_model("jwql", model_name) null_models = empty_models = zero_models = model.objects.none() # filter(field__isnull=True) @@ -387,6 +407,13 @@ def fill_empty_model(model_name, model_field): except ValueError: pass + # filter(field__exact=DEFAULT_MODEL_CHARFIELD) + try: + if is_proposal or model_field not in rootfile_info_fields_default_ok: + empty_models = model.objects.filter(**{model_field_empty: DEFAULT_MODEL_CHARFIELD}) + except ValueError: + pass + # filter(field=0) try: zero_models = model.objects.filter(**{model_field: 0}) @@ -396,9 +423,9 @@ def fill_empty_model(model_name, model_field): model_set = null_models | empty_models | zero_models if model_set.exists(): logging.info(f'{model_set.count()} models to be updated') - if model_name == 'proposal': + if is_proposal: fill_empty_proposals(model_set) - elif model_name == 'rootfileinfo': + elif is_rootfileinfo: fill_empty_rootfileinfo(model_set) else: logging.warning(f'Filling {model_name} model is not currently implemented') @@ -458,18 +485,21 @@ def fill_empty_rootfileinfo(rootfileinfo_set): for rootfileinfo_mod in rootfileinfo_set: defaults_dict = mast_query_by_rootname(rootfileinfo_mod.instrument, rootfileinfo_mod.root_name) - defaults = dict(filter=defaults_dict.get('filter', ''), - detector=defaults_dict.get('detector', ''), - exp_type=defaults_dict.get('exp_type', ''), - read_patt=defaults_dict.get('readpatt', ''), - grating=defaults_dict.get('grating', ''), - read_patt_num=defaults_dict.get('patt_num', 0), - aperture=defaults_dict.get('apername', ''), - subarray=defaults_dict.get('subarray', ''), - pupil=defaults_dict.get('pupil', ''), + defaults = dict(filter=defaults_dict.get('filter', DEFAULT_MODEL_CHARFIELD), + detector=defaults_dict.get('detector', DEFAULT_MODEL_CHARFIELD), + exp_type=defaults_dict.get('exp_type', DEFAULT_MODEL_CHARFIELD), + read_patt=defaults_dict.get('readpatt', DEFAULT_MODEL_CHARFIELD), + grating=defaults_dict.get('grating', DEFAULT_MODEL_CHARFIELD), + read_patt_num=defaults_dict.get('patt_num', 
1), + aperture=defaults_dict.get('apername', DEFAULT_MODEL_CHARFIELD), + subarray=defaults_dict.get('subarray', DEFAULT_MODEL_CHARFIELD), + pupil=defaults_dict.get('pupil', DEFAULT_MODEL_CHARFIELD), expstart=defaults_dict.get('expstart', 0.0)) for key, value in defaults.items(): + # Final check to verify no None exists + if value is None: + value = DEFAULT_MODEL_CHARFIELD setattr(rootfileinfo_mod, key, value) try: rootfileinfo_mod.save() @@ -496,6 +526,7 @@ def protected_code(update_database, fill_empty_list): fill_empty_model(fill_empty_list[0], fill_empty_list[1]) else: get_updates(update_database) + cleanup_past_runs() if __name__ == '__main__': diff --git a/jwql/website/apps/jwql/bokeh_containers.py b/jwql/website/apps/jwql/bokeh_containers.py index f4569747a..aba023568 100644 --- a/jwql/website/apps/jwql/bokeh_containers.py +++ b/jwql/website/apps/jwql/bokeh_containers.py @@ -278,7 +278,7 @@ def generic_telemetry_plot(times, values, name, nominal_value=None, yellow_limit fig = figure(width=400, height=400, x_axis_label='Date', y_axis_label='Voltage', x_axis_type='datetime') - fig.circle(times, values, size=4, color='navy', alpha=0.5) + fig.circle(times, values, color='navy', alpha=0.5, radius=2, radius_dimension='y', radius_units='screen') if nominal_value is not None: fig.line(times, np.repeat(nominal_value, len(times)), line_dash='dashed') diff --git a/jwql/website/apps/jwql/bokeh_dashboard.py b/jwql/website/apps/jwql/bokeh_dashboard.py index 536ef9710..a4469be82 100644 --- a/jwql/website/apps/jwql/bokeh_dashboard.py +++ b/jwql/website/apps/jwql/bokeh_dashboard.py @@ -225,9 +225,9 @@ def dashboard_disk_usage(self): y_axis_label='Disk Space (TB)') plots[data['shortname']].line(x='date', y='available', source=source, legend_label='Available', line_dash='dashed', line_color='#C85108', line_width=3) - plots[data['shortname']].circle(x='date', y='available', source=source,color='#C85108', size=10) + plots[data['shortname']].circle(x='date', y='available', source=source,color='#C85108', radius=5, radius_dimension='y', radius_units='screen') plots[data['shortname']].line(x='date', y='used', source=source, legend_label='Used', line_dash='dashed', line_color='#355C7D', line_width=3) - plots[data['shortname']].circle(x='date', y='used', source=source, color='#355C7D', size=10) + plots[data['shortname']].circle(x='date', y='used', source=source, color='#355C7D', radius=5, radius_dimension='y', radius_units='screen') plots[data['shortname']].xaxis.formatter = DatetimeTickFormatter(hours="%H:%M %d %B %Y", days="%d %B %Y", @@ -289,7 +289,7 @@ def dashboard_central_store_data_volume(self): # Plot the results source = ColumnDataSource(results) plot.line(x='date', y='used', source=source, line_color=color, line_dash='dashed', legend_label=area, line_width=3) - plot.circle(x='date', y='used', source=source, color=color, size=10) + plot.circle(x='date', y='used', source=source, color=color, radius=5, radius_dimension='y', radius_units='screen') hover_tool = HoverTool(tooltips=[('Used:', f'@used TB'), ('Date:', '@date{%d %b %Y}') @@ -331,7 +331,7 @@ def dashboard_central_store_data_volume(self): # Plot the results legend_str = 'File volume' cen_store_plot.line(x='date', y='used', source=cen_store_source, legend_label=legend_str, line_dash='dashed', line_color='#355C7D', line_width=3) - cen_store_plot.circle(x='date', y='used', source=cen_store_source, color='#355C7D', size=10) + cen_store_plot.circle(x='date', y='used', source=cen_store_source, color='#355C7D', radius=5, radius_dimension='y', 
radius_units='screen') cen_store_plot.xaxis.formatter = DatetimeTickFormatter(hours="%H:%M %d %B %Y", days="%d %B %Y", months="%d %B %Y", diff --git a/jwql/website/apps/jwql/clean_old_log_files.py b/jwql/website/apps/jwql/clean_old_log_files.py old mode 100644 new mode 100755 index dd0a6b95c..2d1f3ccaf --- a/jwql/website/apps/jwql/clean_old_log_files.py +++ b/jwql/website/apps/jwql/clean_old_log_files.py @@ -38,12 +38,14 @@ def define_options(): """ usage = 'clean_old_log_files.py -t 14' parser = argparse.ArgumentParser(usage=usage) - parser.add_argument('-t', '--time_limit', type=int, default=14, + parser.add_argument('-t', '--time_limit', type=float, default=14, help='Time limit in days. Log files older than this will be deleted.') + parser.add_argument('-d', '--dry_run', action="store_true", + help='If True, the log files that would be deleted are printed to the screen') return parser -def run(time_limit=timedelta(days=14)): +def run(time_limit=timedelta(days=14), dry_run=False): """Look through log directories and delete log files that are older than ``time_limit``. Have time_limit default to be 14 days. @@ -51,6 +53,10 @@ def run(time_limit=timedelta(days=14)): ------ time_limit : datetime.timedelta Files older than this time limit will be deleted + + dry_run : bool + If True, log files will not be deleted. Those that would be deleted are instead + printed to the screen """ now = datetime.now() @@ -75,10 +81,13 @@ def run(time_limit=timedelta(days=14)): age = now - last_modified_time if age > time_limit: full_path = os.path.join(log_dir, logtype, item) - os.remove(full_path) + if not dry_run: + os.remove(full_path) + else: + print(f'DELETE: {full_path}') if __name__ == '__main__': parser = define_options() args = parser.parse_args() - run(timedelta(days=args.time_limit)) + run(time_limit=timedelta(days=args.time_limit), dry_run=args.dry_run) diff --git a/jwql/website/apps/jwql/data_containers.py b/jwql/website/apps/jwql/data_containers.py index 4c9a6a742..9819282fa 100644 --- a/jwql/website/apps/jwql/data_containers.py +++ b/jwql/website/apps/jwql/data_containers.py @@ -27,44 +27,61 @@ """ import copy -from collections import OrderedDict import glob import json -from operator import getitem import os import re import tempfile -import logging +from collections import OrderedDict +from datetime import datetime +from operator import getitem, itemgetter +import numpy as np +import pandas as pd +import pyvo as vo +import requests from astropy.io import fits from astropy.time import Time +from astroquery.mast import Mast from bs4 import BeautifulSoup -from django import setup +from django import forms, setup from django.conf import settings from django.contrib import messages from django.core.exceptions import ObjectDoesNotExist from django.db.models.query import QuerySet -import numpy as np -from operator import itemgetter -import pandas as pd -import pyvo as vo -import requests -from datetime import datetime from jwql.database import database_interface as di from jwql.database.database_interface import load_connection from jwql.edb.engineering_database import get_mnemonic, get_mnemonic_info, mnemonic_inventory -from jwql.utils.utils import check_config_for_key, ensure_dir_exists, filesystem_path, filename_parser, get_config -from jwql.utils.constants import MAST_QUERY_LIMIT, MONITORS, THUMBNAIL_LISTFILE, THUMBNAIL_FILTER_LOOK -from jwql.utils.constants import EXPOSURE_PAGE_SUFFIX_ORDER, IGNORED_SUFFIXES, INSTRUMENT_SERVICE_MATCH -from jwql.utils.constants import
JWST_INSTRUMENT_NAMES_MIXEDCASE, JWST_INSTRUMENT_NAMES -from jwql.utils.constants import REPORT_KEYS_PER_INSTRUMENT -from jwql.utils.constants import SUFFIXES_TO_ADD_ASSOCIATION, SUFFIXES_WITH_AVERAGED_INTS, QueryConfigKeys -from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS +from jwql.utils.constants import ( + DEFAULT_MODEL_COMMENT, + EXPOSURE_PAGE_SUFFIX_ORDER, + IGNORED_SUFFIXES, + INSTRUMENT_SERVICE_MATCH, + JWST_INSTRUMENT_NAMES, + JWST_INSTRUMENT_NAMES_MIXEDCASE, + MAST_QUERY_LIMIT, + MONITORS, + ON_GITHUB_ACTIONS, + ON_READTHEDOCS, + REPORT_KEYS_PER_INSTRUMENT, + SUFFIXES_TO_ADD_ASSOCIATION, + SUFFIXES_WITH_AVERAGED_INTS, + THUMBNAIL_FILTER_LOOK, + THUMBNAIL_LISTFILE, + QueryConfigKeys, +) from jwql.utils.credentials import get_mast_token from jwql.utils.permissions import set_permissions -from jwql.utils.utils import get_rootnames_for_instrument_proposal -from astroquery.mast import Mast +from jwql.utils.utils import ( + check_config_for_key, + ensure_dir_exists, + filename_parser, + filesystem_path, + get_config, + get_rootnames_for_instrument_proposal, +) # Increase the limit on the number of entries that can be returned by # a MAST query. @@ -79,8 +96,16 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") setup() - from .forms import MnemonicSearchForm, MnemonicQueryForm, MnemonicExplorationForm, InstrumentAnomalySubmitForm - from jwql.website.apps.jwql.models import Observation, Proposal, RootFileInfo, Anomalies + from jwql.website.apps.jwql.models import Anomalies, Observation, Proposal, RootFileInfo + + from .forms import ( + InstrumentAnomalySubmitForm, + MnemonicExplorationForm, + MnemonicQueryForm, + MnemonicSearchForm, + RootFileInfoCommentSubmitForm, + RootFileInfoExposureCommentSubmitForm, + ) check_config_for_key('auth_mast') configs = get_config() auth_mast = configs['auth_mast'] @@ -396,11 +421,16 @@ def get_additional_exposure_info(root_file_infos, image_info): filter_value = '/'.join(set([e.filter for e in root_file_infos])) pupil_value = '/'.join(set([e.pupil for e in root_file_infos])) grating_value = '/'.join(set([e.grating for e in root_file_infos])) + exp_comment = root_file_infos.first().exp_comment elif isinstance(root_file_infos, RootFileInfo): root_file_info = root_file_infos filter_value = root_file_info.filter pupil_value = root_file_info.pupil grating_value = root_file_info.grating + exp_comment = root_file_info.exp_comment + + # Print N/A if no exposure comment is used + exp_comment = exp_comment if exp_comment != DEFAULT_MODEL_COMMENT else "N/A" # Initialize dictionary of file info to show at the top of the page, along # with another for info that will be in the collapsible text box. 
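The exp_comment handling above maps a field that still holds its model default to a display value. A minimal sketch of that substitution, assuming a placeholder value for DEFAULT_MODEL_COMMENT (the real sentinel is defined in jwql.utils.constants):

DEFAULT_MODEL_COMMENT = ''  # assumed placeholder; the real sentinel lives in jwql.utils.constants

def display_exp_comment(exp_comment):
    # Render 'N/A' on the page when no exposure comment has been entered yet
    return exp_comment if exp_comment != DEFAULT_MODEL_COMMENT else 'N/A'

assert display_exp_comment(DEFAULT_MODEL_COMMENT) == 'N/A'
assert display_exp_comment('guide star drift') == 'guide star drift'  # hypothetical comment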
@@ -427,7 +457,8 @@ def get_additional_exposure_info(root_file_infos, image_info): 'TARG_DEC': 'N/A', 'CRDS context': 'N/A', 'PA_V3': 'N/A', - 'EXPSTART': root_file_info.expstart + 'EXPSTART': root_file_info.expstart, + 'EXP_COMMENT': exp_comment } elif isinstance(root_file_infos, RootFileInfo): additional_info = {'READPATT': root_file_info.read_patt, @@ -442,7 +473,8 @@ def get_additional_exposure_info(root_file_infos, image_info): 'DEC_REF': 'N/A', 'CRDS context': 'N/A', 'ROLL_REF': 'N/A', - 'EXPSTART': root_file_info.expstart + 'EXPSTART': root_file_info.expstart, + 'EXP_COMMENT': exp_comment } # Deal with instrument-specific parameters @@ -461,19 +493,23 @@ def get_additional_exposure_info(root_file_infos, image_info): # get_image_info() has already globbed over the directory with the files and # returned the list of existing suffixes, so we shouldn't need to check for # file existence here. - file_path = filesystem_path(filename, check_existence=True) + try: + file_path = filesystem_path(filename, check_existence=True) + except FileNotFoundError as e: + raise e header = fits.getheader(file_path) header_sci = fits.getheader(file_path, 1) - basic_info['category'] = header['CATEGORY'] - basic_info['visit_status'] = header['VISITSTA'] - additional_info['NGROUPS'] = header['NGROUPS'] - additional_info['NINTS'] = header['NINTS'] - additional_info['EXPTIME'] = header['EFFEXPTM'] - additional_info['TITLE'] = header['TITLE'] - additional_info['PI_NAME'] = header['PI_NAME'] - additional_info['TARGNAME'] = header['TARGPROP'] + # Don't assume header keywords exist; some are omitted in parallel observations + basic_info['category'] = header.get('CATEGORY', 'N/A') + basic_info['visit_status'] = header.get('VISITSTA', 'N/A') + additional_info['NGROUPS'] = header.get('NGROUPS', 'N/A') + additional_info['NINTS'] = header.get('NINTS', 'N/A') + additional_info['EXPTIME'] = header.get('EFFEXPTM', 'N/A') + additional_info['TITLE'] = header.get('TITLE', 'N/A') + additional_info['PI_NAME'] = header.get('PI_NAME', 'N/A') + additional_info['TARGNAME'] = header.get('TARGPROP', 'N/A') # For the exposure level (i.e. multiple files) present the target # RA and Dec. For the image level, give RA_REF, DEC_REF, since those # PA_V3, which applies to all detectors. At the image level, show # ROLL_REF, which is detector-specific.
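# The hunk above replaces direct keyword lookups such as header['CATEGORY'] with
# header.get(..., 'N/A'), so a FITS file that omits a keyword (as parallel
# observations can) degrades to 'N/A' instead of raising KeyError. A minimal
# sketch of the pattern, using a hypothetical subset of keywords:
#
#     header = fits.getheader(file_path)
#     wanted = ('NGROUPS', 'NINTS', 'TITLE')  # hypothetical subset
#     safe_info = {key: header.get(key, 'N/A') for key in wanted}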
if isinstance(root_file_infos, QuerySet): - additional_info['TARG_RA'] = header['TARG_RA'] - additional_info['TARG_DEC'] = header['TARG_DEC'] - additional_info['PA_V3'] = header_sci['PA_V3'] + additional_info['TARG_RA'] = header.get('TARG_RA', 'N/A') + additional_info['TARG_DEC'] = header.get('TARG_DEC', 'N/A') + additional_info['PA_V3'] = header_sci.get('PA_V3', 'N/A') elif isinstance(root_file_infos, RootFileInfo): - additional_info['RA_REF'] = header_sci['RA_REF'] - additional_info['DEC_REF'] = header_sci['DEC_REF'] - additional_info['ROLL_REF'] = header_sci['ROLL_REF'] + additional_info['RA_REF'] = header_sci.get('RA_REF', 'N/A') + additional_info['DEC_REF'] = header_sci.get('DEC_REF', 'N/A') + additional_info['ROLL_REF'] = header_sci.get('ROLL_REF', 'N/A') additional_info['CAL_VER'] = 'N/A' additional_info['CRDS context'] = 'N/A' # Pipeline version and CRDS context info are not in uncal files if suffix != 'uncal': - additional_info['CAL_VER'] = header['CAL_VER'] - additional_info['CRDS context'] = header['CRDS_CTX'] + additional_info['CAL_VER'] = header.get('CAL_VER', 'N/A') + additional_info['CRDS context'] = header.get('CRDS_CTX', 'N/A') return basic_info, additional_info @@ -662,6 +698,102 @@ def get_anomaly_form(request, inst, file_root): return form +def get_comment_form(request, file_root): + """Generate form data for comment form + + Parameters + ---------- + request : HttpRequest object + Incoming request + file_root : str + FITS filename of selected image in filesystem. May be a + file or group root name. + + Returns + ------- + RootFileInfoCommentSubmitForm object + form object to be sent with context to template + """ + + root_file_info = RootFileInfo.objects.get(root_name=file_root) + + if request.method == 'POST': + comment_form = RootFileInfoCommentSubmitForm(request.POST, instance=root_file_info) + if comment_form.is_valid(): + comment_form.save() + else: + messages.error(request, "Failed to update comment form") + else: + comment_form = RootFileInfoCommentSubmitForm(instance=root_file_info) + + return comment_form + + +def get_exp_comment_form(request, file_root): + """Generate form data for exposure comment + This form updates all exposure level comments in each related rootfileimage model. + Each model related to this exposure will have the same exposure_comment associated with it. + When getting the default comment for this form, just use the first of the set. When updating + the comment, update for every rootfileinfo in the query set. + + Parameters + ---------- + request : HttpRequest object + Incoming request + file_root : str + Partial FITS filename substring of exposure root name. 
+ + Returns + ------- + RootFileInfoExposureCommentSubmitForm object + form object to be sent with context to template + """ + + rootfileinfo_set = RootFileInfo.objects.filter(root_name__startswith=file_root) + + if request.method == 'POST': + exp_comment_form = RootFileInfoExposureCommentSubmitForm(request.POST, instance=rootfileinfo_set.first()) + if exp_comment_form.is_valid(): + # Update the comment for all images in the exposure + for rootfileinfo in rootfileinfo_set: + rootfileinfo.exp_comment = exp_comment_form.cleaned_data['exp_comment'] + rootfileinfo.save() + else: + messages.error(request, "Failed to update exposure comment form") + else: + exp_comment_form = RootFileInfoExposureCommentSubmitForm(instance=rootfileinfo_set.first()) + + return exp_comment_form + + +def get_group_anomalies(file_root): + """Gather anomaly and comment info for each root file in a group + + Parameters + ---------- + file_root : str + FITS filename of selected image in filesystem. May be a + file or group root name. + + Returns + ------- + group_anomaly_dict : dict + Keyed by root file name; each value is a string of anomalies followed by any anomaly comment + """ + # Check for group root name + rootfileinfo_set = RootFileInfo.objects.filter(root_name__startswith=file_root).order_by("root_name") + group_anomaly_dict = {} + for rootfileinfo in rootfileinfo_set: + anomalies_list = get_current_flagged_anomalies([rootfileinfo]) + anomalies_string = ', '.join(anomalies_list) + group_anomaly_dict[rootfileinfo.root_name] = anomalies_string + if rootfileinfo.comment != DEFAULT_MODEL_COMMENT: + anomalies_string += f" -- Comments: {rootfileinfo.comment}" + group_anomaly_dict[rootfileinfo.root_name] = anomalies_string + + return group_anomaly_dict + + def get_dashboard_components(request): """Build and return a Dashboard class. @@ -1202,7 +1334,10 @@ def get_header_info(filename, filetype): header_info = {} # Open the file - fits_filepath = filesystem_path(filename, search=f'*_{filetype}.fits') + try: + fits_filepath = filesystem_path(filename, search=f'*_{filetype}.fits') + except FileNotFoundError as e: + raise e hdulist = fits.open(fits_filepath) # Extract header information from file @@ -1295,7 +1430,13 @@ def get_image_info(file_root): parsed_fn = filename_parser(filename) # Get suffix information - suffix = parsed_fn['suffix'] + if parsed_fn['recognized_filename']: + suffix = parsed_fn['suffix'] + else: + # If the filename parser does not recognize the file, skip it + logging.warning((f'While running get_image_info() on {filename}, the ' 'filename_parser() failed to recognize the file pattern.')) + continue # For crf or crfints suffixes, we need to also include the association value # in the suffix, so that preview images can be found later. @@ -1577,11 +1718,19 @@ def get_proposal_info(filepaths): obsnums = [] for fname in files_for_proposal: - try: - obs = filename_parser(fname)['observation'] - obsnums.append(obs) - except KeyError: - pass + file_info = filename_parser(fname) + if file_info['recognized_filename']: + # Wrap in a try/except because level 3 files do not have an 'observation' key. + # That's ok. We will ignore those files.
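# As at the other call sites in this diff, filename_parser() output is consumed
# by checking its 'recognized_filename' key rather than catching ValueError. A
# sketch of the consuming pattern (fname and logging as used in this function):
#
#     file_info = filename_parser(fname)
#     if file_info['recognized_filename']:
#         obs = file_info.get('observation')  # level 3 products lack this key
#     else:
#         logging.warning(f'{fname} was not recognized by the filename_parser().')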
+ try: + obs = file_info['observation'] + obsnums.append(obs) + except KeyError: + pass + else: + logging.warning((f'While running get_proposal_info() for program {proposal}, {fname} ' + 'was not recognized by the filename_parser().')) + obsnums = sorted(obsnums) observations.extend(obsnums) num_files.append(len(files_for_proposal)) @@ -1995,7 +2144,7 @@ def thumbnails_ajax(inst, proposal, obs_num=None): ---------- inst : str Name of JWST instrument - proposal : str (optional) + proposal : str Number of APT proposal to filter obs_num : str (optional) Observation number @@ -2017,10 +2166,16 @@ def thumbnails_ajax(inst, proposal, obs_num=None): # Wrap in try/except because level 3 rootnames won't have an observation # number returned by the filename_parser. That's fine, we're not interested # in those files anyway. - try: - all_obs.append(filename_parser(root)['observation']) - except KeyError: - pass + file_info = filename_parser(root) + if file_info['recognized_filename']: + try: + all_obs.append(file_info['observation']) + except KeyError: + pass + else: + logging.warning((f'While running thumbnails_ajax() on root {root}, ' + 'filename_parser() failed to recognize the file pattern.')) + obs_list = sorted(list(set(all_obs))) # Get the available files for the instrument @@ -2038,26 +2193,22 @@ def thumbnails_ajax(inst, proposal, obs_num=None): # Gather data for each rootname, and construct a list of all observations # in the proposal for rootname in rootnames: + # Skip over unsupported filenames + # e.g. jw02279-o001_s000... are spec2 products for WFSS with one file per source + # Any filename with a dash after the proposal number is either this spec2 product + # or a level 3 product + if f'jw{proposal}-' in rootname: + continue # Parse filename - try: - filename_dict = filename_parser(rootname) - + filename_dict = filename_parser(rootname) + if filename_dict['recognized_filename']: # Weed out file types that are not supported by generate_preview_images if 'stage_3' in filename_dict['filename_type']: continue - - except ValueError: - # Temporary workaround for noncompliant files in filesystem - filename_dict = {'activity': rootname[17:19], - 'detector': rootname[26:], - 'exposure_id': rootname[20:25], - 'observation': rootname[7:10], - 'parallel_seq_id': rootname[16], - 'program_id': rootname[2:7], - 'visit': rootname[10:13], - 'visit_group': rootname[14:16], - 'group_root': rootname[:26]} + else: + # Skip over files not recognized by the filename_parser + continue # Get list of available filenames and exposure start times. All files with a given # rootname will have the same exposure start time, so just keep the first. @@ -2072,13 +2223,19 @@ def thumbnails_ajax(inst, proposal, obs_num=None): exp_type = columns['exp_type'][i] exp_types.add(exp_type) - # Viewed is stored by rootname in the Model db. Save it with the data_dict + # These attributes are stored by rootname in the Model db.
Save them with the data_dict # THUMBNAIL_FILTER_LOOK is boolean accessed according to a viewed flag try: root_file_info = RootFileInfo.objects.get(root_name=rootname) viewed = THUMBNAIL_FILTER_LOOK[root_file_info.viewed] + filter_type = root_file_info.filter + pupil_type = root_file_info.pupil + grating_type = root_file_info.grating except RootFileInfo.DoesNotExist: viewed = THUMBNAIL_FILTER_LOOK[0] + filter_type = "" + pupil_type = "" + grating_type = "" # Add to list of all exposure groups exp_groups.add(filename_dict['group_root']) @@ -2090,6 +2247,9 @@ def thumbnails_ajax(inst, proposal, obs_num=None): data_dict['file_data'][rootname]['viewed'] = viewed data_dict['file_data'][rootname]['exp_type'] = exp_type data_dict['file_data'][rootname]['thumbnail'] = get_thumbnail_by_rootname(rootname) + data_dict['file_data'][rootname]['filter'] = filter_type + data_dict['file_data'][rootname]['pupil'] = pupil_type + data_dict['file_data'][rootname]['grating'] = grating_type try: data_dict['file_data'][rootname]['expstart'] = exp_start @@ -2102,7 +2262,7 @@ def thumbnails_ajax(inst, proposal, obs_num=None): # Extract information for sorting with dropdown menus # (Don't include the proposal as a sorting parameter if the proposal has already been specified) - detectors, proposals, visits = [], [], [] + detectors, proposals, visits, filters, pupils, gratings = [], [], [], [], [], [] for rootname in list(data_dict['file_data'].keys()): proposals.append(data_dict['file_data'][rootname]['filename_dict']['program_id']) try: # Some rootnames cannot parse out detectors @@ -2113,6 +2273,18 @@ def thumbnails_ajax(inst, proposal, obs_num=None): visits.append(data_dict['file_data'][rootname]['filename_dict']['visit']) except KeyError: pass + try: + filters.append(data_dict['file_data'][rootname]['filter']) + except KeyError: + pass + try: + pupils.append(data_dict['file_data'][rootname]['pupil']) + except KeyError: + pass + try: + gratings.append(data_dict['file_data'][rootname]['grating']) + except KeyError: + pass if proposal is not None: dropdown_menus = {'detector': sorted(detectors), @@ -2125,6 +2297,12 @@ def thumbnails_ajax(inst, proposal, obs_num=None): 'look': THUMBNAIL_FILTER_LOOK, 'exp_type': sorted(exp_types), 'visit': sorted(visits)} + if filters is not None: + dropdown_menus['filter'] = sorted(filters) + if pupils is not None: + dropdown_menus['pupil'] = sorted(pupils) + if gratings is not None: + dropdown_menus['grating'] = sorted(gratings) data_dict['tools'] = MONITORS data_dict['dropdown_menus'] = dropdown_menus @@ -2171,20 +2349,32 @@ def thumbnails_query_ajax(rootnames): continue # Parse filename - try: - filename_dict = filename_parser(rootname) - except ValueError: + filename_dict = filename_parser(rootname) + + if filename_dict['recognized_filename']: + # Add to list of all exposure groups + exp_groups.add(filename_dict['group_root']) + else: + logging.warning((f'While running thumbnails_query_ajax() on rootname {rootname}, ' + 'filename_parser() failed to recognize the file pattern.')) continue - # Add to list of all exposure groups - exp_groups.add(filename_dict['group_root']) + try: + root_file_info = RootFileInfo.objects.get(root_name=rootname) + filter_type = root_file_info.filter + pupil_type = root_file_info.pupil + grating_type = root_file_info.grating + except RootFileInfo.DoesNotExist: + filter_type = "" + pupil_type = "" + grating_type = "" # Get list of available filenames available_files = get_filenames_by_rootname(rootname) # Add data to dictionary 
data_dict['file_data'][rootname] = {} - data_dict['file_data'][rootname]['inst'] = JWST_INSTRUMENT_NAMES_MIXEDCASE[filename_parser(rootname)['instrument']] + data_dict['file_data'][rootname]['inst'] = JWST_INSTRUMENT_NAMES_MIXEDCASE[filename_dict['instrument']] data_dict['file_data'][rootname]['filename_dict'] = filename_dict data_dict['file_data'][rootname]['available_files'] = available_files root_file_info = RootFileInfo.objects.get(root_name=rootname) @@ -2193,12 +2383,19 @@ def thumbnails_query_ajax(rootnames): data_dict['file_data'][rootname]['expstart_iso'] = Time(exp_start, format='mjd').iso.split('.')[0] data_dict['file_data'][rootname]['suffixes'] = [] data_dict['file_data'][rootname]['prop'] = rootname[2:7] + data_dict['file_data'][rootname]['filter'] = filter_type + data_dict['file_data'][rootname]['pupil'] = pupil_type + data_dict['file_data'][rootname]['grating'] = grating_type for filename in available_files: - try: - suffix = filename_parser(filename)['suffix'] + file_info = filename_parser(filename) + if file_info['recognized_filename']: + suffix = file_info['suffix'] data_dict['file_data'][rootname]['suffixes'].append(suffix) - except ValueError: + else: + logging.warning((f'While running thumbnails_query_ajax() on filename {filename}, ' + 'filename_parser() failed to recognize the file pattern.')) continue + data_dict['file_data'][rootname]['thumbnail'] = get_thumbnail_by_rootname(rootname) # Extract information for sorting with dropdown menus @@ -2221,11 +2418,23 @@ def thumbnails_query_ajax(rootnames): rootname in list(data_dict['file_data'].keys())] visits = [data_dict['file_data'][rootname]['filename_dict']['visit'] for rootname in list(data_dict['file_data'].keys())] + filters = [data_dict['file_data'][rootname]['filter'] for + rootname in list(data_dict['file_data'].keys())] + pupils = [data_dict['file_data'][rootname]['pupil'] for + rootname in list(data_dict['file_data'].keys())] + gratings = [data_dict['file_data'][rootname]['grating'] for + rootname in list(data_dict['file_data'].keys())] dropdown_menus = {'instrument': instruments, 'detector': sorted(detectors), 'proposal': sorted(proposals), 'visit': sorted(visits)} + if filters is not None: + dropdown_menus['filter'] = sorted(filters) + if pupils is not None: + dropdown_menus['pupil'] = sorted(pupils) + if gratings is not None: + dropdown_menus['grating'] = sorted(gratings) data_dict['tools'] = MONITORS data_dict['dropdown_menus'] = dropdown_menus diff --git a/jwql/website/apps/jwql/forms.py b/jwql/website/apps/jwql/forms.py index f9ce45b7b..e66bbad04 100644 --- a/jwql/website/apps/jwql/forms.py +++ b/jwql/website/apps/jwql/forms.py @@ -44,29 +44,42 @@ def view_function(request): placed in the ``jwql`` directory. 
""" -from collections import defaultdict import datetime import glob -import os import logging +import os +from collections import defaultdict from astropy.time import Time, TimeDelta from django import forms from django.shortcuts import redirect from django.utils.html import format_html from django.utils.safestring import mark_safe -from jwql.edb.engineering_database import is_valid_mnemonic -from jwql.website.apps.jwql.models import Anomalies - +from wtforms import StringField, SubmitField -from jwql.utils.constants import (ANOMALY_CHOICES_PER_INSTRUMENT, ANOMALIES_PER_INSTRUMENT, APERTURES_PER_INSTRUMENT, DETECTOR_PER_INSTRUMENT, - EXP_TYPE_PER_INSTRUMENT, FILTERS_PER_INSTRUMENT, GENERIC_SUFFIX_TYPES, GRATING_PER_INSTRUMENT, - GUIDER_FILENAME_TYPE, JWST_INSTRUMENT_NAMES_MIXEDCASE, JWST_INSTRUMENT_NAMES_SHORTHAND, - READPATT_PER_INSTRUMENT, IGNORED_SUFFIXES, SUBARRAYS_PER_INSTRUMENT, PUPILS_PER_INSTRUMENT, - LOOK_OPTIONS, SORT_OPTIONS, PROPOSAL_CATEGORIES) -from jwql.utils.utils import (get_config, get_rootnames_for_instrument_proposal, filename_parser, query_format) - -from wtforms import SubmitField, StringField +from jwql.edb.engineering_database import is_valid_mnemonic +from jwql.utils.constants import ( + ANOMALIES_PER_INSTRUMENT, + ANOMALY_CHOICES_PER_INSTRUMENT, + APERTURES_PER_INSTRUMENT, + DETECTOR_PER_INSTRUMENT, + EXP_TYPE_PER_INSTRUMENT, + FILTERS_PER_INSTRUMENT, + GENERIC_SUFFIX_TYPES, + GRATING_PER_INSTRUMENT, + GUIDER_FILENAME_TYPE, + IGNORED_SUFFIXES, + JWST_INSTRUMENT_NAMES_MIXEDCASE, + JWST_INSTRUMENT_NAMES_SHORTHAND, + LOOK_OPTIONS, + PROPOSAL_CATEGORIES, + PUPILS_PER_INSTRUMENT, + READPATT_PER_INSTRUMENT, + SORT_OPTIONS, + SUBARRAYS_PER_INSTRUMENT, +) +from jwql.utils.utils import filename_parser, get_config, get_rootnames_for_instrument_proposal, query_format +from jwql.website.apps.jwql.models import Anomalies, RootFileInfo class BaseForm(forms.Form): @@ -79,6 +92,26 @@ class BaseForm(forms.Form): resolve_submit = SubmitField('Resolve Target') +class RootFileInfoCommentSubmitForm(forms.ModelForm): + """Creates a ``Comment Form`` object that allows for text input in a form field. + This uses forms.ModelForm which is good for simplifying direct access to + Django Model database information + """ + class Meta: + model = RootFileInfo + fields = ['comment'] + + +class RootFileInfoExposureCommentSubmitForm(forms.ModelForm): + """Creates a ``Comment Form`` object that allows for text input in a form field. 
+ This uses forms.ModelForm which is good for simplifying direct access to + Django Model database information + """ + class Meta: + model = RootFileInfo + fields = ['exp_comment'] + + class JwqlQueryForm(BaseForm): """Form validation for the JWQL Query viewing tool""" @@ -160,7 +193,7 @@ class JwqlQueryForm(BaseForm): num_choices = [(50, 50), (100, 100), (200, 200), (500, 500)] num_per_page = forms.ChoiceField( required=True, - choices=num_choices, initial=num_choices[1], + choices=num_choices, initial=num_choices[3], widget=forms.RadioSelect) # instrument specific parameters @@ -330,10 +363,17 @@ def clean_search(self): if any(map(filename.__contains__, GUIDER_FILENAME_TYPE)): continue else: - instrument = filename_parser(file)['instrument'] - observation = filename_parser(file)['observation'] - all_instruments.append(instrument) - all_observations[instrument].append(observation) + fileinfo = filename_parser(file) + if fileinfo['recognized_filename']: + instrument = fileinfo['instrument'] + observation = fileinfo['observation'] + all_instruments.append(instrument) + all_observations[instrument].append(observation) + else: + # If the filename is not recognized by filename_parser(), skip it. + logging.warning((f'While running FileSearchForm.clean_search() on {file}, ' + 'filename_parser() failed to recognize the file pattern.')) + continue # sort lists so first observation is available when link is clicked. for instrument in all_instruments: @@ -382,11 +422,11 @@ def _search_is_fileroot(self, search): bool Is the search term formatted like a fileroot? """ - - try: - self.fileroot_dict = filename_parser(search) + parsed = filename_parser(search) + if parsed['recognized_filename']: + self.fileroot_dict = parsed return True - except ValueError: + else: return False def redirect_to_files(self): diff --git a/jwql/website/apps/jwql/migrations/0019_alter_fgsreadnoisequeryhistory_aperture_and_more.py b/jwql/website/apps/jwql/migrations/0019_alter_fgsreadnoisequeryhistory_aperture_and_more.py new file mode 100644 index 000000000..7d1402194 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0019_alter_fgsreadnoisequeryhistory_aperture_and_more.py @@ -0,0 +1,363 @@ +# Generated by Django 4.1.7 on 2024-03-15 14:58 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0018_nircamclawstats_doy_nircamclawstats_total_bkg'), + ] + + operations = [ + migrations.AlterField( + model_name='fgsreadnoisequeryhistory', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisequeryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='ngroups', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='nints', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + 
name='read_pattern', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='readnoise_diff_image', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='readnoise_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='subarray', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsreadnoisestats', + name='uncal_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisequeryhistory', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisequeryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='ngroups', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='nints', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='read_pattern', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='readnoise_diff_image', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='readnoise_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='subarray', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='mirireadnoisestats', + name='uncal_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamclawqueryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='filter', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='obs', + field=models.CharField(blank=True, max_length=3, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='proposal', + field=models.CharField(blank=True, max_length=5, null=True), + ), + 
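# The remaining AlterField operations in this migration repeat the same shape:
# monitor-table CharFields become nullable and blankable so that rows with
# missing values can be stored. Each operation was generated by Django's
# makemigrations from a model field of roughly this form (a sketch, not the
# verbatim model definition):
#
#     aperture = models.CharField(blank=True, max_length=40, null=True)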
migrations.AlterField( + model_name='nircamclawstats', + name='pupil', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamclawstats', + name='skyflat_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisequeryhistory', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisequeryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='ngroups', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='nints', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='read_pattern', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='readnoise_diff_image', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='readnoise_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='subarray', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamreadnoisestats', + name='uncal_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisequeryhistory', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisequeryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='ngroups', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='nints', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='read_pattern', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='readnoise_diff_image', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + 
model_name='nirissreadnoisestats', + name='readnoise_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='subarray', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissreadnoisestats', + name='uncal_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisequeryhistory', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisequeryhistory', + name='instrument', + field=models.CharField(blank=True, max_length=7, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='aperture', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='detector', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='expstart', + field=models.CharField(blank=True, max_length=50, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='ngroups', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='nints', + field=models.CharField(blank=True, max_length=10, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='read_pattern', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='readnoise_diff_image', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='readnoise_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='subarray', + field=models.CharField(blank=True, max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecreadnoisestats', + name='uncal_filename', + field=models.CharField(blank=True, max_length=1000, null=True), + ), + migrations.AlterField( + model_name='proposal', + name='thumbnail_path', + field=models.CharField(default='', help_text='Path to the proposal thumbnail', max_length=1000), + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0020_alter_proposal_category_and_more.py b/jwql/website/apps/jwql/migrations/0020_alter_proposal_category_and_more.py new file mode 100644 index 000000000..afdc7686c --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0020_alter_proposal_category_and_more.py @@ -0,0 +1,63 @@ +# Generated by Django 4.1.7 on 2024-03-20 14:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0019_alter_fgsreadnoisequeryhistory_aperture_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='proposal', + name='category', + field=models.CharField(default='empty', help_text='Category Type', max_length=10), + ), + migrations.AlterField( + model_name='proposal', + name='thumbnail_path', + field=models.CharField(default='empty', help_text='Path to the proposal thumbnail', max_length=1000), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='aperture', + field=models.CharField(blank=True, default='empty', help_text='Aperture', 
max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='detector', + field=models.CharField(blank=True, default='empty', help_text='Detector', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='exp_type', + field=models.CharField(blank=True, default='empty', help_text='Exposure Type', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='filter', + field=models.CharField(blank=True, default='empty', help_text='Instrument name', max_length=7, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='grating', + field=models.CharField(blank=True, default='empty', help_text='Grating', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='pupil', + field=models.CharField(blank=True, default='empty', help_text='Pupil', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='read_patt', + field=models.CharField(blank=True, default='empty', help_text='Read Pattern', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='subarray', + field=models.CharField(blank=True, default='empty', help_text='Subarray', max_length=40, null=True), + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0021_alter_rootfileinfo_read_patt_num.py b/jwql/website/apps/jwql/migrations/0021_alter_rootfileinfo_read_patt_num.py new file mode 100644 index 000000000..96f6e6d58 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0021_alter_rootfileinfo_read_patt_num.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.7 on 2024-04-05 18:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0020_alter_proposal_category_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='rootfileinfo', + name='read_patt_num', + field=models.IntegerField(default=1, help_text='Read Pattern Number'), + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0022_nirspecedbtimestats_nirspecedbtimeintervalstats_and_more.py b/jwql/website/apps/jwql/migrations/0022_nirspecedbtimestats_nirspecedbtimeintervalstats_and_more.py new file mode 100644 index 000000000..19b4dd955 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0022_nirspecedbtimestats_nirspecedbtimeintervalstats_and_more.py @@ -0,0 +1,1099 @@ +# Generated by Django 4.2.5 on 2024-04-17 17:44 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0021_alter_rootfileinfo_read_patt_num'), + ] + + operations = [ + migrations.CreateModel( + name='NIRSpecEdbTimeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_edb_time_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + 
name='NIRSpecEdbTimeIntervalStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_edb_time_interval_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecEdbEveryChangeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('time', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('mnemonic_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('dependency_mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('dependency_value', models.CharField(blank=True, max_length=40, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_edb_every_change_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecEdbDailyStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_edb_daily_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecEdbBlocksStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', 
django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_edb_blocks_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecDarkQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_dark_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecDarkPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'nirspec_dark_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecDarkDarkCurrent', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('amplifier', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('gauss_amplitude', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_peak', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_width', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_chisq', models.FloatField(blank=True, null=True)), + ('double_gauss_amplitude1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_amplitude2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_chisq', models.FloatField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('hist_dark_values', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('hist_amplitudes', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'nirspec_dark_dark_current', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecBadPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'nirspec_bad_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecBadPixelQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('dark_start_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_end_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_start_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_end_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_files_found', models.IntegerField(blank=True, null=True)), + ('flat_files_found', models.IntegerField(blank=True, null=True)), + ('run_bpix_from_darks', models.BooleanField(blank=True, null=True)), + ('run_bpix_from_flats', models.BooleanField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + 
options={ + 'db_table': 'nirspec_bad_pixel_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSEdbTimeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_edb_time_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSEdbTimeIntervalStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_edb_time_interval_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSEdbEveryChangeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('time', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('mnemonic_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('dependency_mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('dependency_value', models.CharField(blank=True, max_length=40, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_edb_every_change_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSEdbDailyStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_edb_daily_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSEdbBlocksStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_edb_blocks_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSDarkQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_dark_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSDarkPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'niriss_dark_pixel_stats', + 'managed': True, + 
'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSDarkDarkCurrent', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('amplifier', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('gauss_amplitude', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_peak', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_width', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_chisq', models.FloatField(blank=True, null=True)), + ('double_gauss_amplitude1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_amplitude2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_chisq', models.FloatField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('hist_dark_values', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('hist_amplitudes', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'niriss_dark_dark_current', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSBadPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'niriss_bad_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSBadPixelQueryHistory', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('dark_start_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_end_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_start_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_end_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_files_found', models.IntegerField(blank=True, null=True)), + ('flat_files_found', models.IntegerField(blank=True, null=True)), + ('run_bpix_from_darks', models.BooleanField(blank=True, null=True)), + ('run_bpix_from_flats', models.BooleanField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_bad_pixel_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamEdbTimeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_edb_time_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamEdbTimeIntervalStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_edb_time_interval_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamEdbEveryChangeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('time', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('mnemonic_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', models.FloatField(blank=True, null=True)), + ('stdev', 
models.FloatField(blank=True, null=True)), + ('dependency_mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('dependency_value', models.CharField(blank=True, max_length=40, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_edb_every_change_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamEdbDailyStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_edb_daily_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamEdbBlocksStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_edb_blocks_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamDarkQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_dark_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamDarkPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'nircam_dark_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamDarkDarkCurrent', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('amplifier', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('gauss_amplitude', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_peak', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_width', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_chisq', models.FloatField(blank=True, null=True)), + ('double_gauss_amplitude1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_amplitude2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_chisq', models.FloatField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('hist_dark_values', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('hist_amplitudes', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'nircam_dark_dark_current', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamBadPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', 
models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'nircam_bad_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamBadPixelQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('dark_start_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_end_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_start_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_end_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_files_found', models.IntegerField(blank=True, null=True)), + ('flat_files_found', models.IntegerField(blank=True, null=True)), + ('run_bpix_from_darks', models.BooleanField(blank=True, null=True)), + ('run_bpix_from_flats', models.BooleanField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_bad_pixel_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIEdbTimeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_edb_time_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIEdbTimeIntervalStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), 
size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_edb_time_interval_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIEdbEveryChangeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('time', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('mnemonic_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('dependency_mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('dependency_value', models.CharField(blank=True, max_length=40, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_edb_every_change_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIEdbDailyStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_edb_daily_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIEdbBlocksStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_edb_blocks_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIDarkQueryHistory', + fields=[ + 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_dark_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIDarkPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'miri_dark_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIDarkDarkCurrent', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('amplifier', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('gauss_amplitude', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_peak', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_width', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_chisq', models.FloatField(blank=True, null=True)), + ('double_gauss_amplitude1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_amplitude2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak2', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_chisq', models.FloatField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('hist_dark_values', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('hist_amplitudes', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'miri_dark_dark_current', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIBadPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'miri_bad_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRIBadPixelQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('dark_start_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_end_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_start_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_end_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_files_found', models.IntegerField(blank=True, null=True)), + ('flat_files_found', models.IntegerField(blank=True, null=True)), + ('run_bpix_from_darks', models.BooleanField(blank=True, null=True)), + ('run_bpix_from_flats', models.BooleanField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_bad_pixel_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSEdbTimeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_edb_time_stats', + 
'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSEdbTimeIntervalStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_edb_time_interval_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSEdbEveryChangeStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('time', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('mnemonic_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('dependency_mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('dependency_value', models.CharField(blank=True, max_length=40, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_edb_every_change_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSEdbDailyStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + ('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_edb_daily_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSEdbBlocksStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('mnemonic', models.CharField(blank=True, max_length=40, null=True)), + ('latest_query', models.DateTimeField(blank=True, null=True)), + 
('times', django.contrib.postgres.fields.ArrayField(base_field=models.DateTimeField(), size=None)), + ('data', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stdev', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('median', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('max', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('min', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_edb_blocks_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSDarkQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_dark_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSDarkPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('baseline_file', models.CharField(max_length=1000, null=True)), + ], + options={ + 'db_table': 'fgs_dark_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSDarkDarkCurrent', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('amplifier', models.CharField(blank=True, max_length=40, null=True)), + ('readpattern', models.CharField(blank=True, max_length=40, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('stdev', models.FloatField(blank=True, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('gauss_amplitude', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_peak', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_width', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('gauss_chisq', models.FloatField(blank=True, null=True)), + ('double_gauss_amplitude1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width1', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_amplitude2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_peak2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_width2', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('double_gauss_chisq', models.FloatField(blank=True, null=True)), + ('mean_dark_image_file', models.CharField(max_length=1000, null=True)), + ('hist_dark_values', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('hist_amplitudes', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'fgs_dark_dark_current', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSBadPixelStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('x_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('y_coord', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('type', models.CharField(blank=True, max_length=40, null=True)), + ('source_files', models.TextField(blank=True, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_mid_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('baseline_file', models.CharField(blank=True, max_length=1000, null=True)), + ], + options={ + 'db_table': 'fgs_bad_pixel_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSBadPixelQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('dark_start_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_end_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_start_time_mjd', models.FloatField(blank=True, null=True)), + ('flat_end_time_mjd', models.FloatField(blank=True, null=True)), + ('dark_files_found', models.IntegerField(blank=True, null=True)), + ('flat_files_found', models.IntegerField(blank=True, null=True)), + ('run_bpix_from_darks', models.BooleanField(blank=True, null=True)), + ('run_bpix_from_flats', models.BooleanField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + 
options={ + 'db_table': 'fgs_bad_pixel_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0023_centralstorage_fgsanomaly_filesystemcharacteristics_and_more.py b/jwql/website/apps/jwql/migrations/0023_centralstorage_fgsanomaly_filesystemcharacteristics_and_more.py new file mode 100644 index 000000000..2789628a9 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0023_centralstorage_fgsanomaly_filesystemcharacteristics_and_more.py @@ -0,0 +1,713 @@ +# Generated by Django 4.1.7 on 2024-04-18 18:37 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0022_nirspecedbtimestats_nirspecedbtimeintervalstats_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='CentralStorage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date', models.DateTimeField()), + ('area', models.CharField()), + ('size', models.FloatField()), + ('used', models.FloatField()), + ('available', models.FloatField()), + ], + options={ + 'db_table': 'central_storage', + 'managed': True, + }, + ), + migrations.CreateModel( + name='FgsAnomaly', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('rootname', models.CharField()), + ('flag_date', models.DateTimeField()), + ('user', models.CharField()), + ('cosmic_ray_shower', models.BooleanField()), + ('diffraction_spike', models.BooleanField()), + ('excessive_saturation', models.BooleanField()), + ('guidestar_failure', models.BooleanField()), + ('persistence', models.BooleanField()), + ('crosstalk', models.BooleanField()), + ('data_transfer_error', models.BooleanField()), + ('ghost', models.BooleanField()), + ('snowball', models.BooleanField()), + ('other', models.BooleanField()), + ], + options={ + 'db_table': 'fgs_anomaly', + 'managed': True, + }, + ), + migrations.CreateModel( + name='FilesystemCharacteristics', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date', models.DateTimeField()), + ('instrument', models.TextField()), + ('filter_pupil', models.TextField(blank=True, null=True)), + ('obs_per_filter_pupil', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'filesystem_characteristics', + 'managed': True, + }, + ), + migrations.CreateModel( + name='FilesystemGeneral', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date', models.DateTimeField(unique=True)), + ('total_file_count', models.IntegerField()), + ('total_file_size', models.FloatField()), + ('fits_file_count', models.IntegerField()), + ('fits_file_size', models.FloatField()), + ('used', models.FloatField()), + ('available', models.FloatField()), + ], + options={ + 'db_table': 'filesystem_general', + 'managed': True, + }, + ), + migrations.CreateModel( + name='MiriAnomaly', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('rootname', models.CharField()), + ('flag_date', models.DateTimeField()), + ('user', models.CharField()), + ('cosmic_ray_shower', models.BooleanField()), + ('diffraction_spike', models.BooleanField()), + ('excessive_saturation', models.BooleanField()), + ('guidestar_failure', models.BooleanField()), + ('persistence', models.BooleanField()), 
+ ('column_pull_up', models.BooleanField()), + ('internal_reflection', models.BooleanField()), + ('row_pull_down', models.BooleanField()), + ('other', models.BooleanField()), + ('column_pull_down', models.BooleanField()), + ('mrs_glow', models.BooleanField(db_column='MRS_Glow')), + ('mrs_zipper', models.BooleanField(db_column='MRS_Zipper')), + ('row_pull_up', models.BooleanField()), + ('lrs_contamination', models.BooleanField(db_column='LRS_Contamination')), + ('tree_rings', models.BooleanField()), + ], + options={ + 'db_table': 'miri_anomaly', + 'managed': True, + }, + ), + migrations.CreateModel( + name='Monitor', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('monitor_name', models.CharField()), + ('start_time', models.DateTimeField()), + ('end_time', models.DateTimeField(blank=True, null=True)), + ('status', models.TextField(blank=True, null=True)), + ('log_file', models.CharField()), + ], + options={ + 'db_table': 'monitor', + 'managed': True, + }, + ), + migrations.CreateModel( + name='NircamAnomaly', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('rootname', models.CharField()), + ('flag_date', models.DateTimeField()), + ('user', models.CharField()), + ('cosmic_ray_shower', models.BooleanField()), + ('diffraction_spike', models.BooleanField()), + ('excessive_saturation', models.BooleanField()), + ('guidestar_failure', models.BooleanField()), + ('persistence', models.BooleanField()), + ('crosstalk', models.BooleanField()), + ('data_transfer_error', models.BooleanField()), + ('ghost', models.BooleanField()), + ('snowball', models.BooleanField()), + ('dragons_breath', models.BooleanField()), + ('other', models.BooleanField()), + ('scattered_light', models.BooleanField()), + ('claws', models.BooleanField()), + ('wisps', models.BooleanField()), + ('tilt_event', models.BooleanField()), + ], + options={ + 'db_table': 'nircam_anomaly', + 'managed': True, + }, + ), + migrations.CreateModel( + name='NirissAnomaly', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('rootname', models.CharField()), + ('flag_date', models.DateTimeField()), + ('user', models.CharField()), + ('cosmic_ray_shower', models.BooleanField()), + ('diffraction_spike', models.BooleanField()), + ('excessive_saturation', models.BooleanField()), + ('guidestar_failure', models.BooleanField()), + ('persistence', models.BooleanField()), + ('crosstalk', models.BooleanField()), + ('data_transfer_error', models.BooleanField()), + ('ghost', models.BooleanField()), + ('snowball', models.BooleanField()), + ('other', models.BooleanField()), + ('scattered_light', models.TextField()), + ('light_saber', models.TextField()), + ], + options={ + 'db_table': 'niriss_anomaly', + 'managed': True, + }, + ), + migrations.CreateModel( + name='NirspecAnomaly', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('rootname', models.CharField()), + ('flag_date', models.DateTimeField()), + ('user', models.CharField()), + ('cosmic_ray_shower', models.BooleanField()), + ('diffraction_spike', models.BooleanField()), + ('excessive_saturation', models.BooleanField()), + ('guidestar_failure', models.BooleanField()), + ('persistence', models.BooleanField()), + ('crosstalk', models.BooleanField()), + ('data_transfer_error', models.BooleanField()), + ('ghost', models.BooleanField()), + ('snowball', 
models.BooleanField()), + ('dominant_msa_leakage', models.BooleanField(db_column='Dominant_MSA_Leakage')), + ('optical_short', models.BooleanField()), + ('other', models.BooleanField()), + ], + options={ + 'db_table': 'nirspec_anomaly', + 'managed': True, + }, + ), + migrations.CreateModel( + name='NIRSpecTaStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ('uncal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('subarray', models.CharField(blank=True, max_length=40, null=True)), + ('read_pattern', models.CharField(blank=True, max_length=40, null=True)), + ('nints', models.CharField(blank=True, max_length=10, null=True)), + ('ngroups', models.CharField(blank=True, max_length=10, null=True)), + ('expstart', models.CharField(blank=True, max_length=50, null=True)), + ('full_image_mean', models.FloatField(blank=True, null=True)), + ('full_image_stddev', models.FloatField(blank=True, null=True)), + ('full_image_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('full_image_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('diff_image_mean', models.FloatField(blank=True, null=True)), + ('diff_image_stddev', models.FloatField(blank=True, null=True)), + ('diff_image_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('diff_image_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp1_mean', models.FloatField(blank=True, null=True)), + ('amp1_stddev', models.FloatField(blank=True, null=True)), + ('amp1_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp1_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp2_mean', models.FloatField(blank=True, null=True)), + ('amp2_stddev', models.FloatField(blank=True, null=True)), + ('amp2_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp2_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp3_mean', models.FloatField(blank=True, null=True)), + ('amp3_stddev', models.FloatField(blank=True, null=True)), + ('amp3_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp3_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp4_mean', models.FloatField(blank=True, null=True)), + ('amp4_stddev', models.FloatField(blank=True, null=True)), + ('amp4_n', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp4_bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ], + options={ + 'db_table': 'nirspec_ta_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecTaQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + 
('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('entries_found', models.IntegerField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_ta_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecGratingStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('time', models.CharField(blank=True, max_length=50, null=True)), + ('inrsh_gwa_adcmgain', models.FloatField(blank=True, null=True)), + ('inrsh_gwa_adcmoffset', models.FloatField(blank=True, null=True)), + ('inrsh_gwa_motor_vref', models.FloatField(blank=True, null=True)), + ('prism_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('prism_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('mirror_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('mirror_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g140h_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g140h_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g235h_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g235h_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g395h_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g395h_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g140m_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g140m_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g235m_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g235m_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ('g395m_inrsi_c_gwa_x_position', models.FloatField(blank=True, null=True)), + ('g395m_inrsi_c_gwa_y_position', models.FloatField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_grating_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecGratingQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_grating_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecCosmicRayStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('source_file', models.CharField(max_length=1000, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('jump_count', models.IntegerField(blank=True, null=True)), + ('jump_rate', models.FloatField(blank=True, 
null=True)), + ('magnitude', models.TextField(blank=True, null=True)), + ('outliers', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_cosmic_ray_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecCosmicRayQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_cosmic_ray_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecBiasStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('uncal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_image', models.CharField(blank=True, max_length=1000, null=True)), + ('expstart', models.CharField(blank=True, max_length=50, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('median', models.FloatField(blank=True, null=True)), + ('stddev', models.FloatField(blank=True, null=True)), + ('collapsed_rows', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('collapsed_columns', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('counts', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp1_even_med', models.FloatField(blank=True, null=True)), + ('amp1_odd_med', models.FloatField(blank=True, null=True)), + ('amp2_even_med', models.FloatField(blank=True, null=True)), + ('amp2_odd_med', models.FloatField(blank=True, null=True)), + ('amp3_even_med', models.FloatField(blank=True, null=True)), + ('amp3_odd_med', models.FloatField(blank=True, null=True)), + ('amp4_even_med', models.FloatField(blank=True, null=True)), + ('amp4_odd_med', models.FloatField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_bias_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecBiasQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('entries_found', models.IntegerField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + 
options={ + 'db_table': 'nirspec_bias_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSCosmicRayStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('source_file', models.CharField(max_length=1000, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('jump_count', models.IntegerField(blank=True, null=True)), + ('jump_rate', models.FloatField(blank=True, null=True)), + ('magnitude', models.TextField(blank=True, null=True)), + ('outliers', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_cosmic_ray_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSCosmicRayQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_cosmic_ray_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSBiasStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('uncal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_image', models.CharField(blank=True, max_length=1000, null=True)), + ('expstart', models.CharField(blank=True, max_length=50, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('median', models.FloatField(blank=True, null=True)), + ('stddev', models.FloatField(blank=True, null=True)), + ('collapsed_rows', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('collapsed_columns', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('counts', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('bin_centers', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp1_even_med', models.FloatField(blank=True, null=True)), + ('amp1_odd_med', models.FloatField(blank=True, null=True)), + ('amp2_even_med', models.FloatField(blank=True, null=True)), + ('amp2_odd_med', models.FloatField(blank=True, null=True)), + ('amp3_even_med', models.FloatField(blank=True, null=True)), + ('amp3_odd_med', models.FloatField(blank=True, null=True)), + ('amp4_even_med', models.FloatField(blank=True, null=True)), + ('amp4_odd_med', models.FloatField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_bias_stats', + 'managed': True, + 'unique_together': {('id', 
'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRISSBiasQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('entries_found', models.IntegerField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'niriss_bias_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamCosmicRayStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('source_file', models.CharField(max_length=1000, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('jump_count', models.IntegerField(blank=True, null=True)), + ('jump_rate', models.FloatField(blank=True, null=True)), + ('magnitude', models.TextField(blank=True, null=True)), + ('outliers', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_cosmic_ray_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamCosmicRayQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_cosmic_ray_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamBiasStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('uncal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_filename', models.CharField(blank=True, max_length=1000, null=True)), + ('cal_image', models.CharField(blank=True, max_length=1000, null=True)), + ('expstart', models.CharField(blank=True, max_length=50, null=True)), + ('mean', models.FloatField(blank=True, null=True)), + ('median', models.FloatField(blank=True, null=True)), + ('stddev', models.FloatField(blank=True, null=True)), + ('collapsed_rows', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('collapsed_columns', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('counts', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('bin_centers', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('amp1_even_med', models.FloatField(blank=True, null=True)), + ('amp1_odd_med', models.FloatField(blank=True, null=True)), + ('amp2_even_med', models.FloatField(blank=True, null=True)), + ('amp2_odd_med', models.FloatField(blank=True, null=True)), + ('amp3_even_med', models.FloatField(blank=True, null=True)), + ('amp3_odd_med', models.FloatField(blank=True, null=True)), + ('amp4_even_med', models.FloatField(blank=True, null=True)), + ('amp4_odd_med', models.FloatField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_bias_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRCamBiasQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('entries_found', models.IntegerField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nircam_bias_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRITaStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('cal_file_name', models.CharField(blank=True, max_length=1000, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('targx', models.FloatField(blank=True, null=True)), + ('targy', models.FloatField(blank=True, null=True)), + ('offset', models.FloatField(blank=True, null=True)), + ('full_im_path', models.CharField(blank=True, max_length=1000, null=True)), + ('zoom_im_path', models.CharField(blank=True, max_length=1000, null=True)), + ], + options={ + 'db_table': 'miri_ta_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRITaQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('entries_found', models.IntegerField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_ta_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRICosmicRayStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + 
('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('source_file', models.CharField(max_length=1000, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('jump_count', models.IntegerField(blank=True, null=True)), + ('jump_rate', models.FloatField(blank=True, null=True)), + ('magnitude', models.TextField(blank=True, null=True)), + ('outliers', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_cosmic_ray_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='MIRICosmicRayQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'miri_cosmic_ray_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FilesystemInstrument', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date', models.DateTimeField()), + ('instrument', models.TextField()), + ('filetype', models.TextField()), + ('count', models.IntegerField()), + ('size', models.FloatField()), + ], + options={ + 'db_table': 'filesystem_instrument', + 'managed': True, + 'unique_together': {('date', 'instrument', 'filetype')}, + }, + ), + migrations.CreateModel( + name='FGSCosmicRayStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('source_file', models.CharField(max_length=1000, null=True)), + ('obs_start_time', models.DateTimeField(blank=True, null=True)), + ('obs_end_time', models.DateTimeField(blank=True, null=True)), + ('jump_count', models.IntegerField(blank=True, null=True)), + ('jump_rate', models.FloatField(blank=True, null=True)), + ('magnitude', models.TextField(blank=True, null=True)), + ('outliers', models.TextField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_cosmic_ray_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='FGSCosmicRayQueryHistory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('entry_date', models.DateTimeField(unique=True)), + ('instrument', models.CharField(blank=True, max_length=7, null=True)), + ('aperture', models.CharField(blank=True, max_length=40, null=True)), + ('start_time_mjd', models.FloatField(blank=True, null=True)), + ('end_time_mjd', models.FloatField(blank=True, null=True)), + ('files_found', models.IntegerField(blank=True, null=True)), + ('run_monitor', models.BooleanField(blank=True, null=True)), + ], + options={ + 'db_table': 'fgs_cosmic_ray_query_history', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + 
] diff --git a/jwql/website/apps/jwql/migrations/0024_nirspecmsatastats_nirspecwatastats_and_more.py b/jwql/website/apps/jwql/migrations/0024_nirspecmsatastats_nirspecwatastats_and_more.py new file mode 100644 index 000000000..938d4e443 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0024_nirspecmsatastats_nirspecwatastats_and_more.py @@ -0,0 +1,115 @@ +# Generated by Django 4.2.6 on 2024-05-07 15:15 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0023_centralstorage_fgsanomaly_filesystemcharacteristics_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='NIRSpecMsataStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('filename', models.CharField(blank=True, max_length=1000, null=True)), + ('date_obs', models.DateTimeField(blank=True, null=True)), + ('visit_id', models.CharField(blank=True, max_length=30, null=True)), + ('tafilter', models.CharField(blank=True, max_length=7, null=True)), + ('detector', models.CharField(blank=True, max_length=40, null=True)), + ('readout', models.CharField(blank=True, max_length=40, null=True)), + ('subarray', models.CharField(blank=True, max_length=40, null=True)), + ('num_refstars', models.IntegerField(blank=True, null=True)), + ('ta_status', models.CharField(blank=True, max_length=1000, null=True)), + ('v2halffacet', models.FloatField(blank=True, null=True)), + ('v3halffacet', models.FloatField(blank=True, null=True)), + ('v2msactr', models.FloatField(blank=True, null=True)), + ('v3msactr', models.FloatField(blank=True, null=True)), + ('lsv2offset', models.FloatField(blank=True, null=True)), + ('lsv3offset', models.FloatField(blank=True, null=True)), + ('lsoffsetmag', models.FloatField(blank=True, null=True)), + ('lsrolloffset', models.FloatField(blank=True, null=True)), + ('lsv2sigma', models.FloatField(blank=True, null=True)), + ('lsv3sigma', models.FloatField(blank=True, null=True)), + ('lsiterations', models.IntegerField(blank=True, null=True)), + ('guidestarid', models.IntegerField(blank=True, null=True)), + ('guidestarx', models.FloatField(blank=True, null=True)), + ('guidestary', models.FloatField(blank=True, null=True)), + ('guidestarroll', models.FloatField(blank=True, null=True)), + ('samx', models.FloatField(blank=True, null=True)), + ('samy', models.FloatField(blank=True, null=True)), + ('samroll', models.FloatField(blank=True, null=True)), + ('box_peak_value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('reference_star_mag', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('convergence_status', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100, null=True), size=None)), + ('reference_star_number', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('lsf_removed_status', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100, null=True), size=None)), + ('lsf_removed_reason', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100, null=True), size=None)), + ('lsf_removed_x', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('lsf_removed_y', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('planned_v2', 
django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('planned_v3', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), + ('stars_in_fit', models.IntegerField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_msata_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.CreateModel( + name='NIRSpecWataStats', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('filename', models.CharField(blank=True, max_length=1000, null=True)), + ('date_obs', models.DateTimeField(blank=True, null=True)), + ('visit_id', models.CharField(blank=True, max_length=30, null=True)), + ('tafilter', models.CharField(blank=True, max_length=7, null=True)), + ('readout', models.CharField(blank=True, max_length=40, null=True)), + ('ta_status', models.CharField(blank=True, max_length=100, null=True)), + ('star_name', models.IntegerField(blank=True, null=True)), + ('star_ra', models.FloatField(blank=True, null=True)), + ('star_dec', models.FloatField(blank=True, null=True)), + ('star_mag', models.FloatField(blank=True, null=True)), + ('star_catalog', models.IntegerField(blank=True, null=True)), + ('planned_v2', models.FloatField(blank=True, null=True)), + ('planned_v3', models.FloatField(blank=True, null=True)), + ('stamp_start_col', models.IntegerField(blank=True, null=True)), + ('stamp_start_row', models.IntegerField(blank=True, null=True)), + ('star_detector', models.CharField(blank=True, max_length=40, null=True)), + ('max_val_box', models.FloatField(blank=True, null=True)), + ('max_val_box_col', models.IntegerField(blank=True, null=True)), + ('max_val_box_row', models.IntegerField(blank=True, null=True)), + ('iterations', models.IntegerField(blank=True, null=True)), + ('corr_col', models.IntegerField(blank=True, null=True)), + ('corr_row', models.IntegerField(blank=True, null=True)), + ('stamp_final_col', models.FloatField(blank=True, null=True)), + ('stamp_final_row', models.FloatField(blank=True, null=True)), + ('detector_final_col', models.FloatField(blank=True, null=True)), + ('detector_final_row', models.FloatField(blank=True, null=True)), + ('final_sci_x', models.FloatField(blank=True, null=True)), + ('final_sci_y', models.FloatField(blank=True, null=True)), + ('measured_v2', models.FloatField(blank=True, null=True)), + ('measured_v3', models.FloatField(blank=True, null=True)), + ('ref_v2', models.FloatField(blank=True, null=True)), + ('ref_v3', models.FloatField(blank=True, null=True)), + ('v2_offset', models.FloatField(blank=True, null=True)), + ('v3_offset', models.FloatField(blank=True, null=True)), + ('sam_x', models.FloatField(blank=True, null=True)), + ('sam_y', models.FloatField(blank=True, null=True)), + ('entry_date', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'db_table': 'nirspec_wata_stats', + 'managed': True, + 'unique_together': {('id', 'entry_date')}, + }, + ), + migrations.DeleteModel( + name='NIRSpecTaStats', + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0025_rootfileinfo_comment_rootfileinfo_exp_comment.py b/jwql/website/apps/jwql/migrations/0025_rootfileinfo_comment_rootfileinfo_exp_comment.py new file mode 100644 index 000000000..4a1192834 --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0025_rootfileinfo_comment_rootfileinfo_exp_comment.py @@ -0,0 +1,23 @@ +# Generated by Django 5.0.7 on 2024-07-24 21:12 + 
+from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0024_nirspecmsatastats_nirspecwatastats_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='rootfileinfo', + name='comment', + field=models.TextField(blank=True, default='', help_text='Anomaly Comment Field'), + ), + migrations.AddField( + model_name='rootfileinfo', + name='exp_comment', + field=models.TextField(blank=True, default='', help_text='Anomaly Exposure Comment Field'), + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py b/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py new file mode 100644 index 000000000..25d6b921b --- /dev/null +++ b/jwql/website/apps/jwql/migrations/0026_alter_fgsdarkdarkcurrent_amplifier_and_more.py @@ -0,0 +1,744 @@ +# Generated by Django 5.0.7 on 2024-08-30 15:18 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0025_rootfileinfo_comment_rootfileinfo_exp_comment'), + ] + + operations = [ + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='gauss_width', + 
field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='fgsdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + 
model_name='miridarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + 
name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='miridarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='gauss_width', + 
field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='nircamdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + 
migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirissdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + 
migrations.AlterField( + model_name='nirissdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirissdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nirissdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirissdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='nirissdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='nirissdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='amplifier', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_amplitude1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_amplitude2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_peak1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_peak2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_width1', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='double_gauss_width2', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='gauss_amplitude', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='gauss_chisq', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='gauss_peak', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + 
migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='gauss_width', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='hist_amplitudes', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='hist_dark_values', + field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(default=0.0), size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='mean', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='stdev', + field=models.FloatField(blank=True, default=0.0, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='baseline_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='detector', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='mean_dark_image_file', + field=models.CharField(default='empty', max_length=1000, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(help_text='source file names', max_length=1000), blank=True, default='empty', null=True, size=None), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='type', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkqueryhistory', + name='aperture', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkqueryhistory', + name='files_found', + field=models.IntegerField(blank=True, default=0, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkqueryhistory', + name='instrument', + field=models.CharField(blank=True, default='empty', max_length=7, null=True), + ), + migrations.AlterField( + model_name='nirspecdarkqueryhistory', + name='readpattern', + field=models.CharField(blank=True, default='empty', max_length=40, null=True), + ), + migrations.AlterField( + model_name='rootfileinfo', + name='exp_comment', + field=models.TextField(blank=True, default='', help_text='Anomaly Comment Field'), + ), + ] diff --git a/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py b/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py new file mode 100644 index 000000000..e6f785d02 --- /dev/null +++ 
b/jwql/website/apps/jwql/migrations/0027_alter_fgsbadpixelstats_source_files_and_more.py @@ -0,0 +1,89 @@ +# Generated by Django 5.1 on 2024-11-12 19:09 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('jwql', '0026_alter_fgsdarkdarkcurrent_amplifier_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='fgsbadpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='fgsdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='miribadpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='miridarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nircambadpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nircamdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirissbadpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirissdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirissdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', 
max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirspecbadpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirspecdarkdarkcurrent', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + migrations.AlterField( + model_name='nirspecdarkpixelstats', + name='source_files', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(default='empty', help_text='source file names', max_length=1000), blank=True, null=True, size=None), + ), + ] diff --git a/jwql/website/apps/jwql/models.py b/jwql/website/apps/jwql/models.py index 715b1bb5b..13f3a8dbc 100644 --- a/jwql/website/apps/jwql/models.py +++ b/jwql/website/apps/jwql/models.py @@ -30,6 +30,23 @@ from django.db import models +from jwql.utils.constants import ( + DEFAULT_MODEL_CHARFIELD, + DEFAULT_MODEL_COMMENT, + MAX_LEN_APERTURE, + MAX_LEN_DETECTOR, + MAX_LEN_FILTER, + MAX_LEN_GRATING, + MAX_LEN_INSTRUMENT, + MAX_LEN_OBS, + MAX_LEN_PATH, + MAX_LEN_PROPOSAL, + MAX_LEN_PUPIL, + MAX_LEN_READPATTERN, + MAX_LEN_SUBARRAY, + MAX_LEN_TYPE, + MAX_LEN_USER, +) INSTRUMENT_LIST = (('FGS', 'FGS'), ('MIRI', 'MIRI'), @@ -42,7 +59,7 @@ class Archive(models.Model): """A class defining the model used to hold information needed for the archive pages.""" # Fields - instrument = models.CharField(max_length=7, help_text="Instrument name", primary_key=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, help_text="Instrument name", primary_key=True) # … # Metadata @@ -59,9 +76,9 @@ class Proposal(models.Model): """A class defining the model used to hold information about a given proposal""" # Fields prop_id = models.CharField(max_length=5, help_text="5-digit proposal ID string") - thumbnail_path = models.CharField(max_length=100, help_text='Path to the proposal thumbnail', default='') + thumbnail_path = models.CharField(max_length=MAX_LEN_PATH, help_text='Path to the proposal thumbnail', default=DEFAULT_MODEL_CHARFIELD) archive = models.ForeignKey(Archive, blank=False, null=False, on_delete=models.CASCADE) - category = models.CharField(max_length=10, help_text="Category Type", default='') + category = models.CharField(max_length=10, help_text="Category Type", default=DEFAULT_MODEL_CHARFIELD) # Metadata class Meta: @@ -78,7 +95,7 @@ def __str__(self): class Observation(models.Model): """A class defining the model used to hold information about an observation from a given proposal""" # Fields - obsnum = models.CharField(max_length=3, help_text='Observation number, as a 3 digit string') + obsnum = models.CharField(max_length=MAX_LEN_OBS, help_text='Observation number, as a 3 digit string') number_of_files = models.IntegerField(help_text='Number of files in the proposal', default=0) obsstart = models.FloatField(help_text='Time of the beginning of the observation in MJD', default=0.) obsend = models.FloatField(help_text='Time of the end of the observation in MJD', default=0.) 
@@ -99,21 +116,23 @@ def __str__(self): class RootFileInfo(models.Model): """ All info stored with root file for ease of sorting """ - instrument = models.CharField(max_length=7, help_text="Instrument name") + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, help_text="Instrument name") obsnum = models.ForeignKey(Observation, blank=False, null=False, on_delete=models.CASCADE) - proposal = models.CharField(max_length=5, help_text="5-digit proposal ID string") + proposal = models.CharField(max_length=MAX_LEN_PROPOSAL, help_text="5-digit proposal ID string") root_name = models.TextField(primary_key=True, max_length=300) viewed = models.BooleanField(default=False) - filter = models.CharField(max_length=7, help_text="Instrument name", default='', null=True, blank=True) - aperture = models.CharField(max_length=40, help_text="Aperture", default='', null=True, blank=True) - detector = models.CharField(max_length=40, help_text="Detector", default='', null=True, blank=True) - read_patt_num = models.IntegerField(help_text='Read Pattern Number', default=0) - read_patt = models.CharField(max_length=40, help_text="Read Pattern", default='', null=True, blank=True) - grating = models.CharField(max_length=40, help_text="Grating", default='', null=True, blank=True) - subarray = models.CharField(max_length=40, help_text="Subarray", default='', null=True, blank=True) - pupil = models.CharField(max_length=40, help_text="Pupil", default='', null=True, blank=True) - exp_type = models.CharField(max_length=40, help_text="Exposure Type", default='', null=True, blank=True) + filter = models.CharField(max_length=MAX_LEN_FILTER, help_text="Filter", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, help_text="Aperture", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, help_text="Detector", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + read_patt_num = models.IntegerField(help_text='Read Pattern Number', default=1) + read_patt = models.CharField(max_length=MAX_LEN_READPATTERN, help_text="Read Pattern", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + grating = models.CharField(max_length=MAX_LEN_GRATING, help_text="Grating", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, help_text="Subarray", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + pupil = models.CharField(max_length=MAX_LEN_PUPIL, help_text="Pupil", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) + exp_type = models.CharField(max_length=MAX_LEN_TYPE, help_text="Exposure Type", default=DEFAULT_MODEL_CHARFIELD, null=True, blank=True) expstart = models.FloatField(help_text='Exposure Start Time', default=0.0) + comment = models.TextField(help_text="Anomaly Comment Field", default=DEFAULT_MODEL_COMMENT, null=False, blank=True) + exp_comment = models.TextField(help_text="Anomaly Comment Field", default=DEFAULT_MODEL_COMMENT, null=False, blank=True) # Metadata class Meta: @@ -134,7 +153,7 @@ class Anomalies(models.Model): primary_key=True, ) flag_date = models.DateTimeField(help_text="flag date", null=True, blank=True) - user = models.CharField(max_length=50, help_text="user", default='', null=True, blank=True) + user = models.CharField(max_length=MAX_LEN_USER, help_text="user", default='', null=True, blank=True) cosmic_ray_shower = models.BooleanField(default=False) diffraction_spike =
     excessive_saturation = models.BooleanField(default=False)
@@ -189,3 +208,43 @@ class Meta:
     def __str__(self):
         """Container for all anomalies associated with each RootFileInfo object """
         return self.root_file_info.root_name
+
+
+def get_model_column_names(model_name):
+    """Return all column names for the input ``model_name`` as a list
+
+    Parameters
+    ----------
+    model_name : django.db.models.base.ModelBase
+        e.g. model_name = eval('NIRCamDarkDarkCurrent')
+
+    Returns
+    -------
+    colnames : list
+        List of column names
+    """
+    return [f.name for f in model_name._meta.get_fields()]
+
+
+def get_unique_values_per_column(model_name, column_name):
+    """Return a list of the unique values present in the column ``column_name`` in
+    the ``model_name`` model.
+
+    Parameters
+    ----------
+    model_name : django.db.models.base.ModelBase
+        e.g. model_name = eval('NIRCamDarkDarkCurrent')
+
+    column_name : str
+        Column name to examine
+
+    Returns
+    -------
+    values : list
+        List of unique values in ``column_name``
+    """
+    query_set = model_name.objects.values(column_name).distinct()
+    values = []
+    for row in query_set:
+        values.append(row[column_name])
+    return values
diff --git a/jwql/website/apps/jwql/monitor_models/bad_pixel.py b/jwql/website/apps/jwql/monitor_models/bad_pixel.py
index 463331deb..f50a5a261 100644
--- a/jwql/website/apps/jwql/monitor_models/bad_pixel.py
+++ b/jwql/website/apps/jwql/monitor_models/bad_pixel.py
@@ -26,14 +26,16 @@
 """
 # This is an auto-generated Django model module.
 # Feel free to rename the models, but don't rename db_table values or field names.
-from django.db import models
 from django.contrib.postgres.fields import ArrayField
+from django.db import models
+
+from jwql.utils.constants import MAX_LEN_APERTURE, MAX_LEN_DETECTOR, MAX_LEN_FILENAME, MAX_LEN_INSTRUMENT, MAX_LEN_TYPE, DEFAULT_MODEL_CHARFIELD
 
 
 class FGSBadPixelQueryHistory(models.Model):
     entry_date = models.DateTimeField(unique=True)
-    instrument = models.CharField(blank=True, null=True)
-    aperture = models.CharField(blank=True, null=True)
+    instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True)
+    aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True)
     dark_start_time_mjd = models.FloatField(blank=True, null=True)
     dark_end_time_mjd = models.FloatField(blank=True, null=True)
     flat_start_time_mjd = models.FloatField(blank=True, null=True)
@@ -52,15 +54,15 @@ class Meta:
 
 class FGSBadPixelStats(models.Model):
     entry_date = models.DateTimeField(unique=True)
-    detector = models.CharField(blank=True, null=True)
+    detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True)
     x_coord = ArrayField(models.IntegerField())
     y_coord = ArrayField(models.IntegerField())
-    type = models.CharField(blank=True, null=True)
-    source_files = models.TextField(blank=True, null=True)  # This field type is a guess.
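The two module-level helpers added above are straightforward to use once Django is configured; a short usage sketch (the printed values are illustrative, not actual query output):

# Usage sketch for the new helpers; run under a configured Django settings
# module (e.g. from the jwql web app's shell). Import path per this diff.
from jwql.website.apps.jwql.models import (
    RootFileInfo,
    get_model_column_names,
    get_unique_values_per_column,
)

print(get_model_column_names(RootFileInfo))
# e.g. ['instrument', 'obsnum', 'proposal', 'root_name', 'viewed', ...]
print(get_unique_values_per_column(RootFileInfo, 'instrument'))
# e.g. ['FGS', 'MIRI', 'NIRCam', 'NIRISS', 'NIRSpec']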
+ type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) class Meta: managed = True @@ -70,8 +72,8 @@ class Meta: class MIRIBadPixelQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) dark_start_time_mjd = models.FloatField(blank=True, null=True) dark_end_time_mjd = models.FloatField(blank=True, null=True) flat_start_time_mjd = models.FloatField(blank=True, null=True) @@ -90,15 +92,15 @@ class Meta: class MIRIBadPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. + type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) class Meta: managed = True @@ -108,8 +110,8 @@ class Meta: class NIRCamBadPixelQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) dark_start_time_mjd = models.FloatField(blank=True, null=True) dark_end_time_mjd = models.FloatField(blank=True, null=True) flat_start_time_mjd = models.FloatField(blank=True, null=True) @@ -128,15 +130,15 @@ class Meta: class NIRCamBadPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) class Meta: managed = True @@ -146,8 +148,8 @@ class Meta: class NIRISSBadPixelQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) dark_start_time_mjd = models.FloatField(blank=True, null=True) dark_end_time_mjd = models.FloatField(blank=True, null=True) flat_start_time_mjd = models.FloatField(blank=True, null=True) @@ -166,15 +168,15 @@ class Meta: class NIRISSBadPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. + type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) class Meta: managed = True @@ -184,8 +186,8 @@ class Meta: class NIRSpecBadPixelQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) dark_start_time_mjd = models.FloatField(blank=True, null=True) dark_end_time_mjd = models.FloatField(blank=True, null=True) flat_start_time_mjd = models.FloatField(blank=True, null=True) @@ -204,15 +206,15 @@ class Meta: class NIRSpecBadPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
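Storing source_files as a PostgreSQL ArrayField rather than a TextField, as done repeatedly above, makes the source-file lists directly queryable with Django's array lookups; a sketch, using a hypothetical FITS file name:

# Sketch of an ArrayField lookup against one of the converted tables; requires
# the PostgreSQL backend. The file name below is a hypothetical placeholder.
from jwql.website.apps.jwql.monitor_models.bad_pixel import NIRCamBadPixelStats

hits = NIRCamBadPixelStats.objects.filter(
    source_files__contains=['jw01022001001_01101_00001_nrca1_uncal.fits']
)
for row in hits:
    print(row.entry_date, row.detector, row.type, len(row.source_files))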
+ type = models.CharField(max_length=MAX_LEN_TYPE, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) class Meta: managed = True diff --git a/jwql/website/apps/jwql/monitor_models/bias.py b/jwql/website/apps/jwql/monitor_models/bias.py index c8245b9fe..62e539bc0 100644 --- a/jwql/website/apps/jwql/monitor_models/bias.py +++ b/jwql/website/apps/jwql/monitor_models/bias.py @@ -26,13 +26,15 @@ """ # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. -from django.db import models from django.contrib.postgres.fields import ArrayField +from django.db import models + +from jwql.utils.constants import MAX_LEN_APERTURE, MAX_LEN_TIME, MAX_LEN_FILENAME, MAX_LEN_INSTRUMENT, MAX_LEN_PATH class NIRCamBiasQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -47,11 +49,11 @@ class Meta: class NIRCamBiasStats(models.Model): - aperture = models.CharField(blank=True, null=True) - uncal_filename = models.CharField(blank=True, null=True) - cal_filename = models.CharField(blank=True, null=True) - cal_image = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_image = models.CharField(max_length=MAX_LEN_PATH, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) mean = models.FloatField(blank=True, null=True) median = models.FloatField(blank=True, null=True) stddev = models.FloatField(blank=True, null=True) @@ -76,8 +78,8 @@ class Meta: class NIRISSBiasQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -92,11 +94,11 @@ class Meta: class NIRISSBiasStats(models.Model): - aperture = models.CharField(blank=True, null=True) - uncal_filename = models.CharField(blank=True, null=True) - cal_filename = models.CharField(blank=True, null=True) - cal_image = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, 
null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_image = models.CharField(max_length=MAX_LEN_PATH, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) mean = models.FloatField(blank=True, null=True) median = models.FloatField(blank=True, null=True) stddev = models.FloatField(blank=True, null=True) @@ -121,8 +123,8 @@ class Meta: class NIRSpecBiasQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -137,11 +139,11 @@ class Meta: class NIRSpecBiasStats(models.Model): - aperture = models.CharField(blank=True, null=True) - uncal_filename = models.CharField(blank=True, null=True) - cal_filename = models.CharField(blank=True, null=True) - cal_image = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + cal_image = models.CharField(max_length=MAX_LEN_PATH, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) mean = models.FloatField(blank=True, null=True) median = models.FloatField(blank=True, null=True) stddev = models.FloatField(blank=True, null=True) diff --git a/jwql/website/apps/jwql/monitor_models/claw.py b/jwql/website/apps/jwql/monitor_models/claw.py index a1c6c93e3..399134c8d 100644 --- a/jwql/website/apps/jwql/monitor_models/claw.py +++ b/jwql/website/apps/jwql/monitor_models/claw.py @@ -28,10 +28,21 @@ # Feel free to rename the models, but don't rename db_table values or field names. 
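Note that the bias-stats models keep expstart as a string of MAX_LEN_TIME rather than a float MJD. If the stored strings are ISO-format timestamps (an assumption; this diff does not show the writer side), converting them back is a one-liner with astropy, which is already part of the jwql stack:

# Sketch: reading back a bias-stats row and converting its string expstart,
# assuming ISO timestamps are stored (verify against the monitor that writes
# these rows before relying on this).
from astropy.time import Time
from jwql.website.apps.jwql.monitor_models.bias import NIRCamBiasStats

row = NIRCamBiasStats.objects.order_by('-id').first()  # auto pk; none is declared
if row is not None and row.expstart:
    print(row.aperture, Time(row.expstart, format='isot').mjd, row.mean, row.stddev)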
from django.db import models +from jwql.utils.constants import ( + MAX_LEN_DETECTOR, + MAX_LEN_TIME, + MAX_LEN_FILENAME, + MAX_LEN_FILTER, + MAX_LEN_INSTRUMENT, + MAX_LEN_OBS, + MAX_LEN_PROPOSAL, + MAX_LEN_PUPIL, +) + class NIRCamClawQueryHistory(models.Model): entry_date = models.DateTimeField(blank=True, null=True) - instrument = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) @@ -44,13 +55,13 @@ class Meta: class NIRCamClawStats(models.Model): entry_date = models.DateTimeField(blank=True, null=True) - filename = models.CharField(blank=True, null=True) - proposal = models.CharField(blank=True, null=True) - obs = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - filter = models.CharField(blank=True, null=True) - pupil = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) + filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + proposal = models.CharField(max_length=MAX_LEN_PROPOSAL, blank=True, null=True) + obs = models.CharField(max_length=MAX_LEN_OBS, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + filter = models.CharField(max_length=MAX_LEN_FILTER, blank=True, null=True) + pupil = models.CharField(max_length=MAX_LEN_PUPIL, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) expstart_mjd = models.FloatField(blank=True, null=True) effexptm = models.FloatField(blank=True, null=True) ra = models.FloatField(blank=True, null=True) @@ -60,7 +71,7 @@ class NIRCamClawStats(models.Model): median = models.FloatField(blank=True, null=True) stddev = models.FloatField(blank=True, null=True) frac_masked = models.FloatField(blank=True, null=True) - skyflat_filename = models.CharField(blank=True, null=True) + skyflat_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) doy = models.FloatField(blank=True, null=True) total_bkg = models.FloatField(blank=True, null=True) diff --git a/jwql/website/apps/jwql/monitor_models/common.py b/jwql/website/apps/jwql/monitor_models/common.py index d0bed2afc..ebcf7c82a 100644 --- a/jwql/website/apps/jwql/monitor_models/common.py +++ b/jwql/website/apps/jwql/monitor_models/common.py @@ -116,7 +116,7 @@ from django.contrib.postgres.fields import ArrayField class NIRISSMyMonitorStats(models.Model): - aperture = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) mean = models.FloatField(blank=True, null=True) median = models.FloatField(blank=True, null=True) stddev = models.FloatField(blank=True, null=True) diff --git a/jwql/website/apps/jwql/monitor_models/cosmic_ray.py b/jwql/website/apps/jwql/monitor_models/cosmic_ray.py index cdff2eb22..d9ead7599 100644 --- a/jwql/website/apps/jwql/monitor_models/cosmic_ray.py +++ b/jwql/website/apps/jwql/monitor_models/cosmic_ray.py @@ -27,13 +27,14 @@ # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. 
from django.db import models -from django.contrib.postgres.fields import ArrayField + +from jwql.utils.constants import MAX_LEN_APERTURE, MAX_LEN_FILENAME, MAX_LEN_INSTRUMENT class FGSCosmicRayQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) files_found = models.IntegerField(blank=True, null=True) @@ -47,8 +48,8 @@ class Meta: class FGSCosmicRayStats(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - source_file = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + source_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) jump_count = models.IntegerField(blank=True, null=True) @@ -64,8 +65,8 @@ class Meta: class MIRICosmicRayQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) files_found = models.IntegerField(blank=True, null=True) @@ -79,8 +80,8 @@ class Meta: class MIRICosmicRayStats(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - source_file = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + source_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) jump_count = models.IntegerField(blank=True, null=True) @@ -96,8 +97,8 @@ class Meta: class NIRCamCosmicRayQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) files_found = models.IntegerField(blank=True, null=True) @@ -111,8 +112,8 @@ class Meta: class NIRCamCosmicRayStats(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - source_file = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + source_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) jump_count = models.IntegerField(blank=True, null=True) @@ -128,8 +129,8 @@ 
class Meta: class NIRISSCosmicRayQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) files_found = models.IntegerField(blank=True, null=True) @@ -143,8 +144,8 @@ class Meta: class NIRISSCosmicRayStats(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - source_file = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + source_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) jump_count = models.IntegerField(blank=True, null=True) @@ -160,8 +161,8 @@ class Meta: class NIRSpecCosmicRayQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) files_found = models.IntegerField(blank=True, null=True) @@ -175,8 +176,8 @@ class Meta: class NIRSpecCosmicRayStats(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - source_file = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + source_file = models.CharField(max_length=MAX_LEN_FILENAME, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) jump_count = models.IntegerField(blank=True, null=True) diff --git a/jwql/website/apps/jwql/monitor_models/dark_current.py b/jwql/website/apps/jwql/monitor_models/dark_current.py index 41ae1ccac..b467d062b 100644 --- a/jwql/website/apps/jwql/monitor_models/dark_current.py +++ b/jwql/website/apps/jwql/monitor_models/dark_current.py @@ -26,35 +26,47 @@ """ # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. -from django.db import models from django.contrib.postgres.fields import ArrayField +from django.db import models + +from jwql.utils.constants import ( + DEFAULT_MODEL_CHARFIELD, + MAX_LEN_AMPLIFIER, + MAX_LEN_APERTURE, + MAX_LEN_DETECTOR, + MAX_LEN_FILENAME, + MAX_LEN_INSTRUMENT, + MAX_LEN_READPATTERN, + MAX_LEN_TYPE, +) class FGSDarkDarkCurrent(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - amplifier = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) - mean = models.FloatField(blank=True, null=True) - stdev = models.FloatField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + amplifier = models.CharField(max_length=MAX_LEN_AMPLIFIER, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + mean = models.FloatField(default=0., blank=True, null=True) + stdev = models.FloatField(default=0., blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - gauss_amplitude = ArrayField(models.FloatField()) - gauss_peak = ArrayField(models.FloatField()) - gauss_width = ArrayField(models.FloatField()) - gauss_chisq = models.FloatField(blank=True, null=True) - double_gauss_amplitude1 = ArrayField(models.FloatField()) - double_gauss_peak1 = ArrayField(models.FloatField()) - double_gauss_width1 = ArrayField(models.FloatField()) - double_gauss_amplitude2 = ArrayField(models.FloatField()) - double_gauss_peak2 = ArrayField(models.FloatField()) - double_gauss_width2 = ArrayField(models.FloatField()) - double_gauss_chisq = models.FloatField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - hist_dark_values = ArrayField(models.FloatField()) - hist_amplitudes = ArrayField(models.FloatField()) + gauss_amplitude = ArrayField(models.FloatField(default=0.)) + gauss_peak = ArrayField(models.FloatField(default=0.)) + gauss_width = ArrayField(models.FloatField(default=0.)) + gauss_chisq = models.FloatField(default=0., blank=True, null=True) + double_gauss_amplitude1 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak1 = ArrayField(models.FloatField(default=0.)) + double_gauss_width1 = ArrayField(models.FloatField(default=0.)) + double_gauss_amplitude2 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak2 = ArrayField(models.FloatField(default=0.)) + double_gauss_width2 = ArrayField(models.FloatField(default=0.)) + double_gauss_chisq = models.FloatField(default=0., blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + hist_dark_values = ArrayField(models.FloatField(default=0.)) + hist_amplitudes = ArrayField(models.FloatField(default=0.)) class Meta: managed = True @@ -64,16 +76,17 @@ class Meta: class FGSDarkPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
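For the dark-current tables, the Gaussian-fit parameters and the histogram are persisted as arrays, so a stored fit can be re-evaluated without re-running the monitor. A sketch under the assumption that the first element of each gauss_* array holds the fitted parameter value (the array layout is not specified in this diff):

# Sketch: re-evaluating the stored single-Gaussian fit over the persisted
# histogram. The [0] indexing is an assumption about the array layout;
# confirm against the dark monitor code.
import numpy as np
from jwql.website.apps.jwql.monitor_models.dark_current import FGSDarkDarkCurrent

row = FGSDarkDarkCurrent.objects.order_by('-entry_date').first()
if row is not None:
    x = np.array(row.hist_dark_values)
    amp, peak, width = row.gauss_amplitude[0], row.gauss_peak[0], row.gauss_width[0]
    fit = amp * np.exp(-((x - peak) ** 2) / (2.0 * width ** 2))
    resid = np.array(row.hist_amplitudes) - fit
    print(row.aperture, row.gauss_chisq, float(np.abs(resid).max()))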
+ type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) class Meta: managed = True @@ -83,12 +96,12 @@ class Meta: class FGSDarkQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) - files_found = models.IntegerField(blank=True, null=True) + files_found = models.IntegerField(default=0, blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) class Meta: @@ -99,29 +112,30 @@ class Meta: class MIRIDarkDarkCurrent(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - amplifier = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) - mean = models.FloatField(blank=True, null=True) - stdev = models.FloatField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + amplifier = models.CharField(max_length=MAX_LEN_AMPLIFIER, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + mean = models.FloatField(default=0., blank=True, null=True) + stdev = models.FloatField(default=0., blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - gauss_amplitude = ArrayField(models.FloatField()) - gauss_peak = ArrayField(models.FloatField()) - gauss_width = ArrayField(models.FloatField()) - gauss_chisq = models.FloatField(blank=True, null=True) - double_gauss_amplitude1 = ArrayField(models.FloatField()) - double_gauss_peak1 = ArrayField(models.FloatField()) - double_gauss_width1 = ArrayField(models.FloatField()) - double_gauss_amplitude2 = ArrayField(models.FloatField()) - double_gauss_peak2 = ArrayField(models.FloatField()) - double_gauss_width2 = ArrayField(models.FloatField()) - double_gauss_chisq = models.FloatField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - hist_dark_values = ArrayField(models.FloatField()) - hist_amplitudes = ArrayField(models.FloatField()) + gauss_amplitude = ArrayField(models.FloatField(default=0.)) + gauss_peak = ArrayField(models.FloatField(default=0.)) + gauss_width = ArrayField(models.FloatField(default=0.)) + gauss_chisq = models.FloatField(default=0., blank=True, null=True) + double_gauss_amplitude1 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak1 = ArrayField(models.FloatField(default=0.)) + double_gauss_width1 = ArrayField(models.FloatField(default=0.)) + double_gauss_amplitude2 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak2 = ArrayField(models.FloatField(default=0.)) + double_gauss_width2 = ArrayField(models.FloatField(default=0.)) + double_gauss_chisq = models.FloatField(default=0., blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + hist_dark_values = ArrayField(models.FloatField(default=0.)) + hist_amplitudes = ArrayField(models.FloatField(default=0.)) class Meta: managed = True @@ -131,16 +145,17 @@ class Meta: class MIRIDarkPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) class Meta: managed = True @@ -150,12 +165,12 @@ class Meta: class MIRIDarkQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) - files_found = models.IntegerField(blank=True, null=True) + files_found = models.IntegerField(default=0, blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) class Meta: @@ -166,29 +181,30 @@ class Meta: class NIRCamDarkDarkCurrent(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - amplifier = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) - mean = models.FloatField(blank=True, null=True) - stdev = models.FloatField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + amplifier = models.CharField(max_length=MAX_LEN_AMPLIFIER, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + mean = models.FloatField(default=0., blank=True, null=True) + stdev = models.FloatField(default=0., blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - gauss_amplitude = ArrayField(models.FloatField()) - gauss_peak = ArrayField(models.FloatField()) - gauss_width = ArrayField(models.FloatField()) - gauss_chisq = models.FloatField(blank=True, null=True) - double_gauss_amplitude1 = ArrayField(models.FloatField()) - double_gauss_peak1 = ArrayField(models.FloatField()) - double_gauss_width1 = ArrayField(models.FloatField()) - double_gauss_amplitude2 = ArrayField(models.FloatField()) - double_gauss_peak2 = ArrayField(models.FloatField()) - double_gauss_width2 = ArrayField(models.FloatField()) - double_gauss_chisq = models.FloatField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - hist_dark_values = ArrayField(models.FloatField()) - hist_amplitudes = ArrayField(models.FloatField()) + gauss_amplitude = ArrayField(models.FloatField(default=0.)) + gauss_peak = ArrayField(models.FloatField(default=0.)) + gauss_width = ArrayField(models.FloatField(default=0.)) + gauss_chisq = models.FloatField(default=0., blank=True, null=True) + double_gauss_amplitude1 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak1 = ArrayField(models.FloatField(default=0.)) + double_gauss_width1 = ArrayField(models.FloatField(default=0.)) + double_gauss_amplitude2 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak2 = ArrayField(models.FloatField(default=0.)) + double_gauss_width2 = ArrayField(models.FloatField(default=0.)) + double_gauss_chisq = models.FloatField(default=0., blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + hist_dark_values = ArrayField(models.FloatField(default=0.)) + hist_amplitudes = ArrayField(models.FloatField(default=0.)) class Meta: managed = True @@ -198,16 +214,17 @@ class Meta: class NIRCamDarkPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) class Meta: managed = True @@ -217,12 +234,12 @@ class Meta: class NIRCamDarkQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) - files_found = models.IntegerField(blank=True, null=True) + files_found = models.IntegerField(default=0, blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) class Meta: @@ -233,29 +250,30 @@ class Meta: class NIRISSDarkDarkCurrent(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - amplifier = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) - mean = models.FloatField(blank=True, null=True) - stdev = models.FloatField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + amplifier = models.CharField(max_length=MAX_LEN_AMPLIFIER, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + mean = models.FloatField(default=0., blank=True, null=True) + stdev = models.FloatField(default=0., blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - gauss_amplitude = ArrayField(models.FloatField()) - gauss_peak = ArrayField(models.FloatField()) - gauss_width = ArrayField(models.FloatField()) - gauss_chisq = models.FloatField(blank=True, null=True) - double_gauss_amplitude1 = ArrayField(models.FloatField()) - double_gauss_peak1 = ArrayField(models.FloatField()) - double_gauss_width1 = ArrayField(models.FloatField()) - double_gauss_amplitude2 = ArrayField(models.FloatField()) - double_gauss_peak2 = ArrayField(models.FloatField()) - double_gauss_width2 = ArrayField(models.FloatField()) - double_gauss_chisq = models.FloatField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - hist_dark_values = ArrayField(models.FloatField()) - hist_amplitudes = ArrayField(models.FloatField()) + gauss_amplitude = ArrayField(models.FloatField(default=0.)) + gauss_peak = ArrayField(models.FloatField(default=0.)) + gauss_width = ArrayField(models.FloatField(default=0.)) + gauss_chisq = models.FloatField(default=0., blank=True, null=True) + double_gauss_amplitude1 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak1 = ArrayField(models.FloatField(default=0.)) + double_gauss_width1 = ArrayField(models.FloatField(default=0.)) + double_gauss_amplitude2 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak2 = ArrayField(models.FloatField(default=0.)) + double_gauss_width2 = ArrayField(models.FloatField(default=0.)) + double_gauss_chisq = models.FloatField(default=0., blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + hist_dark_values = ArrayField(models.FloatField(default=0.)) + hist_amplitudes = ArrayField(models.FloatField(default=0.)) class Meta: managed = True @@ -265,16 +283,17 @@ class Meta: class NIRISSDarkPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) class Meta: managed = True @@ -284,12 +303,12 @@ class Meta: class NIRISSDarkQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) - files_found = models.IntegerField(blank=True, null=True) + files_found = models.IntegerField(default=0, blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) class Meta: @@ -300,29 +319,30 @@ class Meta: class NIRSpecDarkDarkCurrent(models.Model): entry_date = models.DateTimeField(unique=True) - aperture = models.CharField(blank=True, null=True) - amplifier = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) - mean = models.FloatField(blank=True, null=True) - stdev = models.FloatField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + amplifier = models.CharField(max_length=MAX_LEN_AMPLIFIER, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + mean = models.FloatField(default=0., blank=True, null=True) + stdev = models.FloatField(default=0., blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - gauss_amplitude = ArrayField(models.FloatField()) - gauss_peak = ArrayField(models.FloatField()) - gauss_width = ArrayField(models.FloatField()) - gauss_chisq = models.FloatField(blank=True, null=True) - double_gauss_amplitude1 = ArrayField(models.FloatField()) - double_gauss_peak1 = ArrayField(models.FloatField()) - double_gauss_width1 = ArrayField(models.FloatField()) - double_gauss_amplitude2 = ArrayField(models.FloatField()) - double_gauss_peak2 = ArrayField(models.FloatField()) - double_gauss_width2 = ArrayField(models.FloatField()) - double_gauss_chisq = models.FloatField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - hist_dark_values = ArrayField(models.FloatField()) - hist_amplitudes = ArrayField(models.FloatField()) + gauss_amplitude = ArrayField(models.FloatField(default=0.)) + gauss_peak = ArrayField(models.FloatField(default=0.)) + gauss_width = ArrayField(models.FloatField(default=0.)) + gauss_chisq = models.FloatField(default=0., blank=True, null=True) + double_gauss_amplitude1 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak1 = ArrayField(models.FloatField(default=0.)) + double_gauss_width1 = ArrayField(models.FloatField(default=0.)) + double_gauss_amplitude2 = ArrayField(models.FloatField(default=0.)) + double_gauss_peak2 = ArrayField(models.FloatField(default=0.)) + double_gauss_width2 = ArrayField(models.FloatField(default=0.)) + double_gauss_chisq = models.FloatField(default=0., blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + hist_dark_values = ArrayField(models.FloatField(default=0.)) + hist_amplitudes = ArrayField(models.FloatField(default=0.)) class Meta: managed = True @@ -332,16 +352,17 @@ class Meta: class NIRSpecDarkPixelStats(models.Model): entry_date = models.DateTimeField(unique=True) - detector = models.CharField(blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) x_coord = ArrayField(models.IntegerField()) y_coord = ArrayField(models.IntegerField()) - type = models.CharField(blank=True, null=True) - source_files = models.TextField(blank=True, null=True) # This field type is a guess. 
+ type = models.CharField(max_length=MAX_LEN_TYPE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + source_files = ArrayField(models.CharField(max_length=MAX_LEN_FILENAME, help_text="source file names", + default=DEFAULT_MODEL_CHARFIELD), blank=True, null=True) obs_start_time = models.DateTimeField(blank=True, null=True) obs_mid_time = models.DateTimeField(blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - mean_dark_image_file = models.CharField(blank=True, null=True) - baseline_file = models.CharField(blank=True, null=True) + mean_dark_image_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) + baseline_file = models.CharField(max_length=MAX_LEN_FILENAME, default=DEFAULT_MODEL_CHARFIELD, null=True) class Meta: managed = True @@ -351,12 +372,12 @@ class Meta: class NIRSpecDarkQueryHistory(models.Model): entry_date = models.DateTimeField(unique=True) - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - readpattern = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) + readpattern = models.CharField(max_length=MAX_LEN_READPATTERN, default=DEFAULT_MODEL_CHARFIELD, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) - files_found = models.IntegerField(blank=True, null=True) + files_found = models.IntegerField(default=0, blank=True, null=True) run_monitor = models.BooleanField(blank=True, null=True) class Meta: diff --git a/jwql/website/apps/jwql/monitor_models/edb.py b/jwql/website/apps/jwql/monitor_models/edb.py index 2cad15418..01a617af6 100644 --- a/jwql/website/apps/jwql/monitor_models/edb.py +++ b/jwql/website/apps/jwql/monitor_models/edb.py @@ -29,9 +29,11 @@ from django.db import models from django.contrib.postgres.fields import ArrayField +from jwql.utils.constants import MAX_LEN_DEPENDENCY_VALUE, MAX_LEN_MNEMONIC + class FGSEdbBlocksStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -48,7 +50,7 @@ class Meta: class FGSEdbDailyStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -65,14 +67,14 @@ class Meta: class FGSEdbEveryChangeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) time = ArrayField(models.DateTimeField()) mnemonic_value = ArrayField(models.FloatField()) median = models.FloatField(blank=True, null=True) stdev = models.FloatField(blank=True, null=True) - dependency_mnemonic = models.CharField(blank=True, null=True) - dependency_value = models.CharField(blank=True, null=True) + dependency_mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) + 
dependency_value = models.CharField(max_length=MAX_LEN_DEPENDENCY_VALUE, blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) class Meta: @@ -82,7 +84,7 @@ class Meta: class FGSEdbTimeIntervalStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -99,7 +101,7 @@ class Meta: class FGSEdbTimeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -113,7 +115,7 @@ class Meta: class MIRIEdbBlocksStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -130,7 +132,7 @@ class Meta: class MIRIEdbDailyStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -147,14 +149,14 @@ class Meta: class MIRIEdbEveryChangeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) time = ArrayField(models.DateTimeField()) mnemonic_value = ArrayField(models.FloatField()) median = models.FloatField(blank=True, null=True) stdev = models.FloatField(blank=True, null=True) - dependency_mnemonic = models.CharField(blank=True, null=True) - dependency_value = models.CharField(blank=True, null=True) + dependency_mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) + dependency_value = models.CharField(max_length=MAX_LEN_DEPENDENCY_VALUE, blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) class Meta: @@ -164,7 +166,7 @@ class Meta: class MIRIEdbTimeIntervalStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -181,7 +183,7 @@ class Meta: class MIRIEdbTimeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -195,7 +197,7 @@ class Meta: class NIRCamEdbBlocksStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -212,7 +214,7 @@ class Meta: class NIRCamEdbDailyStats(models.Model): - mnemonic = 
models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -229,14 +231,14 @@ class Meta: class NIRCamEdbEveryChangeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) time = ArrayField(models.DateTimeField()) mnemonic_value = ArrayField(models.FloatField()) median = models.FloatField(blank=True, null=True) stdev = models.FloatField(blank=True, null=True) - dependency_mnemonic = models.CharField(blank=True, null=True) - dependency_value = models.CharField(blank=True, null=True) + dependency_mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) + dependency_value = models.CharField(max_length=MAX_LEN_DEPENDENCY_VALUE, blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) class Meta: @@ -246,7 +248,7 @@ class Meta: class NIRCamEdbTimeIntervalStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -263,7 +265,7 @@ class Meta: class NIRCamEdbTimeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -277,7 +279,7 @@ class Meta: class NIRISSEdbBlocksStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -294,7 +296,7 @@ class Meta: class NIRISSEdbDailyStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -311,14 +313,14 @@ class Meta: class NIRISSEdbEveryChangeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) time = ArrayField(models.DateTimeField()) mnemonic_value = ArrayField(models.FloatField()) median = models.FloatField(blank=True, null=True) stdev = models.FloatField(blank=True, null=True) - dependency_mnemonic = models.CharField(blank=True, null=True) - dependency_value = models.CharField(blank=True, null=True) + dependency_mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) + dependency_value = models.CharField(max_length=MAX_LEN_DEPENDENCY_VALUE, blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) class Meta: @@ -328,7 +330,7 @@ class Meta: class NIRISSEdbTimeIntervalStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = 
models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -345,7 +347,7 @@ class Meta: class NIRISSEdbTimeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -359,7 +361,7 @@ class Meta: class NIRSpecEdbBlocksStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -376,7 +378,7 @@ class Meta: class NIRSpecEdbDailyStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -393,14 +395,14 @@ class Meta: class NIRSpecEdbEveryChangeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) time = ArrayField(models.DateTimeField()) mnemonic_value = ArrayField(models.FloatField()) median = models.FloatField(blank=True, null=True) stdev = models.FloatField(blank=True, null=True) - dependency_mnemonic = models.CharField(blank=True, null=True) - dependency_value = models.CharField(blank=True, null=True) + dependency_mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) + dependency_value = models.CharField(max_length=MAX_LEN_DEPENDENCY_VALUE, blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) class Meta: @@ -410,7 +412,7 @@ class Meta: class NIRSpecEdbTimeIntervalStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) @@ -427,7 +429,7 @@ class Meta: class NIRSpecEdbTimeStats(models.Model): - mnemonic = models.CharField(blank=True, null=True) + mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True) latest_query = models.DateTimeField(blank=True, null=True) times = ArrayField(models.DateTimeField()) data = ArrayField(models.FloatField()) diff --git a/jwql/website/apps/jwql/monitor_models/grating.py b/jwql/website/apps/jwql/monitor_models/grating.py index 1c2029049..0ed84f1e6 100644 --- a/jwql/website/apps/jwql/monitor_models/grating.py +++ b/jwql/website/apps/jwql/monitor_models/grating.py @@ -27,7 +27,8 @@ # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. 
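# --- Editor's note: illustrative sketch, not part of the patch ---
# The Edb*Stats models above repeat the same mnemonic/latest_query/entry_date
# columns five times per instrument. If that boilerplate ever becomes a burden,
# a Django abstract base model can declare the shared fields once. Everything
# below (the settings.configure() bootstrap, the "example" app_label, and the
# MAX_LEN_MNEMONIC value) is an assumption made to keep the demo self-contained.
import django
from django.conf import settings

settings.configure(
    INSTALLED_APPS=["django.contrib.contenttypes", "django.contrib.auth"],
    DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}},
)
django.setup()

from django.db import models  # noqa: E402

MAX_LEN_MNEMONIC = 40  # assumed; the real constant lives in jwql.utils.constants


class EdbStatsBase(models.Model):
    """Columns shared by the per-instrument EDB stats tables."""

    mnemonic = models.CharField(max_length=MAX_LEN_MNEMONIC, blank=True, null=True)
    latest_query = models.DateTimeField(blank=True, null=True)
    entry_date = models.DateTimeField(blank=True, null=True)

    class Meta:
        abstract = True  # Django creates no table for the base class
        app_label = "example"


class FGSEdbDailyStatsSketch(EdbStatsBase):
    class Meta(EdbStatsBase.Meta):
        db_table = "fgs_edb_daily_stats_sketch"


print(FGSEdbDailyStatsSketch._meta.db_table)  # -> fgs_edb_daily_stats_sketch
# --- end editor's sketch ---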
from django.db import models -from django.contrib.postgres.fields import ArrayField + +from jwql.utils.constants import MAX_LEN_TIME class NIRSpecGratingQueryHistory(models.Model): @@ -44,7 +45,7 @@ class Meta: class NIRSpecGratingStats(models.Model): entry_date = models.DateTimeField(unique=True) - time = models.CharField(blank=True, null=True) + time = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) inrsh_gwa_adcmgain = models.FloatField(blank=True, null=True) inrsh_gwa_adcmoffset = models.FloatField(blank=True, null=True) inrsh_gwa_motor_vref = models.FloatField(blank=True, null=True) diff --git a/jwql/website/apps/jwql/monitor_models/readnoise.py b/jwql/website/apps/jwql/monitor_models/readnoise.py index 5e616aa71..815576047 100644 --- a/jwql/website/apps/jwql/monitor_models/readnoise.py +++ b/jwql/website/apps/jwql/monitor_models/readnoise.py @@ -26,13 +26,26 @@ """ # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. -from django.db import models from django.contrib.postgres.fields import ArrayField +from django.db import models + +from jwql.utils.constants import ( + MAX_LEN_APERTURE, + MAX_LEN_DETECTOR, + MAX_LEN_DIFF_IMAGE, + MAX_LEN_TIME, + MAX_LEN_FILENAME, + MAX_LEN_INSTRUMENT, + MAX_LEN_NGROUPS, + MAX_LEN_NINTS, + MAX_LEN_READPATTERN, + MAX_LEN_SUBARRAY, +) class FGSReadnoiseQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -47,20 +60,20 @@ class Meta: class FGSReadnoiseStats(models.Model): - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - readnoise_filename = models.CharField(blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + read_pattern = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + nints = models.CharField(max_length=MAX_LEN_NINTS, blank=True, null=True) + ngroups = models.CharField(max_length=MAX_LEN_NGROUPS, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) + readnoise_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) full_image_mean = models.FloatField(blank=True, null=True) full_image_stddev = models.FloatField(blank=True, null=True) full_image_n = ArrayField(models.FloatField()) full_image_bin_centers = ArrayField(models.FloatField()) - readnoise_diff_image = models.CharField(blank=True, null=True) + readnoise_diff_image = 
models.CharField(max_length=MAX_LEN_DIFF_IMAGE, blank=True, null=True) diff_image_mean = models.FloatField(blank=True, null=True) diff_image_stddev = models.FloatField(blank=True, null=True) diff_image_n = ArrayField(models.FloatField()) @@ -90,8 +103,8 @@ class Meta: class MIRIReadnoiseQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -106,20 +119,20 @@ class Meta: class MIRIReadnoiseStats(models.Model): - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - readnoise_filename = models.CharField(blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + read_pattern = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + nints = models.CharField(max_length=MAX_LEN_NINTS, blank=True, null=True) + ngroups = models.CharField(max_length=MAX_LEN_NGROUPS, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) + readnoise_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) full_image_mean = models.FloatField(blank=True, null=True) full_image_stddev = models.FloatField(blank=True, null=True) full_image_n = ArrayField(models.FloatField()) full_image_bin_centers = ArrayField(models.FloatField()) - readnoise_diff_image = models.CharField(blank=True, null=True) + readnoise_diff_image = models.CharField(max_length=MAX_LEN_DIFF_IMAGE, blank=True, null=True) diff_image_mean = models.FloatField(blank=True, null=True) diff_image_stddev = models.FloatField(blank=True, null=True) diff_image_n = ArrayField(models.FloatField()) @@ -149,8 +162,8 @@ class Meta: class NIRCamReadnoiseQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -165,20 +178,20 @@ class Meta: class NIRCamReadnoiseStats(models.Model): - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, 
null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - readnoise_filename = models.CharField(blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + read_pattern = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + nints = models.CharField(max_length=MAX_LEN_NINTS, blank=True, null=True) + ngroups = models.CharField(max_length=MAX_LEN_NGROUPS, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) + readnoise_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) full_image_mean = models.FloatField(blank=True, null=True) full_image_stddev = models.FloatField(blank=True, null=True) full_image_n = ArrayField(models.FloatField()) full_image_bin_centers = ArrayField(models.FloatField()) - readnoise_diff_image = models.CharField(blank=True, null=True) + readnoise_diff_image = models.CharField(max_length=MAX_LEN_DIFF_IMAGE, blank=True, null=True) diff_image_mean = models.FloatField(blank=True, null=True) diff_image_stddev = models.FloatField(blank=True, null=True) diff_image_n = ArrayField(models.FloatField()) @@ -208,8 +221,8 @@ class Meta: class NIRISSReadnoiseQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -224,20 +237,20 @@ class Meta: class NIRISSReadnoiseStats(models.Model): - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - readnoise_filename = models.CharField(blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + read_pattern = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + nints = models.CharField(max_length=MAX_LEN_NINTS, blank=True, null=True) + ngroups = models.CharField(max_length=MAX_LEN_NGROUPS, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) + readnoise_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) full_image_mean = models.FloatField(blank=True, null=True) full_image_stddev = models.FloatField(blank=True, null=True) full_image_n = ArrayField(models.FloatField()) full_image_bin_centers = ArrayField(models.FloatField()) - 
readnoise_diff_image = models.CharField(blank=True, null=True) + readnoise_diff_image = models.CharField(max_length=MAX_LEN_DIFF_IMAGE, blank=True, null=True) diff_image_mean = models.FloatField(blank=True, null=True) diff_image_stddev = models.FloatField(blank=True, null=True) diff_image_n = ArrayField(models.FloatField()) @@ -267,8 +280,8 @@ class Meta: class NIRSpecReadnoiseQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -283,20 +296,20 @@ class Meta: class NIRSpecReadnoiseStats(models.Model): - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - readnoise_filename = models.CharField(blank=True, null=True) + uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + read_pattern = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + nints = models.CharField(max_length=MAX_LEN_NINTS, blank=True, null=True) + ngroups = models.CharField(max_length=MAX_LEN_NGROUPS, blank=True, null=True) + expstart = models.CharField(max_length=MAX_LEN_TIME, blank=True, null=True) + readnoise_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) full_image_mean = models.FloatField(blank=True, null=True) full_image_stddev = models.FloatField(blank=True, null=True) full_image_n = ArrayField(models.FloatField()) full_image_bin_centers = ArrayField(models.FloatField()) - readnoise_diff_image = models.CharField(blank=True, null=True) + readnoise_diff_image = models.CharField(max_length=MAX_LEN_DIFF_IMAGE, blank=True, null=True) diff_image_mean = models.FloatField(blank=True, null=True) diff_image_stddev = models.FloatField(blank=True, null=True) diff_image_n = ArrayField(models.FloatField()) diff --git a/jwql/website/apps/jwql/monitor_models/ta.py b/jwql/website/apps/jwql/monitor_models/ta.py index 93a8b269b..34ef20988 100644 --- a/jwql/website/apps/jwql/monitor_models/ta.py +++ b/jwql/website/apps/jwql/monitor_models/ta.py @@ -24,15 +24,29 @@ For more information please see: ```https://docs.djangoproject.com/en/2.0/topics/db/models/``` """ + # This is an auto-generated Django model module. # Feel free to rename the models, but don't rename db_table values or field names. 
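# --- Editor's note: illustrative sketch, not part of the patch ---
# Adding max_length to these readnoise/TA columns means oversized strings now
# fail Django validation (and, on Postgres, the varchar(n) column itself).
# The MAX_LEN_FILENAME value, model name, and app_label are assumptions made to
# keep the demo self-contained and runnable without a database.
import django
from django.conf import settings

settings.configure(INSTALLED_APPS=["django.contrib.contenttypes", "django.contrib.auth"])
django.setup()

from django.core.exceptions import ValidationError  # noqa: E402
from django.db import models  # noqa: E402

MAX_LEN_FILENAME = 1000  # assumed; the real constant lives in jwql.utils.constants


class ReadnoiseStatsSketch(models.Model):
    uncal_filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True)

    class Meta:
        app_label = "example"


row = ReadnoiseStatsSketch(uncal_filename="x" * (MAX_LEN_FILENAME + 1))
try:
    row.full_clean(validate_unique=False)  # field validation only; no database needed
except ValidationError as err:
    print(err.message_dict["uncal_filename"])  # ['Ensure this value has at most 1000 characters ...']
# --- end editor's sketch ---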
-from django.db import models from django.contrib.postgres.fields import ArrayField +from django.db import models + +from jwql.utils.constants import ( + MAX_LEN_APERTURE, + MAX_LEN_DETECTOR, + MAX_LEN_FILENAME, + MAX_LEN_FILTER, + MAX_LEN_GENERIC_TEXT, + MAX_LEN_INSTRUMENT, + MAX_LEN_PATH, + MAX_LEN_READPATTERN, + MAX_LEN_SUBARRAY, + MAX_LEN_VISIT, +) class MIRITaQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -42,31 +56,31 @@ class MIRITaQueryHistory(models.Model): class Meta: managed = True - db_table = 'miri_ta_query_history' - unique_together = (('id', 'entry_date'),) + db_table = "miri_ta_query_history" + unique_together = (("id", "entry_date"),) class MIRITaStats(models.Model): entry_date = models.DateTimeField(unique=True) - cal_file_name = models.CharField(blank=True, null=True) + cal_file_name = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) obs_end_time = models.DateTimeField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) targx = models.FloatField(blank=True, null=True) targy = models.FloatField(blank=True, null=True) offset = models.FloatField(blank=True, null=True) - full_im_path = models.CharField(blank=True, null=True) - zoom_im_path = models.CharField(blank=True, null=True) + full_im_path = models.CharField(max_length=MAX_LEN_PATH, blank=True, null=True) + zoom_im_path = models.CharField(max_length=MAX_LEN_PATH, blank=True, null=True) class Meta: managed = True - db_table = 'miri_ta_stats' - unique_together = (('id', 'entry_date'),) + db_table = "miri_ta_stats" + unique_together = (("id", "entry_date"),) class NIRSpecTaQueryHistory(models.Model): - instrument = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) + instrument = models.CharField(max_length=MAX_LEN_INSTRUMENT, blank=True, null=True) + aperture = models.CharField(max_length=MAX_LEN_APERTURE, blank=True, null=True) start_time_mjd = models.FloatField(blank=True, null=True) end_time_mjd = models.FloatField(blank=True, null=True) entries_found = models.IntegerField(blank=True, null=True) @@ -76,46 +90,103 @@ class NIRSpecTaQueryHistory(models.Model): class Meta: managed = True - db_table = 'nirspec_ta_query_history' - unique_together = (('id', 'entry_date'),) - + db_table = "nirspec_ta_query_history" + unique_together = (("id", "entry_date"),) + + +class NIRSpecWataStats(models.Model): + filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + date_obs = models.DateTimeField(blank=True, null=True) + visit_id = models.CharField(max_length=MAX_LEN_VISIT, blank=True, null=True) + tafilter = models.CharField(max_length=MAX_LEN_FILTER, blank=True, null=True) + readout = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + ta_status = models.CharField(max_length=MAX_LEN_GENERIC_TEXT, blank=True, null=True) + star_name = 
models.IntegerField(blank=True, null=True) + star_ra = models.FloatField(blank=True, null=True) + star_dec = models.FloatField(blank=True, null=True) + star_mag = models.FloatField(blank=True, null=True) + star_catalog = models.IntegerField(blank=True, null=True) + planned_v2 = models.FloatField(blank=True, null=True) + planned_v3 = models.FloatField(blank=True, null=True) + stamp_start_col = models.IntegerField(blank=True, null=True) + stamp_start_row = models.IntegerField(blank=True, null=True) + star_detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + max_val_box = models.FloatField(blank=True, null=True) + max_val_box_col = models.IntegerField(blank=True, null=True) + max_val_box_row = models.IntegerField(blank=True, null=True) + iterations = models.IntegerField(blank=True, null=True) + corr_col = models.IntegerField(blank=True, null=True) + corr_row = models.IntegerField(blank=True, null=True) + stamp_final_col = models.FloatField(blank=True, null=True) + stamp_final_row = models.FloatField(blank=True, null=True) + detector_final_col = models.FloatField(blank=True, null=True) + detector_final_row = models.FloatField(blank=True, null=True) + final_sci_x = models.FloatField(blank=True, null=True) + final_sci_y = models.FloatField(blank=True, null=True) + measured_v2 = models.FloatField(blank=True, null=True) + measured_v3 = models.FloatField(blank=True, null=True) + ref_v2 = models.FloatField(blank=True, null=True) + ref_v3 = models.FloatField(blank=True, null=True) + v2_offset = models.FloatField(blank=True, null=True) + v3_offset = models.FloatField(blank=True, null=True) + sam_x = models.FloatField(blank=True, null=True) + sam_y = models.FloatField(blank=True, null=True) + entry_date = models.DateTimeField(blank=True, null=True) -class NIRSpecTaStats(models.Model): + class Meta: + managed = True + db_table = "nirspec_wata_stats" + unique_together = (("id", "entry_date"),) + + +class NIRSpecMsataStats(models.Model): + filename = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + date_obs = models.DateTimeField(blank=True, null=True) + visit_id = models.CharField(max_length=MAX_LEN_VISIT, blank=True, null=True) + tafilter = models.CharField(max_length=MAX_LEN_FILTER, blank=True, null=True) + detector = models.CharField(max_length=MAX_LEN_DETECTOR, blank=True, null=True) + readout = models.CharField(max_length=MAX_LEN_READPATTERN, blank=True, null=True) + subarray = models.CharField(max_length=MAX_LEN_SUBARRAY, blank=True, null=True) + num_refstars = models.IntegerField(blank=True, null=True) + ta_status = models.CharField(max_length=MAX_LEN_FILENAME, blank=True, null=True) + v2halffacet = models.FloatField(blank=True, null=True) + v3halffacet = models.FloatField(blank=True, null=True) + v2msactr = models.FloatField(blank=True, null=True) + v3msactr = models.FloatField(blank=True, null=True) + lsv2offset = models.FloatField(blank=True, null=True) + lsv3offset = models.FloatField(blank=True, null=True) + lsoffsetmag = models.FloatField(blank=True, null=True) + lsrolloffset = models.FloatField(blank=True, null=True) + lsv2sigma = models.FloatField(blank=True, null=True) + lsv3sigma = models.FloatField(blank=True, null=True) + lsiterations = models.IntegerField(blank=True, null=True) + guidestarid = models.IntegerField(blank=True, null=True) + guidestarx = models.FloatField(blank=True, null=True) + guidestary = models.FloatField(blank=True, null=True) + guidestarroll = models.FloatField(blank=True, null=True) + samx = 
models.FloatField(blank=True, null=True) + samy = models.FloatField(blank=True, null=True) + samroll = models.FloatField(blank=True, null=True) + box_peak_value = ArrayField(models.FloatField()) + reference_star_mag = ArrayField(models.FloatField()) + convergence_status = ArrayField( + models.CharField(max_length=MAX_LEN_GENERIC_TEXT, blank=True, null=True) + ) + reference_star_number = ArrayField(models.IntegerField()) + lsf_removed_status = ArrayField( + models.CharField(max_length=MAX_LEN_GENERIC_TEXT, blank=True, null=True) + ) + lsf_removed_reason = ArrayField( + models.CharField(max_length=MAX_LEN_GENERIC_TEXT, blank=True, null=True) + ) + lsf_removed_x = ArrayField(models.FloatField()) + lsf_removed_y = ArrayField(models.FloatField()) + planned_v2 = ArrayField(models.FloatField()) + planned_v3 = ArrayField(models.FloatField()) + stars_in_fit = models.IntegerField(blank=True, null=True) entry_date = models.DateTimeField(blank=True, null=True) - uncal_filename = models.CharField(blank=True, null=True) - aperture = models.CharField(blank=True, null=True) - detector = models.CharField(blank=True, null=True) - subarray = models.CharField(blank=True, null=True) - read_pattern = models.CharField(blank=True, null=True) - nints = models.CharField(blank=True, null=True) - ngroups = models.CharField(blank=True, null=True) - expstart = models.CharField(blank=True, null=True) - full_image_mean = models.FloatField(blank=True, null=True) - full_image_stddev = models.FloatField(blank=True, null=True) - full_image_n = ArrayField(models.FloatField()) - full_image_bin_centers = ArrayField(models.FloatField()) - diff_image_mean = models.FloatField(blank=True, null=True) - diff_image_stddev = models.FloatField(blank=True, null=True) - diff_image_n = ArrayField(models.FloatField()) - diff_image_bin_centers = ArrayField(models.FloatField()) - amp1_mean = models.FloatField(blank=True, null=True) - amp1_stddev = models.FloatField(blank=True, null=True) - amp1_n = ArrayField(models.FloatField()) - amp1_bin_centers = ArrayField(models.FloatField()) - amp2_mean = models.FloatField(blank=True, null=True) - amp2_stddev = models.FloatField(blank=True, null=True) - amp2_n = ArrayField(models.FloatField()) - amp2_bin_centers = ArrayField(models.FloatField()) - amp3_mean = models.FloatField(blank=True, null=True) - amp3_stddev = models.FloatField(blank=True, null=True) - amp3_n = ArrayField(models.FloatField()) - amp3_bin_centers = ArrayField(models.FloatField()) - amp4_mean = models.FloatField(blank=True, null=True) - amp4_stddev = models.FloatField(blank=True, null=True) - amp4_n = ArrayField(models.FloatField()) - amp4_bin_centers = ArrayField(models.FloatField()) class Meta: managed = True - db_table = 'nirspec_ta_stats' - unique_together = (('id', 'entry_date'),) + db_table = "nirspec_msata_stats" + unique_together = (("id", "entry_date"),) diff --git a/jwql/website/apps/jwql/monitor_pages/__init__.py b/jwql/website/apps/jwql/monitor_pages/__init__.py index ed184d7ff..ba9fc9d4a 100644 --- a/jwql/website/apps/jwql/monitor_pages/__init__.py +++ b/jwql/website/apps/jwql/monitor_pages/__init__.py @@ -1 +1,44 @@ +import os + from .monitor_cosmic_rays_bokeh import CosmicRayMonitor + +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS + +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + 
django.setup() + from jwql.website.apps.jwql.monitor_models.bad_pixel import FGSBadPixelQueryHistory, FGSBadPixelStats, MIRIBadPixelQueryHistory, MIRIBadPixelStats + from jwql.website.apps.jwql.monitor_models.bad_pixel import NIRCamBadPixelQueryHistory, NIRCamBadPixelStats, NIRISSBadPixelQueryHistory, NIRISSBadPixelStats + from jwql.website.apps.jwql.monitor_models.bad_pixel import NIRSpecBadPixelQueryHistory, NIRSpecBadPixelStats + + from jwql.website.apps.jwql.monitor_models.bias import NIRCamBiasQueryHistory + + from jwql.website.apps.jwql.monitor_models.claw import NIRCamClawQueryHistory, NIRCamClawStats + + from jwql.website.apps.jwql.monitor_models.common import Monitor + + from jwql.website.apps.jwql.monitor_models.cosmic_ray import FGSCosmicRayQueryHistory + + from jwql.website.apps.jwql.monitor_models.dark_current import FGSDarkDarkCurrent, FGSDarkPixelStats, FGSDarkQueryHistory + from jwql.website.apps.jwql.monitor_models.dark_current import MIRIDarkDarkCurrent, MIRIDarkPixelStats, MIRIDarkQueryHistory + from jwql.website.apps.jwql.monitor_models.dark_current import NIRCamDarkDarkCurrent, NIRCamDarkPixelStats, NIRCamDarkQueryHistory + from jwql.website.apps.jwql.monitor_models.dark_current import NIRISSDarkDarkCurrent, NIRISSDarkPixelStats, NIRISSDarkQueryHistory + from jwql.website.apps.jwql.monitor_models.dark_current import NIRSpecDarkDarkCurrent, NIRSpecDarkPixelStats, NIRSpecDarkQueryHistory + + from jwql.website.apps.jwql.monitor_models.edb import FGSEdbBlocksStats, FGSEdbDailyStats, FGSEdbEveryChangeStats, FGSEdbTimeIntervalStats, FGSEdbTimeStats + from jwql.website.apps.jwql.monitor_models.edb import MIRIEdbBlocksStats, MIRIEdbDailyStats, MIRIEdbEveryChangeStats, MIRIEdbTimeIntervalStats, MIRIEdbTimeStats + from jwql.website.apps.jwql.monitor_models.edb import NIRCamEdbBlocksStats, NIRCamEdbDailyStats, NIRCamEdbEveryChangeStats, NIRCamEdbTimeIntervalStats, NIRCamEdbTimeStats + from jwql.website.apps.jwql.monitor_models.edb import NIRISSEdbBlocksStats, NIRISSEdbDailyStats, NIRISSEdbEveryChangeStats, NIRISSEdbTimeIntervalStats, NIRISSEdbTimeStats + from jwql.website.apps.jwql.monitor_models.edb import NIRSpecEdbBlocksStats, NIRSpecEdbDailyStats, NIRSpecEdbEveryChangeStats, NIRSpecEdbTimeIntervalStats, NIRSpecEdbTimeStats + + from jwql.website.apps.jwql.monitor_models.grating import NIRSpecGratingQueryHistory + + from jwql.website.apps.jwql.monitor_models.readnoise import FGSReadnoiseQueryHistory, FGSReadnoiseStats + from jwql.website.apps.jwql.monitor_models.readnoise import MIRIReadnoiseQueryHistory, MIRIReadnoiseStats + from jwql.website.apps.jwql.monitor_models.readnoise import NIRCamReadnoiseQueryHistory, NIRCamReadnoiseStats + from jwql.website.apps.jwql.monitor_models.readnoise import NIRISSReadnoiseQueryHistory, NIRISSReadnoiseStats + from jwql.website.apps.jwql.monitor_models.readnoise import NIRSpecReadnoiseQueryHistory, NIRSpecReadnoiseStats + + from jwql.website.apps.jwql.monitor_models.ta import MIRITaQueryHistory diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py index a3722ce13..8830a6b3d 100755 --- a/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py +++ b/jwql/website/apps/jwql/monitor_pages/monitor_bad_pixel_bokeh.py @@ -32,20 +32,23 @@ import numpy as np from sqlalchemy import and_, func -from jwql.database.database_interface import get_unique_values_per_column, session -from jwql.database.database_interface import NIRCamBadPixelQueryHistory, 
NIRCamBadPixelStats -from jwql.database.database_interface import NIRISSBadPixelQueryHistory, NIRISSBadPixelStats -from jwql.database.database_interface import MIRIBadPixelQueryHistory, MIRIBadPixelStats -from jwql.database.database_interface import NIRSpecBadPixelQueryHistory, NIRSpecBadPixelStats -from jwql.database.database_interface import FGSBadPixelQueryHistory, FGSBadPixelStats from jwql.utils.constants import BAD_PIXEL_MONITOR_MAX_POINTS_TO_PLOT, BAD_PIXEL_TYPES, DARKS_BAD_PIXEL_TYPES from jwql.utils.constants import DETECTOR_PER_INSTRUMENT, FLATS_BAD_PIXEL_TYPES, JWST_INSTRUMENT_NAMES_MIXEDCASE +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.permissions import set_permissions from jwql.utils.utils import filesystem_path, get_config, read_png, save_png +from jwql.website.apps.jwql.models import get_unique_values_per_column SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) OUTPUT_DIR = get_config()['outputs'] +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + from jwql.website.apps.jwql.monitor_models.bad_pixel import * + class BadPixelPlots(): """Class for creating the bad pixel monitor plots and figures to be displayed @@ -98,7 +101,16 @@ def modify_bokeh_saved_html(self): lines_to_remove = ["", '', ' ', - ''] + '', + ' '] # Our Django-related lines that need to be at the top of the file hstring = """href="{{'/jwqldb/%s_bad_pixel_stats'%inst.lower()}}" name=test_link class="btn btn-primary my-2" type="submit">Go to JWQLDB page""" @@ -162,7 +174,8 @@ def run(self): template_dir = os.path.join(os.path.dirname(__file__), '../templates') template_file = os.path.join(template_dir, 'bad_pixel_monitor_savefile_basic.html') temp_vars = {'inst': self.instrument, 'plot_script': script, 'plot_div': div} - self._html = file_html(tabs, CDN, f'{self.instrument} bad pix monitor', template_file, temp_vars) + self._html = file_html(tabs, CDN, title=f'{self.instrument} bad pix monitor', + template=template_file, template_variables=temp_vars) # Modify the html such that our Django-related lines are kept in place, # which will allow the page to keep the same formatting and styling as @@ -263,40 +276,33 @@ def __init__(self, pixel_table, instrument, detector): self.get_trending_data(badtype) def get_most_recent_entry(self): - """Get all nedded data from the database tables. 
+ """For the given detector, get the latest entry for each bad pixel type """ - # For the given detector, get the latest entry for each bad pixel type - subq = (session - .query(self.pixel_table.type, func.max(self.pixel_table.entry_date).label("max_created")) - .filter(self.pixel_table.detector == self.detector) - .group_by(self.pixel_table.type) - .subquery() - ) - - query = (session.query(self.pixel_table) - .join(subq, self.pixel_table.entry_date == subq.c.max_created) - ) - - latest_entries_by_type = query.all() - session.close() - - # Organize the results - for row in latest_entries_by_type: - self.new_bad_pix[row.type] = (row.x_coord, row.y_coord) - self.background_file[row.type] = row.source_files[0] - self.obs_start_time[row.type] = row.obs_start_time - self.obs_end_time[row.type] = row.obs_end_time - self.num_files[row.type] = len(row.source_files) - self.baseline_file[row.type] = row.baseline_file - - # If no data is retrieved from the database at all, add a dummy generic entry - if len(self.new_bad_pix.keys()) == 0: - self.new_bad_pix[self.badtypes[0]] = ([], []) - self.background_file[self.badtypes[0]] = '' - self.obs_start_time[self.badtypes[0]] = datetime.datetime.today() - self.obs_end_time[self.badtypes[0]] = datetime.datetime.today() - self.num_files[self.badtypes[0]] = 0 - self.baseline_file[self.badtypes[0]] = '' + + bad_pixel_types = self.pixel_table.objects.values('type').distinct() + + for bad_type in bad_pixel_types: + bad_filters = {'detector__iexact': self.detector, + 'type': bad_type} + + record = (self.pixel_table.objects + .filter(**bad_filters) + .order_by("-obs_end_time").first()) + + if record is None: + self.new_bad_pix[bad_type] = ([], []) + self.background_file[bad_type] = '' + self.obs_start_time[bad_type] = datetime.datetime.today() + self.obs_end_time[bad_type] = datetime.datetime.today() + self.num_files[bad_type] = 0 + self.baseline_file[bad_type] = '' + else: + self.new_bad_pix[bad_type] = (record.x_coord, record.y_coord) + self.background_file[bad_type] = record.source_file + self.obs_start_time[bad_type] = record.obs_start_time + self.obs_end_time[bad_type] = record.obs_end_time + self.num_files[bad_type] = len(record.source_files) + self.baseline_file[bad_type] = record.baseline_file def get_trending_data(self, badpix_type): """Retrieve and organize the data needed to produce the trending plot. @@ -306,21 +312,20 @@ def get_trending_data(self, badpix_type): badpix_type : str The type of bad pixel to query for, e.g. 
'dead' """ - # Query database for all data in the table with a matching detector and bad pixel type - all_entries_by_type = session.query(self.pixel_table.type, self.pixel_table.detector, func.array_length(self.pixel_table.x_coord, 1), - self.pixel_table.obs_mid_time) \ - .filter(and_(self.pixel_table.detector == self.detector, self.pixel_table.type == badpix_type)) \ - .all() + filters = {"type": badpix_type, + "detector": self.detector} + + all_entries_by_type = self.pixel_table.objects.filter(**filters).all() - # Organize the results num_pix = [] times = [] + for i, row in enumerate(all_entries_by_type): if i == 0: - badtype = row[0] - detector = row[1] - num_pix.append(row[2]) - times.append(row[3]) + badtype = row.type + detector = row.detector + num_pix.append(len(row.x_coord)) + times.append(row.obs_mid_time) # If there was no data in the database, create an empty entry if len(num_pix) == 0: @@ -329,9 +334,7 @@ def get_trending_data(self, badpix_type): num_pix = [0] times = [datetime.datetime.today()] - # Add results to self.trending_data - self.trending_data[badpix_type] = (detector, num_pix, times) - session.close() + self.trending_data[badtype] = (detector, num_pix, times) class NewBadPixPlot(): diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py index 685392906..4c3526ebd 100644 --- a/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py +++ b/jwql/website/apps/jwql/monitor_pages/monitor_bias_bokeh.py @@ -279,7 +279,8 @@ def modify_bokeh_saved_html(self): """ # Insert into our html template and save temp_vars = {'inst': self.instrument, 'plot_script': self.script, 'plot_div': self.div} - html_lines = file_html(self.tabs, CDN, f'{self.instrument} bias monitor', self.html_file, temp_vars) + html_lines = file_html(self.tabs, CDN, title=f'{self.instrument} bias monitor', + template=self.html_file, template_variables=temp_vars) lines = html_lines.split('\n') diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_dark_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_dark_bokeh.py index 1a4e7a670..3e51cf83d 100755 --- a/jwql/website/apps/jwql/monitor_pages/monitor_dark_bokeh.py +++ b/jwql/website/apps/jwql/monitor_pages/monitor_dark_bokeh.py @@ -27,19 +27,22 @@ from datetime import datetime, timedelta import numpy as np from PIL import Image -from sqlalchemy import func -from sqlalchemy.sql.expression import and_ - -from jwql.database.database_interface import get_unique_values_per_column, session -from jwql.database.database_interface import NIRCamDarkPixelStats, NIRCamDarkDarkCurrent -from jwql.database.database_interface import NIRISSDarkPixelStats, NIRISSDarkDarkCurrent -from jwql.database.database_interface import MIRIDarkPixelStats, MIRIDarkDarkCurrent -from jwql.database.database_interface import NIRSpecDarkPixelStats, NIRSpecDarkDarkCurrent -from jwql.database.database_interface import FGSDarkPixelStats, FGSDarkDarkCurrent -from jwql.utils.constants import FULL_FRAME_APERTURES -from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE + +from jwql.utils.constants import FULL_FRAME_APERTURES, JWST_INSTRUMENT_NAMES_MIXEDCASE +from jwql.utils.constants import ON_GITHUB_ACTIONS, ON_READTHEDOCS from jwql.utils.utils import get_config, read_png from jwql.website.apps.jwql.bokeh_utils import PlaceholderPlot +from jwql.website.apps.jwql.models import get_model_column_names, get_unique_values_per_column + +if not ON_GITHUB_ACTIONS and not ON_READTHEDOCS: + # Need to set up django apps 
before we can access the models + import django # noqa: E402 (module level import not at top of file) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jwql.website.jwql_proj.settings") + django.setup() + + # Import * is okay here because this module specifically only contains database models + # for this monitor + from jwql.website.apps.jwql.monitor_models.dark_current import * # noqa: E402 (module level import not at top of file) SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) OUTPUTS_DIR = get_config()['outputs'] @@ -199,7 +202,7 @@ def create_plot(self): title_str = f'{self.aperture}: Dark Rate Histogram' x_label = 'Dark Rate (DN/sec)' y_label = 'Number of Pixels' - self.plot = PlaceholderPlot(title_str, x_label, y_label).create() + self.plot = PlaceholderPlot(title_str, x_label, y_label).plot class DarkImagePlot(): @@ -294,12 +297,12 @@ def identify_tables(self): """Determine which dark current database tables as associated with a given instrument""" mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument.lower()] - self.pixel_table = eval('{}DarkPixelStats'.format(mixed_case_name)) - self.stats_table = eval('{}DarkDarkCurrent'.format(mixed_case_name)) + self.pixel_table = eval(f'{mixed_case_name}DarkPixelStats') + self.stats_table = eval(f'{mixed_case_name}DarkDarkCurrent') # Get a list of column names for each - self.stats_table_columns = self.stats_table.metadata.tables[f'{self.instrument.lower()}_dark_dark_current'].columns.keys() - self.pixel_table_columns = self.pixel_table.metadata.tables[f'{self.instrument.lower()}_dark_pixel_stats'].columns.keys() + self.stats_table_columns = get_model_column_names(self.stats_table) + self.pixel_table_columns = get_model_column_names(self.pixel_table) def retrieve_data(self, aperture, get_pixtable_for_detector=False): """Get all nedded data from the database tables. @@ -314,9 +317,7 @@ def retrieve_data(self, aperture, get_pixtable_for_detector=False): detector associated with the given aperture. """ # Query database for all data in DarkDarkCurrent with a matching aperture - self.stats_data = session.query(self.stats_table) \ - .filter(self.stats_table.aperture == aperture) \ - .all() + self.stats_data = self.stats_table.objects.filter(aperture__iexact=aperture).all() if get_pixtable_for_detector: self.detector = aperture.split('_')[0].upper() @@ -330,19 +331,17 @@ def retrieve_data(self, aperture, get_pixtable_for_detector=False): # For the given detector, get the latest entry for each bad pixel type, and # return the bad pixel type, detector, and mean dark image file - subq = (session - .query(self.pixel_table.type, func.max(self.pixel_table.entry_date).label("max_created")) - .filter(self.pixel_table.detector == self.detector) - .group_by(self.pixel_table.type) - .subquery() - ) - - query = (session.query(self.pixel_table.type, self.pixel_table.detector, self.pixel_table.mean_dark_image_file) - .join(subq, self.pixel_table.entry_date == subq.c.max_created) - ) + bad_pixel_types = self.pixel_table.objects.values('type').distinct() + for bad_type in bad_pixel_types: + bad_filters = {'detector__iexact': self.detector, 'type': bad_type} - self.pixel_data = query.all() - session.close() + # Note that this function is currently never called with get_pixtable_for_detector = True + # 'record' below is a dictionary. 
e.g {'type': 'dead', + # 'detector': 'NRCA1', + # 'mean_dark_image_file': 'nircam_nrca1_full_59607.0_to_59865.91846797105_mean_slope_image.fits', + # 'obs_end_time': datetime.datetime(2022, 8, 3, 1, 33)} + record = self.pixel_table.objects.values('type', 'detector', 'mean_dark_image_file', 'obs_end_time').filter(**bad_filters).order_by("-obs_end_time").first() + self.pixel_data.append(record) class DarkMonitorPlots(): @@ -554,11 +553,12 @@ def get_latest_histogram_data(self): # amplifier values (note that these are strings e.g. '1'), and the # values are tuples of (x, y) lists for idx in most_recent_idx: - self.hist_data[self.db.stats_data[idx].amplifier] = (self.db.stats_data[idx].hist_dark_values, - self.db.stats_data[idx].hist_amplitudes) + idx_int = int(idx) # np.where returns a 64-bit int, but QuerySets must be indexed using an int() + self.hist_data[self.db.stats_data[idx_int].amplifier] = (self.db.stats_data[idx_int].hist_dark_values, + self.db.stats_data[idx_int].hist_amplitudes) # Keep track of the observation date of the most recent entry - self.hist_date = self.db.stats_data[most_recent_idx[0]].obs_mid_time + self.hist_date = self.db.stats_data[int(most_recent_idx[0])].obs_mid_time def get_trending_data(self): """Organize data for the trending plot. Here we need all the data for diff --git a/jwql/website/apps/jwql/monitor_pages/monitor_readnoise_bokeh.py b/jwql/website/apps/jwql/monitor_pages/monitor_readnoise_bokeh.py index d78e64de5..bb98736f4 100644 --- a/jwql/website/apps/jwql/monitor_pages/monitor_readnoise_bokeh.py +++ b/jwql/website/apps/jwql/monitor_pages/monitor_readnoise_bokeh.py @@ -127,9 +127,6 @@ def plot_readnoise_amplifers(self): self.amp_plots = [] for amp in ['1', '2', '3', '4']: - amp_plot = figure(title='Amp {}'.format(amp), width=280, height=280, x_axis_type='datetime') - amp_plot.xaxis[0].ticker.desired_num_ticks = 4 - if self.db.query_results: readnoise_vals = np.array([getattr(result, 'amp{}_mean'.format(amp)) for result in self.db.query_results]) else: @@ -148,13 +145,23 @@ def plot_readnoise_amplifers(self): ngroups=ngroups, readnoise=readnoise_vals)) + min_rn = np.min(readnoise_vals) + max_rn = np.max(readnoise_vals) + delta_rn = max_rn - min_rn + plot_max = max_rn + 0.5 * delta_rn + plot_min = min_rn - 0.5 * delta_rn + circle_radius = 0.01 * (plot_max - plot_min) + + amp_plot = figure(title='Amp {}'.format(amp), width=280, height=280, x_axis_type='datetime', y_range=(plot_min, plot_max)) + amp_plot.xaxis[0].ticker.desired_num_ticks = 4 + amp_plot.add_tools(HoverTool(tooltips=[("file", "@file"), ("time", "@expstarts"), ("nints", "@nints"), ("ngroups", "@ngroups"), ("readnoise", "@readnoise")])) - amp_plot.circle(x='expstarts', y='readnoise', source=source) + amp_plot.circle(x='expstarts', y='readnoise', radius=circle_radius, radius_dimension='y', source=source) amp_plot.xaxis.axis_label = 'Date' amp_plot.yaxis.axis_label = 'Mean Readnoise [DN]' @@ -204,7 +211,8 @@ def plot_readnoise_histogram(self): self.readnoise_histogram.add_tools(HoverTool(tooltips=[("Data (x, y)", "(@x, @y)"), ])) - self.readnoise_histogram.circle(x='x', y='y', source=source) + circle_radius = 0.005 * (hist_xr_end - hist_xr_start) + self.readnoise_histogram.circle(x='x', y='y', radius=circle_radius, radius_dimension='x', source=source) self.readnoise_histogram.xaxis.axis_label = 'Readnoise Difference [DN]' self.readnoise_histogram.yaxis.axis_label = 'Number of Pixels' diff --git a/jwql/website/apps/jwql/monitor_views.py b/jwql/website/apps/jwql/monitor_views.py index 
fa6f48204..b0943c54a 100644 --- a/jwql/website/apps/jwql/monitor_views.py +++ b/jwql/website/apps/jwql/monitor_views.py @@ -88,7 +88,7 @@ def background_monitor(request): def bad_pixel_monitor(request, inst): - """Generate the dark monitor page for a given instrument + """Generate the bad pixel monitor page for a given instrument Parameters ---------- @@ -342,13 +342,12 @@ def msata_monitoring_ajax(request): JsonResponse object Outgoing response sent to the webpage """ - # retrieve existing monitor html content + # Make plots and extract visualization components monitor = msata_monitor.MSATA() - div, script1, script2 = monitor.read_existing_html() + monitor.plots_for_app() - context = {'script1': script1, - 'script2': script2, - 'div': div} + context = {'script': monitor.script, + 'div': monitor.div} return JsonResponse(context, json_dumps_params={'indent': 2}) @@ -391,12 +390,11 @@ def wata_monitoring_ajax(request): JsonResponse object Outgoing response sent to the webpage """ - # retrieve existing monitor html content + # Make plots and extract visualization components monitor = wata_monitor.WATA() - div, script1, script2 = monitor.read_existing_html() + monitor.plots_for_app() - context = {'script1': script1, - 'script2': script2, - 'div': div} + context = {'script': monitor.script, + 'div': monitor.div} return JsonResponse(context, json_dumps_params={'indent': 2}) diff --git a/jwql/website/apps/jwql/static/css/jwql.css b/jwql/website/apps/jwql/static/css/jwql.css index 26229492a..b8e7b5c89 100644 --- a/jwql/website/apps/jwql/static/css/jwql.css +++ b/jwql/website/apps/jwql/static/css/jwql.css @@ -19,6 +19,12 @@ background-color: #f2f2f2; } +.anomaly_form button { + display: block; /* Makes the button a block element so it takes a new line */ + margin-top: 10px; /* Space between the comment field and the button */ + margin-left: 0; /* Align button to left, full width of parent div */ +} + .anomaly_choice { list-style: none; } @@ -365,6 +371,11 @@ display: inline-block; } + .image_views { + margin-left: 2%; + margin-right: 2%; + } + #loading { text-align:center; margin: 0 auto; @@ -620,6 +631,11 @@ font-size: 0.65rem; } + .wrapped-error-text { + max-width: 650px; /* Set the maximum width */ + word-wrap: break-word; /* Ensure words break if they exceed the width */ +} + /*Format the version identifier text in bottom corner*/ #version-div { float: right; diff --git a/jwql/website/apps/jwql/static/js/jwql.js b/jwql/website/apps/jwql/static/js/jwql.js index df47c63e7..895da4fea 100644 --- a/jwql/website/apps/jwql/static/js/jwql.js +++ b/jwql/website/apps/jwql/static/js/jwql.js @@ -310,13 +310,15 @@ function determine_page_title_obs(instrument, proposal, observation) { } } + /** - * Construct a 4-column table from an input dictionary. The 4 columns - * correspond to: key, value, key, value. - * @dictionary {dict} jsonified dictionary + * Populate a user provided html table from a dictionary + * @param {Object} dictionary - Python style dictionary + * @param {String} table_name - The table name in that will be updated + * @param {Integer} nr_paired_columns - How many paired columns per row. 
One "paired column" is 2 columns (1 key, 1 value) */ -function make_table_from_dict(dictionary) { - var tableBody = document.getElementById("table-body"); +function make_table_from_dict(dictionary, table_name, nr_paired_columns) { + var tableBody = document.getElementById(table_name); // Extract keys and values from the dictionary var keys = Object.keys(dictionary); var values = Object.values(dictionary); @@ -326,19 +328,19 @@ function make_table_from_dict(dictionary) { var maxLength = keys.length // Populate the table dynamically - for (var i = 0; i < maxLength; i+=2) { + // Create a row with dynamic amount of paired_columns (Key Column with Value Column) + var table_index = 0 + for (var i = 0; i < maxLength; i+=nr_paired_columns) { var row = document.createElement("tr"); - var row = tableBody.insertRow(i/2) - var cell1 = row.insertCell(0) - var cell2 = row.insertCell(1) - var cell3 = row.insertCell(2) - var cell4 = row.insertCell(3) - - cell1.textContent = i < keys.length ? keys[i]+':' : ""; - cell2.textContent = i < keys.length ? values[i] : ""; - cell3.textContent = (i+1) < keys.length ? keys[i+1]+':' : ""; - cell4.textContent = (i+1) < keys.length ? values[i+1] : ""; - + var row = tableBody.insertRow(i/nr_paired_columns) + // Fill cells in as pairs + for (var columnx = 0; columnx < nr_paired_columns*2; columnx+=2){ + var key_cell = row.insertCell(columnx) + var value_cell = row.insertCell(columnx + 1) + key_cell.textContent = i < keys.length ? keys[table_index]+':' : ""; + value_cell.textContent = i < keys.length ? values[table_index] : ""; + table_index++ + } tableBody.appendChild(row); } return tableBody; @@ -1040,8 +1042,7 @@ function update_msata_page(base_url) { // Build div content var content = data["div"]; - content += data["script1"]; - content += data["script2"]; + content += data["script"]; /* Add the content to the div * Note: + + + + + +
+ [new error-page template added here; its HTML was stripped in extraction. Recoverable content: a "Don't Panic!" banner, the {{ error_message }} and {{ exception_message }} context values, and a SpaceCAT image link.]
+{% endblock %}
\ No newline at end of file
diff --git a/jwql/website/apps/jwql/templates/explore_image.html
index 62ac88c9b..84ca74bcc 100644
--- a/jwql/website/apps/jwql/templates/explore_image.html
+++ b/jwql/website/apps/jwql/templates/explore_image.html
@@ -102,9 +102,9 @@

Data Settings

Submit Anomaly
- {% if form.errors %} + {% if anomaly_form.errors %}
- {% for field in form %} + {% for field in anomaly_form %} {% for error in field.errors %}
{{ error|escape }} @@ -118,12 +118,14 @@
Submit Anomaly
{{ csrf_input }} - {% for field in form %} + {% for field in anomaly_form %} {% for subwidget in field.subwidgets %}
  • {{subwidget}}
  • {% endfor %} {% endfor %} - +
    Comments
    + {{ comment_form.comment }} +
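# --- Editor's note: illustrative sketch, not part of the patch ---
# The template above now renders {{ comment_form.comment }} next to the anomaly
# checkboxes. In its simplest form such a comment form is a Django Form with a
# single Textarea-backed field; the field name matches the template, but the
# widget attributes and validation are assumptions, not jwql's actual forms.py.
import django
from django.conf import settings

settings.configure()
django.setup()

from django import forms  # noqa: E402


class CommentFormSketch(forms.Form):
    comment = forms.CharField(
        required=False,
        widget=forms.Textarea(attrs={"rows": 3, "class": "form-control"}),
    )


form = CommentFormSketch(data={"comment": "odd stripe near the detector edge"})
print(form.is_valid(), form.cleaned_data)  # True {'comment': 'odd stripe near the detector edge'}
# --- end editor's sketch ---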
    diff --git a/jwql/website/apps/jwql/templates/jwql_query.html b/jwql/website/apps/jwql/templates/jwql_query.html index 098bdd4ae..6f007f967 100644 --- a/jwql/website/apps/jwql/templates/jwql_query.html +++ b/jwql/website/apps/jwql/templates/jwql_query.html @@ -573,8 +573,9 @@

    Dynamic Query Form


- [2 removed lines; HTML markup stripped in extraction]
+ [3 added lines; HTML markup stripped in extraction]
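# --- Editor's note: illustrative sketch, not part of the patch ---
# The query form gains a download control here, paired with the query_download
# route registered in urls.py near the end of this diff. A CSV export view is
# just csv.writer over an HttpResponse; the column layout and filename below
# are assumptions, not jwql's actual implementation.
import csv

from django.conf import settings
from django.http import HttpResponse

settings.configure()


def query_download_sketch(request, rootnames=("jw01022001001_02101_00001",)):
    """Return the current query results as a CSV attachment."""
    response = HttpResponse(content_type="text/csv")
    response["Content-Disposition"] = 'attachment; filename="jwql_query_results.csv"'
    writer = csv.writer(response)  # HttpResponse is file-like, so csv can write to it
    writer.writerow(["rootname"])  # assumed single-column schema
    for name in rootnames:
        writer.writerow([name])
    return response
# --- end editor's sketch ---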
    diff --git a/jwql/website/apps/jwql/templates/query_submit.html b/jwql/website/apps/jwql/templates/query_submit.html index 585e55111..d10162b4f 100644 --- a/jwql/website/apps/jwql/templates/query_submit.html +++ b/jwql/website/apps/jwql/templates/query_submit.html @@ -29,10 +29,12 @@

    Images of Queried Instruments

- [3 removed lines; HTML markup stripped in extraction]
+ [5 added lines; HTML markup stripped in extraction]
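# --- Editor's note: illustrative sketch, not part of the patch ---
# The views module (see the views.py hunk at the end of this diff) imports
# django.core.paginator.Paginator, the standard way to page listings like this
# thumbnail grid. Page size and sample data below are arbitrary assumptions.
from django.conf import settings
from django.core.paginator import Paginator

settings.configure()

rootnames = [f"jw01022001001_02101_{i:05d}" for i in range(1, 24)]
paginator = Paginator(rootnames, per_page=10)

page = paginator.get_page(3)  # get_page() clamps out-of-range page numbers
print(paginator.num_pages)  # 3
print(len(page.object_list))  # 3 (the last, partial page)
print(page.has_next(), page.has_previous())  # False True
# --- end editor's sketch ---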

diff --git a/jwql/website/apps/jwql/templates/thumbnails_per_obs.html b/jwql/website/apps/jwql/templates/thumbnails_per_obs.html
index 84e0bc540..6a3fe80bf 100644
--- a/jwql/website/apps/jwql/templates/thumbnails_per_obs.html
+++ b/jwql/website/apps/jwql/templates/thumbnails_per_obs.html
@@ -28,9 +28,12 @@ ({{ inst }} Images; control markup lost in extraction -- three
removed lines become six added ones)
@@ -88,8 +91,8 @@ (Proposal Information for {{ prop }}; two lines replaced ahead
of {% endif %} and {% endblock %})
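The exposure page whose template changes follow leans on RootFileInfo lookups keyed by the group root. A condensed sketch of the two checks view_exposure now performs (the full code appears in views.py later in this diff): the group must exist, and it counts as "viewed" only if every member file does:

    from .models import RootFileInfo  # jwql's Django model, as imported in views.py

    def group_status(group_root):
        files = RootFileInfo.objects.filter(root_name__startswith=group_root)
        if not files.exists():
            return None  # triggers generate_error_view in the real view
        return all(rf.viewed for rf in files)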
diff --git a/jwql/website/apps/jwql/templates/view_exposure.html b/jwql/website/apps/jwql/templates/view_exposure.html
index 7b9d71856..57a8c9398 100644
--- a/jwql/website/apps/jwql/templates/view_exposure.html
+++ b/jwql/website/apps/jwql/templates/view_exposure.html
(much of this template's markup was lost in extraction; hunks are summarized
where tags did not survive)
@@ -8,7 +8,7 @@ {% block content %}
(one container line replaced)
@@ -18,7 +18,7 @@ Exposure {{ group_root }}
 Observation:
 Visit:
(one line replaced)
 Visit Status: {{ basic_info.visit_status }}
@@ -38,6 +38,7 @@ Exposure {{ group_root }}
 Exp Start: {{ expstart_str }}
(one line added)
@@ -48,7 +49,7 @@ Exposure {{ group_root }}
(one line replaced)
@@ -216,7 +217,20 @@ Exposure {{ group_root }}
(one line replaced by a new block of added lines rendering an "Anomalies
Within Group" section from the view's group_anomalies data)
@@ -245,7 +259,9 @@ Submit Anomaly for Group
 {% endfor %}
 {% endfor %}
(one line replaced; the new lines add a comment box)
+    Exposure Comments
+    {{ exposure_comment_form.exp_comment }}
@@ -264,7 +280,7 @@ Submit Anomaly for Group
 {% elif 'uncal' in suffixes %}
 {% elif suffixes|length == 1 %}
(one image line replaced)
 {% else %}
     Unable to show image for: {{ suffixes }}
 {% endif %}
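The template's closing hunk encodes a preview fallback: if no preferred product is available, try 'uncal', then a lone suffix, otherwise report failure. As a Python sketch (the preferred-suffix order here is an assumption; the real order lives in the template and get_available_suffixes):

    def pick_preview_suffix(suffixes, preferred=('rate',)):
        for suffix in preferred:      # assumed preferred products
            if suffix in suffixes:
                return suffix
        if 'uncal' in suffixes:
            return 'uncal'
        if len(suffixes) == 1:
            return suffixes[0]
        return None  # template renders "Unable to show image for: ..."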
diff --git a/jwql/website/apps/jwql/templates/view_image.html b/jwql/website/apps/jwql/templates/view_image.html
index 90dd52c87..52ea8dc29 100644
--- a/jwql/website/apps/jwql/templates/view_image.html
+++ b/jwql/website/apps/jwql/templates/view_image.html
(markup partially lost in extraction; hunks are summarized where tags did not
survive)
@@ -8,7 +8,7 @@ {% block content %}
(one container line replaced)
@@ -48,7 +48,7 @@ {{ file_root }}
(one line replaced)
@@ -85,7 +85,7 @@ {{ file_root }}
(one image line replaced; only the tail of the new line survives:)
+    title="Preview image for {{ file_root }}" width=1000rem height=auto>
@@ -118,9 +118,9 @@ Submit Anomaly
-    {% if form.errors %}
+    {% if anomaly_form.errors %}
-    {% for field in form %}
+    {% for field in anomaly_form %}
     {% for error in field.errors %}
         {{ error|escape }}
@@ -134,14 +134,16 @@ Submit Anomaly
     {{ csrf_input }}
-    {% for field in form %}
+    {% for field in anomaly_form %}
     {% for subwidget in field.subwidgets %}
         {{ subwidget }}
     {% endfor %}
     {% endfor %}
(one line replaced; the new lines add a comment box)
+    Comments
+    {{ comment_form.comment }}
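With the route registered by name in urls.py below, links and tests can resolve it instead of hard-coding the path. A small usage sketch:

    from django.urls import reverse

    csv_url = reverse('query_download')  # -> '/query_download/'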
diff --git a/jwql/website/apps/jwql/urls.py b/jwql/website/apps/jwql/urls.py
index 03b7e8708..27a38296c 100644
--- a/jwql/website/apps/jwql/urls.py
+++ b/jwql/website/apps/jwql/urls.py
@@ -85,6 +85,8 @@
     path('jwqldb/', views.jwqldb_table_viewer, name='jwqldb_table_viewer'),
     path('log_view/', views.log_view, name='log_view'),
     path('query_submit/', views.query_submit, name='query_submit'),
+    path('query_download/', views.query_download, name='query_download'),
+
     re_path(r'^(?P<inst>({}))/$'.format(instruments), views.instrument, name='instrument'),
     re_path(r'^(?P<inst>({}))/archive/$'.format(instruments), views.archived_proposals, name='archive'),
     re_path(r'^(?P<inst>({}))/unlooked/$'.format(instruments), views.unlooked_images, name='unlooked'),
diff --git a/jwql/website/apps/jwql/views.py b/jwql/website/apps/jwql/views.py
index 0ea0281ad..80dafba37 100644
--- a/jwql/website/apps/jwql/views.py
+++ b/jwql/website/apps/jwql/views.py
@@ -44,47 +44,50 @@
 import csv
 import datetime
-import json
 import glob
+import json
 import logging
-import os
 import operator
+import os
 import socket

 from astropy.time import Time
-from bokeh.layouts import layout
 from bokeh.embed import components
+from bokeh.layouts import layout
 from django.core.paginator import Paginator
 from django.http import HttpResponse, JsonResponse
 from django.shortcuts import redirect, render
-import numpy as np
 from sqlalchemy import inspect

 from jwql.database.database_interface import load_connection
 from jwql.utils import monitor_utils
+from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE, QUERY_CONFIG_TEMPLATE, URL_DICT, QueryConfigKeys
 from jwql.utils.interactive_preview_image import InteractivePreviewImg
-from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE, URL_DICT, QUERY_CONFIG_TEMPLATE, QueryConfigKeys
-from jwql.utils.utils import filename_parser, filesystem_path, get_base_url, get_config
-from jwql.utils.utils import get_rootnames_for_instrument_proposal, query_unformat
-
-from .data_containers import build_table
-from .data_containers import get_acknowledgements
-from .data_containers import get_additional_exposure_info
-from .data_containers import get_available_suffixes
-from .data_containers import get_anomaly_form
-from .data_containers import get_dashboard_components
-from .data_containers import get_edb_components
-from .data_containers import get_explorer_extension_names
-from .data_containers import get_header_info
-from .data_containers import get_image_info
-from .data_containers import get_instrument_looks
-from .data_containers import get_rootnames_from_query
-from .data_containers import random_404_page
-from .data_containers import text_scrape
-from .data_containers import thumbnails_ajax
-from .data_containers import thumbnails_query_ajax
-from .forms import JwqlQueryForm
-from .forms import FileSearchForm
+from jwql.utils.utils import filename_parser, get_base_url, get_config, get_rootnames_for_instrument_proposal, query_unformat
+
+from .data_containers import (
+    build_table,
+    get_acknowledgements,
+    get_additional_exposure_info,
+    get_anomaly_form,
+    get_available_suffixes,
+    get_comment_form,
+    get_exp_comment_form,
+    get_dashboard_components,
+    get_edb_components,
+    get_explorer_extension_names,
+    get_group_anomalies,
+    get_header_info,
+    get_image_info,
+    get_instrument_looks,
+    get_rootnames_from_query,
+    random_404_page,
+    text_scrape,
+    thumbnails_ajax,
+    thumbnails_query_ajax,
+)
+from .forms import FileSearchForm, JwqlQueryForm
+

 if not os.environ.get("READTHEDOCS"):
     from .models import RootFileInfo

 from astropy.io import fits
@@ -130,6 +133,7 @@ def jwql_query(request):
             parameters[QueryConfigKeys.DATE_RANGE] = form.cleaned_data['date_range']
             parameters[QueryConfigKeys.PROPOSAL_CATEGORY] = form.cleaned_data['proposal_category']
             parameters[QueryConfigKeys.SORT_TYPE] = form.cleaned_data['sort_type']
+            parameters[QueryConfigKeys.NUM_PER_PAGE] = form.cleaned_data['num_per_page']
             parameters[QueryConfigKeys.ANOMALIES] = all_anomalies
             parameters[QueryConfigKeys.APERTURES] = all_apers
             parameters[QueryConfigKeys.FILTERS] = all_filters
@@ -142,7 +146,13 @@
             # save the query config settings to a session
             request.session['query_config'] = parameters
-            return redirect('/query_submit')
+            # Check if the download button value exists in the POST message (meaning Download was pressed)
+            download_button_value = request.POST.get('download_jwstqueryform', None)
+            if(download_button_value):
+                return redirect('/query_download')
+            else:
+                # submit was pressed go to the query_submit page
+                return redirect('/query_submit')

     context = {'form': form,
                'inst': ''}
@@ -771,6 +781,36 @@ def query_submit(request):
     return render(request, template, context)


+def query_download(request):
+    """Download query results in csv format
+
+    Parameters
+    ----------
+    request : HttpRequest object
+        Incoming request from the webpage.
+
+    Returns
+    -------
+    response : HttpResponse object
+        Outgoing response sent to the webpage (csv file to be downloaded)
+    """
+    parameters = request.session.get("query_config", QUERY_CONFIG_TEMPLATE.copy())
+    filtered_rootnames = get_rootnames_from_query(parameters)
+
+    today = datetime.datetime.now().strftime('%Y%m%d_%H:%M')
+    filename = f'jwql_query_{today}.csv'
+    response = HttpResponse(content_type='text/csv')
+    response['Content-Disposition'] = f'attachment; filename="{filename}"'
+
+    header_row = ["Index", "Name"]
+    writer = csv.writer(response)
+    writer.writerow(header_row)
+    for index, rootname in enumerate(filtered_rootnames):
+        writer.writerow([index, rootname])
+
+    return response
+
+
 def unlooked_images(request, inst):
     """Generate the page listing all unlooked images in the database
@@ -878,7 +918,8 @@ def explore_image(request, inst, file_root, filetype):
     else:
         raise FileNotFoundError(f'WARNING: {full_fits_file} does not exist!')

-    form = get_anomaly_form(request, inst, file_root)
+    anomaly_form = get_anomaly_form(request, inst, file_root)
+    comment_form = get_comment_form(request, file_root)

     context = {'inst': inst,
                'file_root': file_root,
@@ -887,7 +928,8 @@ def explore_image(request, inst, file_root, filetype):
                'extension_groups': extension_groups,
                'extension_ints': extension_ints,
                'base_url': get_base_url(),
-               'form': form}
+               'anomaly_form': anomaly_form,
+               'comment_form': comment_form}

     return render(request, template, context)
@@ -1138,6 +1180,8 @@ def view_exposure(request, inst, group_root):
     # Get the anomaly submission form
     form = get_anomaly_form(request, inst, group_root)
+    group_anomalies = get_group_anomalies(group_root)
+    exposure_comment_form = get_exp_comment_form(request, group_root)

     # if we get to this page without any navigation data,
     # previous/next buttons will be hidden
@@ -1171,6 +1215,8 @@
     # Get our current views RootFileInfo model and send our "viewed/new" information
     root_file_info = RootFileInfo.objects.filter(root_name__startswith=group_root)
+    if len(root_file_info) == 0:
+        return generate_error_view(request, inst, f"No groups starting with {group_root} currently in JWQL database.")
     viewed = all([rf.viewed for rf in root_file_info])

     # Convert expstart from MJD to a date
@@ -1178,7 +1224,14 @@
     # Create one dict of info to show at the top of the page, and another dict of info
     # to show in the collapsible text box.
-    basic_info, additional_info = get_additional_exposure_info(root_file_info, image_info)
+    try:
+        basic_info, additional_info = get_additional_exposure_info(root_file_info, image_info)
+    except FileNotFoundError as e:
+        return generate_error_view(request, inst,
+                                   "Looks like at least one of your files has not yet been ingested into the JWQL database. \
+                                   If this is a newer observation, please wait a few hours and try again. \
+                                   If this observation is over a day old please contact JWQL support.",
+                                   exception_message=f"Received Error: '{e}'")

     # Build the context
     context = {'base_url': get_base_url(),
@@ -1196,7 +1249,9 @@
                'marked_viewed': viewed,
                'expstart_str': expstart_str,
                'basic_info': basic_info,
-               'additional_info': additional_info}
+               'additional_info': additional_info,
+               'group_anomalies': group_anomalies,
+               'exposure_comment_form': exposure_comment_form}

     return render(request, template, context)
@@ -1240,7 +1295,8 @@ def view_image(request, inst, file_root):
                         'Please add them, so that they will appear in a '
                         'consistent order on the webpage.'))

-    form = get_anomaly_form(request, inst, file_root)
+    anomaly_form = get_anomaly_form(request, inst, file_root)
+    comment_form = get_comment_form(request, file_root)

     prop_id = file_root[2:7]
@@ -1290,10 +1346,35 @@
                'num_ints': image_info['num_ints'],
                'available_ints': image_info['available_ints'],
                'total_ints': image_info['total_ints'],
-               'form': form,
+               'anomaly_form': anomaly_form,
+               'comment_form': comment_form,
                'marked_viewed': root_file_info.viewed,
                'expstart_str': expstart_str,
                'basic_info': basic_info,
                'additional_info': additional_info}

     return render(request, template, context)
+
+
+def generate_error_view(request, inst, error_message, exception_message=""):
+    """Generate the error view page
+
+    Parameters
+    ----------
+    request : HttpRequest object
+        Incoming request from the webpage
+    inst : str
+        Name of JWST instrument
+    error_message : str
+        Custom Error Message to be seen in error_view.html
+    exception_message : str
+        If an exception caused this to be generated, pass the exception message along for display
+
+    Returns
+    -------
+    HttpResponse object
+        Outgoing response sent to the webpage
+    """
+    template = 'error_view.html'
+    context = {'base_url': get_base_url(), 'inst': inst, 'error_message': error_message, 'exception_message': exception_message}
+    return render(request, template, context)
diff --git a/jwql/website/jwql_proj/settings.py b/jwql/website/jwql_proj/settings.py
index a8adf9fa3..6568eceeb 100644
--- a/jwql/website/jwql_proj/settings.py
+++ b/jwql/website/jwql_proj/settings.py
@@ -38,7 +38,7 @@
 SECRET_KEY = get_config()['django_secret_key']

 # SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = True
+DEBUG = get_config()['django_debug']

 ALLOWED_HOSTS = ['*']
diff --git a/presentations/jwql_presentation.pdf b/presentations/jwql_presentation.pdf
deleted file mode 100644
index 99641c67b..000000000
Binary files a/presentations/jwql_presentation.pdf and /dev/null differ
diff --git a/presentations/jwql_tips_2019_07.pdf b/presentations/jwql_tips_2019_07.pdf
deleted file mode 100644
index 7f83548b5..000000000
Binary files a/presentations/jwql_tips_2019_07.pdf and /dev/null differ
diff --git a/presentations/jwql_web_app.pdf b/presentations/jwql_web_app.pdf
deleted file mode 100644
index cb059894e..000000000
Binary files a/presentations/jwql_web_app.pdf and /dev/null differ
diff --git a/pyproject.toml b/pyproject.toml
index b17920154..51a66c40b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,53 +3,65 @@
 name = "jwql"
 description = "The James Webb Space Telescope Quicklook Project"
 readme = "README.md"
 authors = [
-  { name = "Matthew Bourque" },
-  { name = "Lauren Chambers" },
-  { name = "Misty Cracraft" },
-  { name = "Mike Engesser" },
-  { name = "Mees Fix" },
-  { name = "Joe Filippazzo" },
-  { name = "Bryan Hilbert" },
+    { name = "Matthew Bourque" },
+    { name = "Lauren Chambers" },
+    { name = "Misty Cracraft" },
+    { name = "Mike Engesser" },
+    { name = "Mees Fix" },
+    { name = "Joe Filippazzo" },
+    { name = "Bryan Hilbert" },
+]
+keywords = [
+    "astronomy",
+    "python",
+]
+classifiers = [
+    "Programming Language :: Python",
 ]
-keywords = ["astronomy", "python"]
-classifiers = ["Programming Language :: Python"]
 dependencies = [
-  "asdf",
-  "astropy",
-  "astroquery",
-  "bokeh>=3",
-  "crds",
-  "cryptography",
-  "django",
-  "inflection",
-  "jinja2",
-  "jsonschema",
-  "jwst",
-  "jwst_reffiles",
-  "matplotlib",
-  "numpy",
-  "numpydoc",
-  "pandas",
-  "psycopg2-binary",
-  "pysiaf",
-  "pyvo",
-  "scipy",
-  "sqlalchemy",
-  "stdatamodels",
-  "wtforms",
+    "asdf>=3.1.0,<4",
+    "astropy>=6.0,<7",
+    "astroquery>=0.4.7,<0.5.0",
+    "beautifulsoup4>=4.12.3,<5",
+    "bokeh>=3.4.0,<4",
+    "celery>=5.3.6,<6",
+    "crds>=11.17.19,<12",
+    "django>=5.0.3,<6",
+    "gunicorn>=22.0.0,<23.0.0",
+    "inflection>=0.5.1,<0.6",
+    "jsonschema>=4.21.1,<5",
+    "jwst>=1.13.4,<2",
+    "jwst_backgrounds>=1.2.0,<1.3.0",
+    "jwst_reffiles>=1.0.1",
+    "matplotlib>=3.8.3,<4",
+    "numpy>=1.26.4,<2",
+    "pandas>=2.2.1,<3",
+    "psycopg2-binary>=2.9.9,<3",
+    "pysiaf>=0.22.0",
+    "pyvo>=1.5.1,<2",
+    "redis>=5.0.3,<6",
+    "scipy>=1.12.0,<2",
+    "selenium>=4.18.1,<5",
+    "sqlalchemy>=2.0.29,<3",
+    "wtforms>=3.1.2,<4",
+]
+dynamic = [
+    "version",
 ]
-dynamic = ["version"]
+requires-python = ">=3.10"

 [project.optional-dependencies]
 test = [
-  "pytest",
-  "pytest-cov",
-  "pytest-mock",
+    "pytest",
+    "pytest-cov",
+    "pytest-mock",
 ]
 docs = [
-  "sphinx",
-  "sphinx_rtd_theme",
-  "stsci_rtd_theme",
+    "numpydoc",
+    "sphinx",
+    "sphinx_rtd_theme",
+    "stsci_rtd_theme",
+    "sphinx_automodapi",
 ]
@@ -57,7 +69,12 @@
 file = "LICENSE"
 content-type = "text/plain"

 [build-system]
-requires = ["setuptools>=68.0.0", "numpy", "wheel", "setuptools_scm"]
+requires = [
+    "setuptools>=68.0.0",
+    "numpy",
+    "wheel",
+    "setuptools_scm",
+]
 build-backend = "setuptools.build_meta"

 [tool.setuptools]
@@ -72,4 +89,6 @@
 junit_family = "xunit2"

 [tool.pytest.ini_options]
-norecursedirs = ["jwql/website/apps/jwql/static"]
\ No newline at end of file
+norecursedirs = [
+    "jwql/website/apps/jwql/static",
+]
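One quick way to confirm an installed environment satisfies the new bounds above (a convenience sketch, not part of the project):

    from importlib.metadata import version

    for pkg in ('astropy', 'bokeh', 'django', 'numpy'):
        print(pkg, version(pkg))  # compare against the ranges in pyproject.toml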
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 0edf7d9ea..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-astropy==5.3.3
-astroquery==0.4.6
-bandit==1.7.5
-beautifulsoup4==4.12.2
-bokeh==3.3.0
-celery==5.3.4
-cryptography==41.0.7
-django==4.2.5
-inflection==0.5.1
-ipython==8.16.1
-jinja2==3.1.2
-jsonschema==4.19.1
-jwst==1.12.3
-jwst_backgrounds==1.2.0
-matplotlib==3.8.0
-nodejs==20.8.0
-numpy==1.25.2
-numpydoc==1.5.0
-pandas==2.1.1
-psycopg2-binary==2.9.7
-pysiaf==0.20.0
-pysqlite3==0.5.2
-pytest==7.4.2
-pytest-cov==4.1.0
-pytest-mock==3.11.1
-pyvo==1.4.2
-pyyaml==6.0.1
-redis==5.0.0
-ruff==0.1.6
-scipy==1.11.4
-selenium==4.13.0
-setuptools==68.2.2
-sphinx==7.2.6
-sphinx_rtd_theme==1.3.0
-sqlalchemy==2.0.23
-stdatamodels==1.8.3
-stsci_rtd_theme==1.0.0
-twine==4.0.2
-vine==5.1.0
-wtforms==3.0.1
-git+https://github.com/spacetelescope/jwst_reffiles#egg=jwst_reffiles
diff --git a/rtd_requirements.txt b/rtd_requirements.txt
deleted file mode 100644
index abac6b128..000000000
--- a/rtd_requirements.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-sphinx_automodapi>=0.15.0
-bokeh==2.4.3
-celery==5.3.4
-cython>=3.0.0
-django==4.2.5
-docutils>=0.18.1
-jwst==1.12.3
-pygments==2.16.1
-pytest==7.4.2
-redis==5.0.0
-selenium==4.13.0
-sphinx==7.2.6
-stsci_rtd_theme==1.0.0
-tomli==2.0.1
-git+https://github.com/spacetelescope/jwst_reffiles
diff --git a/style_guide/example.py b/style_guide/example.py
index 47db3b3a0..d145529de 100644
--- a/style_guide/example.py
+++ b/style_guide/example.py
@@ -73,7 +73,7 @@
 # Global variables should be avoided, but if used should be named with
 # all-caps
-A_GLOBAL_VARIABLE = 'foo'  # type: str
+A_GLOBAL_VARIABLE = "foo"  # type: str

 @log_fail
@@ -92,14 +92,14 @@ def my_main_function(path: str, filter: str) -> None:
         The filter to process (e.g. "F606W").
     """

-    logging.info('Using {} as an input file'.format(path))
+    logging.info("Using {} as an input file".format(path))

     an_int = 1  # type: int
     a_float = 3.14  # type: float
     a_bool = True  # type: bool
-    a_list = ['Dog', 'Cat', 'Turtle', False, 7]  # type: List[Union[str, bool, int]]
-    a_tuple = ('Dog', 'Cat', 'Turtle', False, 7)  # type: Tuple[str, str, str, bool, int]
-    a_dict = {'key1': 'value1', 'key2': 'value2'}  # type: Dict[str, str]
+    a_list = ["Dog", "Cat", "Turtle", False, 7]  # type: List[Union[str, bool, int]]
+    a_tuple = ("Dog", "Cat", "Turtle", False, 7)  # type: Tuple[str, str, str, bool, int]
+    a_dict = {"key1": "value1", "key2": "value2"}  # type: Dict[str, str]
     an_obj = object()  # type: object

     result = some_other_function(an_int, a_float, a_bool, a_list, a_tuple, a_dict, an_obj)  # type: Optional[int]
@@ -117,21 +117,13 @@ def parse_args() -> argparse.Namespace:
     """

     # Create help strings
-    path_help = 'The path to the input file.'  # type: str
+    path_help = "The path to the input file."  # type: str
     filter_help = 'The filter to process (e.g. "F606W").'
  # type: str

     # Add arguments
     parser = argparse.ArgumentParser()  # type: argparse.ArgumentParser
-    parser.add_argument('path',
-                        type=str,
-                        default=os.getcwd(),
-                        help=path_help)
-    parser.add_argument('-f --filter',
-                        dest='filter',
-                        type=str,
-                        required=False,
-                        default='F606W',
-                        help=filter_help)
+    parser.add_argument("path", type=str, default=os.getcwd(), help=path_help)
+    parser.add_argument("-f --filter", dest="filter", type=str, required=False, default="F606W", help=filter_help)

     # Parse args
     args = parser.parse_args()  # type: argparse.Namespace
@@ -140,8 +132,7 @@
 @log_timing
-def some_other_function(an_int: int, a_float: float, a_bool: bool, a_list: List[Any],
-                        a_tuple: Tuple[Any], a_dict: Dict[Any, Any], an_obj: object) -> int:
+def some_other_function(an_int: int, a_float: float, a_bool: bool, a_list: List[Any], a_tuple: Tuple[Any], a_dict: Dict[Any, Any], an_obj: object) -> int:
     """This function just does a bunch of nonsense.

     But it serves as a decent example of some things.
@@ -170,8 +161,8 @@
     """

     # File I/O should be handeled with 'with open' when possible
-    with open('my_file', 'w') as f:
-        f.write('My favorite integer is {}'.format(an_int))
+    with open("my_file", "w") as f:
+        f.write("My favorite integer is {}".format(an_int))

     # Operators should be separated by spaces
     logging.info(a_float + a_float)
@@ -179,10 +170,9 @@
     return an_int


-if __name__ == '__main__':
-
+if __name__ == "__main__":
     # Configure logging
-    module = os.path.basename(__file__).strip('.py')
+    module = os.path.basename(__file__).strip(".py")
     configure_logging(module)

     args = parse_args()  # type: argparse.Namespace
diff --git a/style_guide/typing_demo/typing_demo_1.py b/style_guide/typing_demo/typing_demo_1.py
index 33c11e397..ad4bc30d0 100644
--- a/style_guide/typing_demo/typing_demo_1.py
+++ b/style_guide/typing_demo/typing_demo_1.py
@@ -7,8 +7,7 @@
 """

 import sys
-from typing import (List, Set, Dict, Tuple, Union, Optional, Callable,
-                    Iterable, Any)
+from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union

 assert sys.version_info >= (3, 6)  # PEP 526 added variable annotations
@@ -18,7 +17,7 @@
 a_string: str = "jwql"
 a_list: List[int] = [1]
 a_set: Set[int] = {1, 2, 3}
-a_dict: Dict[str, bool] = {'jwql': True}  # Have to specify both keys and values
+a_dict: Dict[str, bool] = {"jwql": True}  # Have to specify both keys and values

 # For python versions prior to 3.6, the variable annotation syntax uses comments:
 # annotated_variable = 1  # type: int
@@ -68,6 +67,7 @@ def a_generator() -> Iterable[int]:
 # Type annotations are stored in __annotations__, either as a local variable
 # or as an object attribute.

+
 def print_annotations(arg: Any) -> bool:
     if not hasattr(arg, "__annotations__"):
         print("Sorry, that argument doesn't have its own __annotations__.")
@@ -76,8 +76,7 @@ def print_annotations(arg: Any) -> bool:
     return bool(arg.__annotations__)


-for name in ["an_integer", "a_generic_function", "two_arg_function",
-             "func_alias", "anon_func", "a_generator"]:
+for name in ["an_integer", "a_generic_function", "two_arg_function", "func_alias", "anon_func", "a_generator"]:
     var = locals()[name]
     print(f"Annotations for {name}:")
     if not print_annotations(var):