Merge branch 'main' into script/preview
zacharyburnett authored Oct 31, 2023
2 parents 0d6f0d6 + 67f2c8f commit 2ad995d
Showing 10 changed files with 124 additions and 94 deletions.
98 changes: 39 additions & 59 deletions .github/workflows/data.yml
@@ -1,88 +1,70 @@
name: check and update webbpsf and crds cache

on:
workflow_call:
outputs:
crds_context:
value: ${{ jobs.data.outputs.crds_context }}
crds_path:
value: ${{ jobs.data.outputs.crds_path }}
crds_server:
value: ${{ jobs.data.outputs.crds_server }}
webbpsf_hash:
value: ${{ jobs.data.outputs.webbpsf_hash }}
path:
value: ${{ jobs.path.outputs.path }}
webbpsf_path:
value: ${{ jobs.data.outputs.webbpsf_path }}
value: ${{ jobs.webbpsf_path.outputs.path }}
webbpsf_hash:
value: ${{ jobs.webbpsf_data.outputs.hash }}
workflow_dispatch:
schedule:
- cron: "42 4 * * 3"

env:
DATA_PATH: /tmp/data

jobs:
webbpsf-data:
path:
runs-on: ubuntu-latest
outputs:
path: ${{ steps.path.outputs.path }}
steps:
- id: path
run: echo "path=${{ env.DATA_PATH }}" >> $GITHUB_OUTPUT
webbpsf_path:
needs: [ path ]
runs-on: ubuntu-latest
outputs:
path: ${{ steps.path.outputs.path }}
steps:
- id: path
run: echo "path=${{ env.DATA_PATH }}/webbpsf-data" >> $GITHUB_OUTPUT
webbpsf_data:
if: (github.repository == 'spacetelescope/romancal' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'update webbpsf data')))
name: fetch, check, and possibly update webbpsf data cache
needs: [ webbpsf_path ]
name: download and cache WebbPSF data
runs-on: ubuntu-latest
env:
DATA_PATH: /tmp/data
WEBBPSF_DATA_URL: https://stsci.box.com/shared/static/qxpiaxsjwo15ml6m4pkhtk36c9jgj70k.gz
outputs:
path: ${{ steps.cache_path.outputs.path }}
hash: ${{ steps.data_hash.outputs.hash }}
steps:
- id: cache_path
run: |
echo "path=${{ env.DATA_PATH }}" >> $GITHUB_OUTPUT
- run: mkdir -p tmp/data
- run: wget ${{ env.WEBBPSF_DATA_URL }} -O tmp/webbpsf-data.tar.gz
- id: data_hash
run: |
mkdir -p tmp/data
wget ${{ env.WEBBPSF_DATA_URL }} -O tmp/webbpsf-data.tar.gz
echo "hash=$( shasum tmp/webbpsf-data.tar.gz | cut -d ' ' -f 1 )" >> $GITHUB_OUTPUT
run: echo "hash=$( shasum tmp/webbpsf-data.tar.gz | cut -d ' ' -f 1 )" >> $GITHUB_OUTPUT
- id: cache_check
uses: actions/cache@v3
with:
path: ${{ steps.cache_path.outputs.path }}
path: ${{ needs.webbpsf_path.outputs.path }}
key: webbpsf-${{ steps.data_hash.outputs.hash }}
- if: ${{ steps.cache_check.outputs.cache-hit != 'true' }}
name: Initialize cache
run: |
mkdir -p ${{ steps.cache_path.outputs.path }}
tar -xzvf tmp/webbpsf-data.tar.gz -C ${{ steps.cache_path.outputs.path }}
data:
needs:
[webbpsf-data]
run: mkdir -p ${{ env.DATA_PATH }}
- if: ${{ steps.cache_check.outputs.cache-hit != 'true' }}
run: tar -xzvf tmp/webbpsf-data.tar.gz -C ${{ env.DATA_PATH }}
webbpsf_hash:
needs: [ webbpsf_path, webbpsf_data ]
# run data job if webbpsf-data succeeds or is skipped. This allows
# this data job to always fetch the crds context even if the webbpsf data fetching
# was skipped (and an existing cache will be used for the webbpsf data).
if: always() && (needs.webbpsf-data.result == 'success' || needs.webbpsf-data.result == 'skipped')
name: retrieve current CRDS context, and WebbPSF data
if: always() && (needs.webbpsf_data.result == 'success' || needs.webbpsf_data.result == 'skipped')
name: retrieve latest data cache key
runs-on: ubuntu-latest
env:
OBSERVATORY: roman
CRDS_SERVER_URL: https://roman-crds.stsci.edu
CRDS_PATH: /tmp/data
GH_TOKEN: ${{ github.token }}
outputs:
crds_context: ${{ steps.crds_context.outputs.pmap }}
crds_path: ${{ steps.crds_path.outputs.path }}
crds_server: ${{ steps.crds_server.outputs.url }}
webbpsf_hash: ${{ steps.webbpsf_hash.outputs.hash }}
webbpsf_path: ${{ steps.webbpsf_path.outputs.path }}
hash: ${{ steps.hash.outputs.hash }}
steps:
# crds:
- id: crds_context
run: >
echo "pmap=$(
curl -s -X POST -d '{"jsonrpc": "1.0", "method": "get_default_context", "params": ["${{ env.OBSERVATORY }}"], "id": 1}' ${{ env.CRDS_SERVER_URL }}/json/ |
python -c "import sys, json; print(json.load(sys.stdin)['result'])"
)" >> $GITHUB_OUTPUT
# Get default CRDS_CONTEXT without installing crds client
# See https://hst-crds.stsci.edu/static/users_guide/web_services.html#generic-request
- id: crds_path
run: echo "path=${{ env.CRDS_PATH }}" >> $GITHUB_OUTPUT
- id: crds_server
run: echo "url=${{ env.CRDS_SERVER_URL }}" >> $GITHUB_OUTPUT
# webbpsf:
- id: webbpsf_hash
- id: hash
run: |
# use actions/gh-actions-cache to allow filtering by key
gh extension install actions/gh-actions-cache
@@ -93,5 +75,3 @@ jobs:
echo "HASH=$HASH"
echo "hash=$HASH" >> $GITHUB_OUTPUT
if [ "$HASH" == '' ]; then exit 1; fi
- id: webbpsf_path
run: echo "path=${{ steps.crds_path.outputs.path }}/webbpsf-data" >> $GITHUB_OUTPUT
17 changes: 9 additions & 8 deletions .github/workflows/roman_ci.yml
@@ -20,31 +20,32 @@ concurrency:
cancel-in-progress: true

jobs:
data:
uses: ./.github/workflows/data.yml
check:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
with:
envs: |
- linux: check-dependencies
- linux: build-dist
data:
uses: ./.github/workflows/data.yml
crds_contexts:
uses: spacetelescope/crds/.github/workflows/contexts.yml@master
test:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
needs: [ data ]
needs: [ data, crds_contexts ]
with:
setenv: |
WEBBPSF_PATH: ${{ needs.data.outputs.webbpsf_path }}
CRDS_PATH: ${{ needs.data.outputs.crds_path }}
CRDS_SERVER_URL: ${{ needs.data.outputs.crds_server }}
CRDS_PATH: ${{ needs.data.outputs.path }}/crds_cache
CRDS_SERVER_URL: https://roman-crds.stsci.edu
CRDS_CLIENT_RETRY_COUNT: 3
CRDS_CLIENT_RETRY_DELAY_SECONDS: 20
DD_SERVICE: romancal
DD_ENV: ci
DD_GIT_REPOSITORY_URL: ${{ github.repositoryUrl }}
DD_GIT_COMMIT_SHA: ${{ github.sha }}
DD_GIT_BRANCH: ${{ github.ref_name }}
cache-path: ${{ needs.data.outputs.crds_path }}
cache-key: data-${{ needs.data.outputs.webbpsf_hash }}-${{ needs.data.outputs.crds_context }}
cache-path: ${{ needs.data.outputs.path }}
cache-key: data-${{ needs.data.outputs.webbpsf_hash }}-${{ needs.crds_contexts.outputs.roman }}
cache-restore-keys: webbpsf-${{ needs.data.outputs.webbpsf_hash }}
envs: |
- linux: py39-oldestdeps-webbpsf-cov
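The composite `cache-key` above pairs the WebbPSF tarball hash with the current CRDS context, while `cache-restore-keys` lets a run with a new context fall back to any cache matching the WebbPSF hash prefix. A toy Python illustration of the prefix-matching rule `actions/cache` applies on a miss (simplified; the real service also prefers the most recently created match):

```python
from typing import List, Optional


def restore_cache(
    exact_key: str, restore_keys: List[str], existing: List[str]
) -> Optional[str]:
    # actions/cache first looks for an exact match on the primary key...
    if exact_key in existing:
        return exact_key
    # ...then falls back to the first restore key that prefix-matches an
    # existing cache entry.
    for prefix in restore_keys:
        for key in existing:
            if key.startswith(prefix):
                return key
    return None


# A run with a new CRDS context still reuses the WebbPSF data cache:
print(restore_cache(
    "data-abc123-roman_0099.pmap",
    ["webbpsf-abc123"],
    ["data-abc123-roman_0098.pmap", "webbpsf-abc123"],
))  # -> "webbpsf-abc123"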
14 changes: 8 additions & 6 deletions .github/workflows/roman_ci_cron.yaml
@@ -26,18 +26,20 @@ jobs:
data:
if: (github.repository == 'spacetelescope/romancal' && (github.event_name == 'schedule' || github.event_name == 'push' || github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'run scheduled tests')))
uses: ./.github/workflows/data.yml
crds_contexts:
uses: spacetelescope/crds/.github/workflows/contexts.yml@master
test:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main
needs: [ data ]
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
needs: [ data, crds_contexts ]
with:
setenv: |
WEBBPSF_PATH: ${{ needs.data.outputs.webbpsf_path }}
CRDS_PATH: ${{ needs.data.outputs.crds_path }}
CRDS_SERVER_URL: ${{ needs.data.outputs.crds_server }}
CRDS_PATH: ${{ needs.data.outputs.path }}/crds_cache
CRDS_SERVER_URL: https://roman-crds.stsci.edu
CRDS_CLIENT_RETRY_COUNT: 3
CRDS_CLIENT_RETRY_DELAY_SECONDS: 20
cache-path: ${{ needs.data.outputs.crds_path }}
cache-key: data-${{ needs.data.outputs.webbpsf_hash }}-${{ needs.data.outputs.crds_context }}
cache-path: ${{ needs.data.outputs.path }}
cache-key: data-${{ needs.data.outputs.webbpsf_hash }}-${{ needs.crds_contexts.outputs.roman }}
cache-restore-keys: webbpsf-${{ needs.data.outputs.webbpsf_hash }}
envs: |
- macos: py39-webbpsf
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -32,7 +32,7 @@ repos:
args: ["--py38-plus"]

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.1.1'
rev: 'v0.1.3'
hooks:
- id: ruff
args: ["--fix"]
@@ -43,7 +43,7 @@ repos:
- id: isort

- repo: https://github.com/psf/black
rev: 23.10.0
rev: 23.10.1
hooks:
- id: black

3 changes: 2 additions & 1 deletion CHANGES.rst
@@ -14,6 +14,8 @@ dark
general
-------

- Update pipeline code to run through tweakreg with single files and associations [#960]

- Update regression tests with new data and update ramp fitting tests to use ols_cas22 [#911]

- Fix bug with ``ModelContainer.get_crds_parameters`` being a property not a method [#846]
@@ -42,7 +44,6 @@ ramp_fitting

- Make uneven ramp fitting the default [#877]


- Update Ramp fitting code to support the ``stcal`` changes to the ramp fitting
interface which were necessary to support jump detection on uneven ramps [#933]

1 change: 1 addition & 0 deletions pyproject.toml
@@ -73,6 +73,7 @@ sdp = [

[project.entry-points]
'stpipe.steps' = { romancal = 'romancal.stpipe.integration:get_steps' }
pytest11 = {webbpsf = 'pytest_plugin.webbpsf_plugin'}

[project.scripts]
roman_static_preview = 'romancal.scripts.preview:command'
Empty file added pytest_plugin/__init__.py
Empty file.
File renamed without changes.
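The new `pytest11` entry point registers `pytest_plugin.webbpsf_plugin` so pytest loads it automatically at startup. The module's contents are not shown in this diff; a hypothetical sketch of the shape such a plugin can take (the option and fixture here are illustrative assumptions, not the actual implementation):

```python
# pytest_plugin/webbpsf_plugin.py -- hypothetical sketch; the real module's
# contents are not part of this diff.
import os

import pytest


def pytest_addoption(parser):
    # Illustrative option letting test runs point at a WebbPSF data cache.
    parser.addoption(
        "--webbpsf-path",
        default=os.environ.get("WEBBPSF_PATH"),
        help="path to the WebbPSF data cache",
    )


@pytest.fixture
def webbpsf_path(request):
    # Skip tests that need the data cache when it is not configured.
    path = request.config.getoption("--webbpsf-path")
    if path is None:
        pytest.skip("WEBBPSF_PATH not configured")
    return path
```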
50 changes: 34 additions & 16 deletions romancal/pipeline/exposure_pipeline.py
@@ -9,9 +9,11 @@

# step imports
from romancal.assign_wcs import AssignWcsStep
from romancal.associations.exceptions import AssociationNotValidError
from romancal.associations.load_as_asn import LoadAsLevel2Asn

# from romancal.associations.exceptions import AssociationNotValidError
# from romancal.associations.load_as_asn import LoadAsLevel2Asn
from romancal.dark_current import DarkCurrentStep
from romancal.datamodels import ModelContainer
from romancal.dq_init import dq_init_step
from romancal.flatfield import FlatFieldStep
from romancal.jump import jump_step
@@ -23,6 +25,7 @@
from romancal.refpix import RefPixStep
from romancal.saturation import SaturationStep
from romancal.source_detection import SourceDetectionStep
from romancal.tweakreg import TweakRegStep

from ..stpipe import RomanPipeline

@@ -62,6 +65,7 @@ class ExposurePipeline(RomanPipeline):
"flatfield": FlatFieldStep,
"photom": PhotomStep,
"source_detection": SourceDetectionStep,
"tweakreg": TweakRegStep,
}

# start the actual processing
@@ -85,22 +89,21 @@ def process(self, input):
return

if file_type == "asn":
try:
asn = LoadAsLevel2Asn.load(input, basename=self.output_file)
except AssociationNotValidError:
log.debug("Error opening file:")
return
asn = ModelContainer.read_asn(input)

# Build a list of observations to process
expos_file = []
n_members = 0
if file_type == "asdf":
expos_file = [input]
elif file_type == "asn":
for product in asn["products"]:
n_members = len(product["members"])
for member in product["members"]:
expos_file.append(member["expname"])

results = []
tweakreg_input = ModelContainer()
for in_file in expos_file:
if isinstance(in_file, str):
input_filename = basename(in_file)
@@ -117,7 +120,6 @@
if input_filename:
result.meta.filename = input_filename
result = self.saturation(result)
# pdb.set_trace()

# Test for fully saturated data
if is_fully_saturated(result):
@@ -149,29 +151,45 @@
result = self.dark_current(result)
result = self.jump(result)
result = self.rampfit(result)

result = self.assign_wcs(result)
if result.meta.exposure.type == "WFI_IMAGE":
result = self.flatfield(result)
else:
log.info("Flat Field step is being SKIPPED")
result.meta.cal_step.flat_field = "SKIPPED"

if result.meta.exposure.type == "WFI_IMAGE":
result = self.flatfield(result)
result = self.photom(result)
result = self.source_detection(result)
if file_type == "asn":
tweakreg_input.append(result)
log.info(
f"Number of models to tweakreg: {len(tweakreg_input._models), n_members}"
)
else:
log.info("Photom and source detection steps are being SKIPPED")
log.info("Flat Field step is being SKIPPED")
log.info("Photom step is being SKIPPED")
log.info("Source Detection step is being SKIPPED")
log.info("Tweakreg step is being SKIPPED")
result.meta.cal_step.flat_field = "SKIPPED"
result.meta.cal_step.photom = "SKIPPED"
result.meta.cal_step.source_detection = "SKIPPED"
result.meta.cal_step.tweakreg = "SKIPPED"

# setup output_file for saving
self.setup_output(result)
log.info("Roman exposure calibration pipeline ending...")

self.output_use_model = True
results.append(result)

# Now that all the exposures are collated, run tweakreg
# Note: this does not cover the case where the asn mixes imaging and spectral
# observations. This should not occur on-prem
if result.meta.exposure.type == "WFI_IMAGE":
if file_type == "asdf":
mc_result = self.tweakreg([result])
result = mc_result._models.pop()
if file_type == "asn":
result = self.tweakreg(tweakreg_input)

log.info("Roman exposure calibration pipeline ending...")

return results

def setup_output(self, input):
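With these changes, `ExposurePipeline` accepts either a single ASDF exposure or a Level-2 association, accumulating association members in a `ModelContainer` and running `TweakRegStep` over them at the end. A hedged usage sketch (the filenames are illustrative; `call` is the standard stpipe entry point):

```python
from romancal.pipeline.exposure_pipeline import ExposurePipeline

# Single exposure: tweakreg runs on a one-element model list.
results = ExposurePipeline.call("r0000101001001001001_0001_wfi01_uncal.asdf")

# Association: every member is calibrated, then all members are
# aligned together by tweakreg before the results are returned.
results = ExposurePipeline.call("r0000101001001001001_asn.json")
```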