diff --git a/.copier-answers.yml b/.copier-answers.yml
new file mode 100644
index 0000000..acaf4d5
--- /dev/null
+++ b/.copier-answers.yml
@@ -0,0 +1,16 @@
+# Changes here will be overwritten by Copier
+_commit: v1.4.1
+_src_path: gh:lincc-frameworks/python-project-template
+author_email: maxwest@uw.edu
+author_name: Maxine West
+create_example_module: false
+custom_install: true
+include_docs: true
+include_notebooks: true
+mypy_type_checking: none
+package_name: koffi
+preferred_linter: black
+project_license: BSD
+project_name: koffi
+use_gitlfs: none
+use_isort: true
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000..76e043c
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,63 @@
+
+
+## Change Description
+
+- [ ] My PR includes a link to the issue that I am addressing
+
+
+
+## Solution Description
+
+
+
+
+## Code Quality
+- [ ] I have read the Contribution Guide
+- [ ] My code follows the code style of this project
+- [ ] My code builds (or compiles) cleanly without any errors or warnings
+- [ ] My code contains relevant comments and necessary documentation
+
+## Project-Specific Pull Request Checklists
+
+
+### Bug Fix Checklist
+- [ ] My fix includes a new test that breaks as a result of the bug (if possible)
+- [ ] My change includes a breaking change
+  - [ ] My change includes backwards compatibility and deprecation warnings (if possible)
+
+### New Feature Checklist
+- [ ] I have added or updated the docstrings associated with my feature using the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html)
+- [ ] I have updated the tutorial to highlight my new feature (if appropriate)
+- [ ] I have added unit/End-to-End (E2E) test cases to cover my new feature
+- [ ] My change includes a breaking change
+  - [ ] My change includes backwards compatibility and deprecation warnings (if possible)
+
+### Documentation Change Checklist
+- [ ] Any updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html)
+
+### Build/CI Change Checklist
+- [ ] If required or optional dependencies have changed (including version numbers), I have updated the README to reflect this
+- [ ] If this is a new CI setup, I have added the associated badge to the README
+
+
+
+### Other Change Checklist
+- [ ] Any new or updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html).
+- [ ] I have updated the tutorial to highlight my new feature (if appropriate)
+- [ ] I have added unit/End-to-End (E2E) test cases to cover any changes
+- [ ] My change includes a breaking change
+  - [ ] My change includes backwards compatibility and deprecation warnings (if possible)
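
The `.copier-answers.yml` above records the template version (`_commit: v1.4.1`) so the project can later be re-synced with the upstream template. A minimal sketch of that update flow, assuming the `copier` CLI is installed; it re-applies the template at its latest tag, reusing the recorded answers, and leaves the result as ordinary working-tree edits to review:

    pip install copier
    copier update
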
diff --git a/.github/workflows/build-documentation.yml b/.github/workflows/build-documentation.yml
new file mode 100644
index 0000000..e329f57
--- /dev/null
+++ b/.github/workflows/build-documentation.yml
@@ -0,0 +1,34 @@
+# This workflow will install Python dependencies, build the package and then build the documentation.
+
+name: Build documentation
+
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python 3.10
+      uses: actions/setup-python@v4
+      with:
+        python-version: '3.10'
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        python -m pip install --upgrade pip
+        if [ -f docs/requirements.txt ]; then pip install -r docs/requirements.txt; fi
+        pip install .
+    - name: Install notebook requirements
+      run: |
+        sudo apt-get install pandoc
+    - name: Build docs
+      run: |
+        sphinx-build -T -E -b html -d docs/build/doctrees ./docs docs/build/html
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
new file mode 100644
index 0000000..dfcf56b
--- /dev/null
+++ b/.github/workflows/linting.yml
@@ -0,0 +1,36 @@
+# This workflow will install Python dependencies, then perform static linting analysis.
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Lint
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        python -m pip install --upgrade pip
+        pip install .
+        pip install .[dev]
+        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+    - name: Analyze code with linter
+
+      uses: psf/black@stable
+      with:
+        src: ./src
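
The lint workflow above delegates to the psf/black GitHub Action, which runs Black in check mode against `./src`. A sketch of the equivalent local invocation (Black picks up the line length from `[tool.black]` in `pyproject.toml`, further down in this diff):

    pip install black
    black --check ./src
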
diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml
new file mode 100644
index 0000000..5367eb6
--- /dev/null
+++ b/.github/workflows/publish-to-pypi.yml
@@ -0,0 +1,39 @@
+# This workflow will upload a Python Package using Twine when a release is created
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+name: Upload Python Package
+
+on:
+  release:
+    types: [published]
+
+permissions:
+  contents: read
+
+jobs:
+  deploy:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python
+      uses: actions/setup-python@v4
+      with:
+        python-version: '3.10'
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install build
+    - name: Build package
+      run: python -m build
+    - name: Publish package
+      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+      with:
+        user: __token__
+        password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.github/workflows/smoke-test.yml b/.github/workflows/smoke-test.yml
new file mode 100644
index 0000000..928fdca
--- /dev/null
+++ b/.github/workflows/smoke-test.yml
@@ -0,0 +1,38 @@
+# This workflow will run daily at 06:45.
+# It will install Python dependencies and run tests with a variety of Python versions.
+# See documentation for help debugging smoke test issues:
+# https://lincc-ppt.readthedocs.io/en/latest/practices/ci_testing.html#version-culprit
+
+name: Unit test smoke test
+
+on:
+  schedule:
+    - cron: 45 6 * * *
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        python -m pip install --upgrade pip
+        pip install .
+        pip install .[dev]
+        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+    - name: List dependencies
+      run: |
+        pip list
+    - name: Run unit tests with pytest
+      run: |
+        python -m pytest tests
diff --git a/.github/workflows/testing-and-coverage.yml b/.github/workflows/testing-and-coverage.yml
new file mode 100644
index 0000000..8b5fd56
--- /dev/null
+++ b/.github/workflows/testing-and-coverage.yml
@@ -0,0 +1,37 @@
+# This workflow will install Python dependencies, run tests and report code coverage with a variety of Python versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Unit test and code coverage
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v4
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        python -m pip install --upgrade pip
+        pip install .
+        pip install .[dev]
+        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+    - name: Run unit tests with pytest
+      run: |
+        python -m pytest tests --cov=koffi --cov-report=xml
+    - name: Upload coverage report to codecov
+      uses: codecov/codecov-action@v3
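
All three workflows install the package plus its dev extras before running anything. The coverage run can be reproduced locally with the same commands the testing-and-coverage workflow uses (quote the extras on macOS/zsh, as the pyproject comment later in this diff notes):

    pip install '.[dev]'
    python -m pytest tests --cov=koffi --cov-report=xml
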
diff --git a/.gitignore b/.gitignore
index a2b96fa..6f294ff 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,11 +1,143 @@
-.DS_Store
-KOFFI.egg-info/
+# Byte-compiled / optimized / DLL files
 __pycache__/
-koffi_tools/__pycache__/
-tests/__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
 build/
+develop-eggs/
 dist/
-.pytest_cache
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+_version.py
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
 .coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
 
-*.py[cod]
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+_readthedocs/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# vscode
+.vscode/
+
+# dask
+dask-worker-space/
+
+# tmp directory
+tmp/
+
+# Mac OS
+.DS_Store
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..d08084b
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,110 @@
+repos:
+
+    # Compare the local template version to the latest remote template version
+    # This hook should always pass. It will print a message if the local version
+    # is out of date.
+  - repo: https://github.com/lincc-frameworks/pre-commit-hooks
+    rev: v0.1.1
+    hooks:
+      - id: check-lincc-frameworks-template-version
+        name: Check template version
+        description: Compare current template version against latest
+        verbose: true
+
+    # Clear output from jupyter notebooks so that only the input cells are committed.
+  - repo: local
+    hooks:
+      - id: jupyter-nb-clear-output
+        name: Clear output from Jupyter notebooks
+        description: Clear output from Jupyter notebooks.
+        files: \.ipynb$
+        stages: [commit]
+        language: system
+        entry: jupyter nbconvert --clear-output
+
+    # Run unit tests, verify that they pass. Note that coverage is run against
+    # the ./src directory here because that is what will be committed. In the
+    # github workflow script, the coverage is run against the installed package
+    # and uploaded to Codecov by calling pytest like so:
+    # `python -m pytest --cov=<package_name> --cov-report=xml`
+  - repo: local
+    hooks:
+      - id: pytest-check
+        name: Run unit tests
+        description: Run unit tests with pytest.
+        entry: bash -c "if python -m pytest --co -qq; then python -m pytest --cov=./src --cov-report=html; fi"
+        language: system
+        pass_filenames: false
+        always_run: true
+
+    # Prevents committing directly to branches named 'main' and 'master'.
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: no-commit-to-branch
+        name: Prevent main branch commits
+        description: Prevent the user from committing directly to the primary branch.
+      - id: check-added-large-files
+        name: Check for large files
+        description: Prevent the user from committing very large files.
+        args: ['--maxkb=500']
+
+    # Verify that pyproject.toml is well formed.
+  - repo: https://github.com/abravalheri/validate-pyproject
+    rev: v0.12.1
+    hooks:
+      - id: validate-pyproject
+        name: Validate pyproject.toml
+        description: Verify that pyproject.toml adheres to the established schema.
+
+    # Automatically sort the imports used in .py files
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        name: isort (python files in src/ and tests/)
+        description: Sort and organize imports in .py files.
+        types: [python]
+        files: ^(src|tests)/
+
+    # Analyze the code style and report code that doesn't adhere.
+  - repo: https://github.com/psf/black
+    rev: 23.1.0
+    hooks:
+      - id: black
+        types: [python]
+        files: ^(src|tests)/
+        # It is recommended to specify the latest version of Python
+        # supported by your project here, or alternatively use
+        # pre-commit's default_language_version, see
+        # https://pre-commit.com/#top_level-default_language_version
+        language_version: python3.10
+
+    # Make sure Sphinx can build the documentation while explicitly omitting
+    # notebooks from the docs, so users don't have to wait through the execution
+    # of each notebook on each commit. By default, these will be checked in the
+    # GitHub workflows.
+  - repo: local
+    hooks:
+      - id: sphinx-build
+        name: Build documentation with Sphinx
+        entry: sphinx-build
+        language: system
+        always_run: true
+        exclude_types: [file, symlink]
+        args:
+          [
+            "-M", # Run sphinx in make mode, so we can use -D flag later
+                  # Note: -M requires next 3 args to be builder, source, output
+            "html", # Specify builder
+            "./docs", # Source directory of documents
+            "./_readthedocs", # Output directory for rendered documents
+            "-T", # Show full trace back on exception
+            "-E", # Don't use saved env; always read all files
+            "-d", # Flag for cached environment and doctrees
+            "./docs/_build/doctrees", # Directory
+            "-D", # Flag to override settings in conf.py
+            "exclude_patterns=notebooks/*", # Exclude our notebooks from pre-commit
+          ]
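
These hooks only fire for developers who have registered pre-commit in their clone. A minimal first-time setup sketch, assuming the `pre-commit` package is installed (it is listed in the dev extras later in this diff):

    pre-commit install           # register the git hook in .git/hooks
    pre-commit run --all-files   # optional one-off run over the whole repository
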
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..79bfc27
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,22 @@
+# .readthedocs.yml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.10"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  configuration: docs/conf.py
+
+# Optionally declare the Python requirements required to build your docs
+python:
+  install:
+    - requirements: docs/requirements.txt
+    - method: pip
+      path: .
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..a5622f1
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,31 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS  ?= -T -E -d _build/doctrees -D language=en
+EXCLUDENB   ?= -D exclude_patterns="notebooks/*","_build","**.ipynb_checkpoints"
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR   = .
+BUILDDIR    = ../_readthedocs/
+
+.PHONY: help clean Makefile no-nb no-notebooks
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+# Build all Sphinx docs locally, except the notebooks
+no-nb no-notebooks:
+	@$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(EXCLUDENB) $(O)
+
+# Cleans up files generated by the build process
+clean:
+	rm -r "_build/doctrees"
+	rm -r "$(BUILDDIR)"
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
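
Example local usage of the Makefile targets above, run from the repository root. `no-nb`/`no-notebooks` skip the notebook builds; any other target falls through to the catch-all rule and is passed straight to Sphinx:

    make -C docs no-nb   # HTML docs without executing notebooks
    make -C docs html    # full build via the catch-all %: target
    make -C docs clean   # remove _build/doctrees and ../_readthedocs/
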
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..0a2cf2a
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,47 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+
+import os
+import sys
+
+import autoapi
+from importlib.metadata import version
+
+# Define path to the code to be documented **relative to where conf.py (this file) is kept**
+sys.path.insert(0, os.path.abspath('../src/'))
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "koffi"
+copyright = "2023, Maxine West"
+author = "Maxine West"
+release = version("koffi")
+# for example take major/minor
+version = ".".join(release.split(".")[:2])
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["sphinx.ext.mathjax", "sphinx.ext.napoleon", "sphinx.ext.viewcode"]
+
+extensions.append("autoapi.extension")
+extensions.append("nbsphinx")
+
+templates_path = []
+exclude_patterns = ['_build', '**.ipynb_checkpoints']
+
+master_doc = "index"  # This assumes that sphinx-build is called from the root directory
+html_show_sourcelink = False  # Remove 'view source code' from top of page (for html, not python)
+add_module_names = False  # Remove namespaces from class/method signatures
+
+autoapi_type = "python"
+autoapi_dirs = ["../src"]
+autoapi_ignore = ["*/__main__.py", "*/_version.py"]
+autoapi_add_toc_tree_entry = False
+autoapi_member_order = "bysource"
+
+html_theme = "sphinx_rtd_theme"
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..8104343
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,13 @@
+.. koffi documentation main file.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to koffi's documentation!
+========================================================================================
+
+.. toctree::
+    :hidden:
+
+    Home page <self>
+    API Reference <autoapi/index>
+    Notebooks <notebooks>
diff --git a/docs/notebooks.rst b/docs/notebooks.rst
new file mode 100644
index 0000000..7f7e544
--- /dev/null
+++ b/docs/notebooks.rst
@@ -0,0 +1,6 @@
+Notebooks
+========================================================================================
+
+.. toctree::
+
+    Introducing Jupyter Notebooks <notebooks/intro_notebook>
diff --git a/docs/notebooks/README.md b/docs/notebooks/README.md
new file mode 100644
index 0000000..a521ae1
--- /dev/null
+++ b/docs/notebooks/README.md
@@ -0,0 +1 @@
+Put your Jupyter notebooks here :)
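
Because conf.py sets `release = version("koffi")` via importlib.metadata, the package must be installed before the docs will build. The build-documentation workflow above does exactly this, and the same sequence works locally:

    pip install -r docs/requirements.txt
    pip install .
    sphinx-build -T -E -b html -d docs/build/doctrees ./docs docs/build/html
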
diff --git a/docs/notebooks/intro_notebook.ipynb b/docs/notebooks/intro_notebook.ipynb
new file mode 100644
index 0000000..2e7779f
--- /dev/null
+++ b/docs/notebooks/intro_notebook.ipynb
@@ -0,0 +1,111 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "accepting-editor",
+   "metadata": {
+    "cell_marker": "\"\"\""
+   },
+   "source": [
+    "# Introducing Jupyter Notebooks\n",
+    "\n",
+    "_(The example used here is JamesALeedham's notebook: [intro.ipynb](https://github.com/JamesALeedham/Sphinx-Autosummary-Recursion/blob/master/docs/notebooks/intro.ipynb))_\n",
+    "\n",
+    "First, set up the environment:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "actual-thirty",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import matplotlib\n",
+    "import matplotlib.pyplot as pl\n",
+    "import numpy as np\n",
+    "\n",
+    "try:\n",
+    "    from IPython import get_ipython\n",
+    "    get_ipython().run_line_magic('matplotlib', 'inline')\n",
+    "except AttributeError:\n",
+    "    print('Magic function can only be used in IPython environment')\n",
+    "    matplotlib.use('Agg')\n",
+    "\n",
+    "pl.rcParams[\"figure.figsize\"] = [15, 8]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "coral-upper",
+   "metadata": {
+    "cell_marker": "\"\"\"",
+    "lines_to_next_cell": 1
+   },
+   "source": [
+    "Then, define a function that creates a pretty graph:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "funded-protection",
+   "metadata": {
+    "lines_to_next_cell": 1
+   },
+   "outputs": [],
+   "source": [
+    "def SineAndCosineWaves():\n",
+    "    # Get a large number of X values for a nice smooth curve. Using Pi as np.sin requires radians...\n",
+    "    x = np.linspace(0, 2 * np.pi, 180)\n",
+    "    # Convert radians to degrees to make for a meaningful X axis (1 radian = 57.29* degrees)\n",
+    "    xdeg = 57.29577951308232 * np.array(x)\n",
+    "    # Calculate the sine of each value of X\n",
+    "    y = np.sin(x)\n",
+    "    # Calculate the cosine of each value of X\n",
+    "    z = np.cos(x)\n",
+    "    # Plot the sine wave in blue, using degrees rather than radians on the X axis\n",
+    "    pl.plot(xdeg, y, color='blue', label='Sine wave')\n",
+    "    # Plot the cos wave in green, using degrees rather than radians on the X axis\n",
+    "    pl.plot(xdeg, z, color='green', label='Cosine wave')\n",
+    "    pl.xlabel(\"Degrees\")\n",
+    "    # More sensible X axis values\n",
+    "    pl.xticks(np.arange(0, 361, 45))\n",
+    "    pl.legend()\n",
+    "    pl.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "thorough-cutting",
+   "metadata": {
+    "cell_marker": "\"\"\""
+   },
+   "source": [
+    "Finally, call that function to display the graph:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "imported-uruguay",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "SineAndCosineWaves()"
+   ]
+  }
+ ],
+ "metadata": {
+  "jupytext": {
+   "cell_markers": "\"\"\""
+  },
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..5c0f7d8
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,9 @@
+sphinx==6.1.3
+sphinx-rtd-theme==1.2.0
+sphinx-autoapi==2.0.1
+nbsphinx
+ipython
+jupytext
+jupyter
+matplotlib
+numpy
+ "nbconvert", # Needed for pre-commit check to clear output from Python notebooks + "nbsphinx", # Used to integrate Python notebooks into Sphinx documentation + "ipython", # Also used in building notebooks into Sphinx + "matplotlib", # Used in sample notebook intro_notebook.ipynb + "numpy", # Used in sample notebook intro_notebook.ipynb +] + +[build-system] +requires = [ + "setuptools>=62", # Used to build and package the Python project + "setuptools_scm>=6.2", # Gets release version from git. Makes it available programmatically +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +write_to = "src/koffi/_version.py" + +[tool.pytest.ini_options] +testpaths = [ + "tests", +] + [tool.black] line-length = 110 -target-version = ["py38"] [tool.isort] profile = "black" -line_length = 110 \ No newline at end of file +line_length = 110 diff --git a/src/koffi/koffi.py b/src/koffi/koffi.py index d448021..0e615ac 100644 --- a/src/koffi/koffi.py +++ b/src/koffi/koffi.py @@ -30,6 +30,7 @@ from koffi_tools.image_metadata import * from koffi_tools.potential_source import * + def skybot_search_frame(image): """ Gets all known objects within the frame of a single FITS image from SkyBoT. @@ -222,7 +223,7 @@ def jpl_search_frame(image): feed = url.read().decode("utf-8") results = json.loads(feed) - if 'warning' in results.keys() and results['warning'] == "no matching records": + if "warning" in results.keys() and results["warning"] == "no matching records": return [] num_results = results["n_second_pass"] diff --git a/src/koffi/script.py b/src/koffi/script.py index 84fadf2..29b54ce 100644 --- a/src/koffi/script.py +++ b/src/koffi/script.py @@ -1,9 +1,11 @@ -from .koffi import ImageMetadata, skybot_search_frame, jpl_search_frame -from astropy.table import QTable -from astropy import units as u import argparse import sys +from astropy import units as u +from astropy.table import QTable + +from .koffi import ImageMetadata, jpl_search_frame, skybot_search_frame + def main(): parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) @@ -35,7 +37,7 @@ def main(): ys.append(y) table = QTable( - data=[names, ras, decs, xs, ys], + data=[names, ras, decs, xs, ys], names=["Object Name", "RA", "Dec", "x", "y"], ) if args.format == "QTable": diff --git a/src/koffi_tools/__init__.py b/src/koffi_tools/__init__.py index bdd55b3..be4325e 100644 --- a/src/koffi_tools/__init__.py +++ b/src/koffi_tools/__init__.py @@ -1,2 +1,2 @@ from .image_metadata import * -from .potential_source import * \ No newline at end of file +from .potential_source import * diff --git a/src/koffi_tools/image_metadata.py b/src/koffi_tools/image_metadata.py index 3df9442..613e14e 100644 --- a/src/koffi_tools/image_metadata.py +++ b/src/koffi_tools/image_metadata.py @@ -59,7 +59,10 @@ def populate_from_fits_file(self, filename, mjd_key="MJD_OBS", mjd_val=None): if "DATE-AVG" in hdu_list[0].header: self.set_epoch(Time(hdu_list[0].header["DATE-AVG"], format="isot")) elif "DATE-OBS" in hdu_list[0].header and "EXPTIME" in hdu_list[0].header: - self.set_epoch(Time(hdu_list[0].header["DATE-OBS"], format="isot") + TimeDelta(hdu_list[0].header["EXPTIME"], format="sec")/2) + self.set_epoch( + Time(hdu_list[0].header["DATE-OBS"], format="isot") + + TimeDelta(hdu_list[0].header["EXPTIME"], format="sec") / 2 + ) elif self.get_header_element(mjd_key) is not None: self.set_epoch(Time(self.get_header_element(mjd_key), format="mjd")) @@ -67,10 +70,10 @@ def populate_from_fits_file(self, filename, mjd_key="MJD_OBS", 
diff --git a/src/koffi_tools/image_metadata.py b/src/koffi_tools/image_metadata.py
index 3df9442..613e14e 100644
--- a/src/koffi_tools/image_metadata.py
+++ b/src/koffi_tools/image_metadata.py
@@ -59,7 +59,10 @@ def populate_from_fits_file(self, filename, mjd_key="MJD_OBS", mjd_val=None):
         if "DATE-AVG" in hdu_list[0].header:
             self.set_epoch(Time(hdu_list[0].header["DATE-AVG"], format="isot"))
         elif "DATE-OBS" in hdu_list[0].header and "EXPTIME" in hdu_list[0].header:
-            self.set_epoch(Time(hdu_list[0].header["DATE-OBS"], format="isot") + TimeDelta(hdu_list[0].header["EXPTIME"], format="sec")/2)
+            self.set_epoch(
+                Time(hdu_list[0].header["DATE-OBS"], format="isot")
+                + TimeDelta(hdu_list[0].header["EXPTIME"], format="sec") / 2
+            )
 
         elif self.get_header_element(mjd_key) is not None:
             self.set_epoch(Time(self.get_header_element(mjd_key), format="mjd"))
@@ -67,10 +70,10 @@ def populate_from_fits_file(self, filename, mjd_key="MJD_OBS", mjd_val=None):
         # Since this doesn't seem to be standardized, we try some
         # documented versions.
         observat = self.get_header_element("OBSERVAT")
-        observatories = MPC.get_observatory_codes() # get list of MPC-defined observatory codes
+        observatories = MPC.get_observatory_codes()  # get list of MPC-defined observatory codes
         obs_lat = self.get_header_element("OBS-LAT")
         lat_obs = self.get_header_element("LAT-OBS")
-        if observat is not None and observat in observatories['Code']:
+        if observat is not None and observat in observatories["Code"]:
             self.obs_code = observat
             self.obs_loc_set = True
         elif obs_lat is not None:
diff --git a/tests/koffi/conftest.py b/tests/koffi/conftest.py
new file mode 100644
index 0000000..e69de29
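
The reformatted epoch logic above is behavior-preserving: when DATE-AVG is absent, it still takes DATE-OBS plus half of EXPTIME, i.e. the mid-exposure time. A small worked example of the same astropy arithmetic, using hypothetical header values:

    from astropy.time import Time, TimeDelta

    date_obs = "2023-03-15T06:45:00.000"  # hypothetical DATE-OBS (ISOT string)
    exptime = 30.0                        # hypothetical EXPTIME in seconds

    # Mid-exposure epoch: start time plus half the exposure length.
    epoch = Time(date_obs, format="isot") + TimeDelta(exptime, format="sec") / 2
    print(epoch.isot)  # -> 2023-03-15T06:45:15.000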