diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 0000000..7999658 --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,24 @@ +# Changes here will be overwritten by Copier +_commit: v2.0.2 +_src_path: gh:lincc-frameworks/python-project-template +author_email: awoldag@uw.edu +author_name: LINCC Frameworks +create_example_module: false +custom_install: true +enforce_style: +- ruff_lint +- ruff_format +failure_notification: [] +include_benchmarks: true +include_docs: true +include_notebooks: true +mypy_type_checking: none +package_name: kbmod_ml +project_license: MIT +project_name: kbmod_ml +project_organization: dirac-institute +python_versions: +- '3.9' +- '3.10' +- '3.11' +- '3.12' diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000..b1a286b --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..343a755 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,24 @@ +# For explanation of this file and uses see +# https://git-scm.com/docs/gitattributes +# https://developer.lsst.io/git/git-lfs.html#using-git-lfs-enabled-repositories +# https://lincc-ppt.readthedocs.io/en/latest/practices/git-lfs.html +# +# Used by https://github.com/lsst/afwdata.git +# *.boost filter=lfs diff=lfs merge=lfs -text +# *.dat filter=lfs diff=lfs merge=lfs -text +# *.fits filter=lfs diff=lfs merge=lfs -text +# *.gz filter=lfs diff=lfs merge=lfs -text +# +# apache parquet files +# *.parq filter=lfs diff=lfs merge=lfs -text +# +# sqlite files +# *.sqlite3 filter=lfs diff=lfs merge=lfs -text +# +# gzip files +# *.gz filter=lfs diff=lfs merge=lfs -text +# +# png image files +# *.png filter=lfs diff=lfs merge=lfs -text + +.git_archival.txt export-subst \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/0-general_issue.md b/.github/ISSUE_TEMPLATE/0-general_issue.md new file mode 100644 index 0000000..84bb0d7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/0-general_issue.md @@ -0,0 +1,8 @@ +--- +name: General issue +about: Quickly create a general issue +title: '' +labels: '' +assignees: '' + +--- \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/1-bug_report.md b/.github/ISSUE_TEMPLATE/1-bug_report.md new file mode 100644 index 0000000..16b6b71 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1-bug_report.md @@ -0,0 +1,17 @@ +--- +name: Bug report +about: Tell us about a problem to fix +title: 'Short description' +labels: 'bug' +assignees: '' + +--- +**Bug report** + + +**Before submitting** +Please check the following: + +- [ ] I have described the situation in which the bug arose, including what code was executed, information about my environment, and any applicable data others will need to reproduce the problem. +- [ ] I have included available evidence of the unexpected behavior (including error messages, screenshots, and/or plots) as well as a description of what I expected instead. +- [ ] If I have a solution in mind, I have provided an explanation and/or pseudocode and/or task list. 
diff --git a/.github/ISSUE_TEMPLATE/2-feature_request.md b/.github/ISSUE_TEMPLATE/2-feature_request.md new file mode 100644 index 0000000..908ff72 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2-feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: 'Short description' +labels: 'enhancement' +assignees: '' + +--- + +**Feature request** + + +**Before submitting** +Please check the following: + +- [ ] I have described the purpose of the suggested change, specifying what I need the enhancement to accomplish, i.e. what problem it solves. +- [ ] I have included any relevant links, screenshots, environment information, and data relevant to implementing the requested feature, as well as pseudocode for how I want to access the new functionality. +- [ ] If I have ideas for how the new feature could be implemented, I have provided explanations and/or pseudocode and/or task lists for the steps. diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..3b5ca19 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "monthly" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..76e043c --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,63 @@ + + +## Change Description + +- [ ] My PR includes a link to the issue that I am addressing + + + +## Solution Description + + + + +## Code Quality +- [ ] I have read the Contribution Guide +- [ ] My code follows the code style of this project +- [ ] My code builds (or compiles) cleanly without any errors or warnings +- [ ] My code contains relevant comments and necessary documentation + +## Project-Specific Pull Request Checklists + + +### Bug Fix Checklist +- [ ] My fix includes a new test that breaks as a result of the bug (if possible) +- [ ] My change includes a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) + +### New Feature Checklist +- [ ] I have added or updated the docstrings associated with my feature using the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html) +- [ ] I have updated the tutorial to highlight my new feature (if appropriate) +- [ ] I have added unit/End-to-End (E2E) test cases to cover my new feature +- [ ] My change includes a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) + +### Documentation Change Checklist +- [ ] Any updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html) + +### Build/CI Change Checklist +- [ ] If required or optional dependencies have changed (including version numbers), I have updated the README to reflect this +- [ ] If this is a new CI setup, I have added the associated badge to the README + + + +### Other Change Checklist +- [ ] Any new or updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html). 
+- [ ] I have updated the tutorial to highlight my new feature (if appropriate) +- [ ] I have added unit/End-to-End (E2E) test cases to cover any changes +- [ ] My change includes a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) diff --git a/.github/workflows/asv-main.yml b/.github/workflows/asv-main.yml new file mode 100644 index 0000000..f6a6f29 --- /dev/null +++ b/.github/workflows/asv-main.yml @@ -0,0 +1,101 @@ +# This workflow will run benchmarks with airspeed velocity (asv), +# store the new results in the "benchmarks" branch and publish them +# to a dashboard on GH Pages. + +name: Run ASV benchmarks for main + +on: + push: + branches: [ main ] + +env: + PYTHON_VERSION: "3.10" + WORKING_DIR: ${{ github.workspace }}/benchmarks + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + setup-python: + runs-on: ubuntu-latest + + steps: + - name: Cache Python ${{ env.PYTHON_VERSION }} + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: python-${{ env.PYTHON_VERSION }} + + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + asv-main: + runs-on: ubuntu-latest + needs: setup-python + + permissions: + contents: write + + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + + steps: + - name: Checkout main branch of the repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Cache Python ${{ env.PYTHON_VERSION }} + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: python-${{ env.PYTHON_VERSION }} + + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install asv==0.6.1 virtualenv tabulate + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + - name: Create ASV machine config file + run: asv machine --machine gh-runner --yes + + - name: Fetch previous results from the "benchmarks" branch + run: | + if git ls-remote --exit-code origin benchmarks > /dev/null 2>&1; then + git merge origin/benchmarks \ + --allow-unrelated-histories \ + --no-commit + mv ../_results . + fi + + - name: Run ASV for the main branch + run: asv run ALL --skip-existing --verbose || true + + - name: Submit new results to the "benchmarks" branch + uses: JamesIves/github-pages-deploy-action@v4 + with: + branch: benchmarks + folder: ${{ env.WORKING_DIR }}/_results + target-folder: _results + + - name: Generate dashboard HTML + run: | + asv show + asv publish + + - name: Deploy to Github pages + uses: JamesIves/github-pages-deploy-action@v4 + with: + branch: gh-pages + folder: ${{ env.WORKING_DIR }}/_html \ No newline at end of file diff --git a/.github/workflows/asv-nightly.yml b/.github/workflows/asv-nightly.yml new file mode 100644 index 0000000..80a2d78 --- /dev/null +++ b/.github/workflows/asv-nightly.yml @@ -0,0 +1,93 @@ +# This workflow will run daily at 06:45. +# It will run benchmarks with airspeed velocity (asv) +# and compare performance with the previous nightly build. 
+ +name: Run benchmarks nightly job + +on: + schedule: + - cron: 45 6 * * * + workflow_dispatch: + +env: + PYTHON_VERSION: "3.10" + WORKING_DIR: ${{ github.workspace }}/benchmarks + NIGHTLY_HASH_FILE: nightly-hash + +jobs: + + asv-nightly: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + + steps: + - name: Checkout main branch of the repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Cache Python ${{ env.PYTHON_VERSION }} + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: python-${{ env.PYTHON_VERSION }} + + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install asv==0.6.1 virtualenv + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + - name: Create ASV machine config file + run: asv machine --machine gh-runner --yes + + - name: Fetch previous results from the "benchmarks" branch + run: | + if git ls-remote --exit-code origin benchmarks > /dev/null 2>&1; then + git merge origin/benchmarks \ + --allow-unrelated-histories \ + --no-commit + mv ../_results . + fi + + - name: Get nightly dates under comparison + id: nightly-dates + run: | + echo "yesterday=$(date -d yesterday +'%Y-%m-%d')" >> $GITHUB_OUTPUT + echo "today=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT + + - name: Use last nightly commit hash from cache + uses: actions/cache@v4 + with: + path: ${{ env.WORKING_DIR }} + key: nightly-results-${{ steps.nightly-dates.outputs.yesterday }} + + - name: Run comparison of main against last nightly build + run: | + HASH_FILE=${{ env.NIGHTLY_HASH_FILE }} + CURRENT_HASH=${{ github.sha }} + if [ -f $HASH_FILE ]; then + PREV_HASH=$(cat $HASH_FILE) + asv continuous $PREV_HASH $CURRENT_HASH --verbose || true + asv compare $PREV_HASH $CURRENT_HASH --sort ratio --verbose + fi + echo $CURRENT_HASH > $HASH_FILE + + - name: Update last nightly hash in cache + uses: actions/cache@v4 + with: + path: ${{ env.WORKING_DIR }} + key: nightly-results-${{ steps.nightly-dates.outputs.today }} \ No newline at end of file diff --git a/.github/workflows/asv-pr.yml b/.github/workflows/asv-pr.yml new file mode 100644 index 0000000..bf5aed6 --- /dev/null +++ b/.github/workflows/asv-pr.yml @@ -0,0 +1,86 @@ +# This workflow will run benchmarks with airspeed velocity (asv) for pull requests. +# It will compare the performance of the main branch with the performance of the merge +# with the new changes. It then publishes a comment with this assessment by triggering +# the publish-benchmarks-pr workflow. +# Based on https://securitylab.github.com/research/github-actions-preventing-pwn-requests/. 
+name: Run benchmarks for PR + +on: + pull_request: + branches: [ main ] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.10" + WORKING_DIR: ${{ github.workspace }}/benchmarks + ARTIFACTS_DIR: ${{ github.workspace }}/artifacts + +jobs: + setup-python: + runs-on: ubuntu-latest + steps: + - name: Cache Python ${{ env.PYTHON_VERSION }} + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: python-${{ env.PYTHON_VERSION }} + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + asv-pr: + runs-on: ubuntu-latest + needs: setup-python + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + steps: + - name: Checkout PR branch of the repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Display Workflow Run Information + run: | + echo "Workflow Run ID: ${{ github.run_id }}" + - name: Cache Python ${{ env.PYTHON_VERSION }} + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: python-${{ env.PYTHON_VERSION }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install asv==0.6.1 virtualenv tabulate lf-asv-formatter + - name: Make artifacts directory + run: mkdir -p ${{ env.ARTIFACTS_DIR }} + - name: Save pull request number + run: echo ${{ github.event.pull_request.number }} > ${{ env.ARTIFACTS_DIR }}/pr + - name: Get current job logs URL + uses: Tiryoh/gha-jobid-action@v1 + id: jobs + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + job_name: ${{ github.job }} + - name: Create ASV machine config file + run: asv machine --machine gh-runner --yes + - name: Save comparison of PR against main branch + run: | + git remote add upstream https://github.com/${{ github.repository }}.git + git fetch upstream + asv continuous upstream/main HEAD --verbose || true + asv compare upstream/main HEAD --sort ratio --verbose | tee output + python -m lf_asv_formatter --asv_version "$(echo asv --version)" + printf "\n\nClick [here]($STEP_URL) to view all benchmarks." >> output + mv output ${{ env.ARTIFACTS_DIR }} + env: + STEP_URL: "${{ steps.jobs.outputs.html_url }}#step:11:1" + - name: Upload artifacts (PR number and benchmarks output) + uses: actions/upload-artifact@v4 + with: + name: benchmark-artifacts + path: ${{ env.ARTIFACTS_DIR }} \ No newline at end of file diff --git a/.github/workflows/build-documentation.yml b/.github/workflows/build-documentation.yml new file mode 100644 index 0000000..638e7b6 --- /dev/null +++ b/.github/workflows/build-documentation.yml @@ -0,0 +1,38 @@ +# This workflow will install Python dependencies, build the package and then build the documentation. + +name: Build documentation + + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + if [ -f docs/requirements.txt ]; then pip install -r docs/requirements.txt; fi + pip install . 
+ - name: Install notebook requirements + run: | + sudo apt-get install pandoc + - name: Build docs + run: | + sphinx-build -T -E -b html -d docs/build/doctrees ./docs docs/build/html diff --git a/.github/workflows/pre-commit-ci.yml b/.github/workflows/pre-commit-ci.yml new file mode 100644 index 0000000..a57e221 --- /dev/null +++ b/.github/workflows/pre-commit-ci.yml @@ -0,0 +1,35 @@ +# This workflow runs pre-commit hooks on pushes and pull requests to main +# to enforce coding style. To ensure correct configuration, please refer to: +# https://lincc-ppt.readthedocs.io/en/latest/practices/ci_precommit.html +name: Run pre-commit hooks + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + pre-commit-ci: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - uses: pre-commit/action@v3.0.1 + with: + extra_args: --all-files --verbose + env: + SKIP: "check-lincc-frameworks-template-version,no-commit-to-branch,check-added-large-files,validate-pyproject,sphinx-build,pytest-check" + - uses: pre-commit-ci/lite-action@v1.0.2 + if: failure() && github.event_name == 'pull_request' && github.event.pull_request.draft == false \ No newline at end of file diff --git a/.github/workflows/publish-benchmarks-pr.yml b/.github/workflows/publish-benchmarks-pr.yml new file mode 100644 index 0000000..45ed928 --- /dev/null +++ b/.github/workflows/publish-benchmarks-pr.yml @@ -0,0 +1,53 @@ +# This workflow publishes a benchmarks comment on a pull request. It is triggered after the +# benchmarks are computed in the asv-pr workflow. This separation of concerns allows us to limit +# access to the target repository's private tokens and secrets, increasing the level of security. +# Based on https://securitylab.github.com/research/github-actions-preventing-pwn-requests/.
+name: Publish benchmarks comment to PR + +on: + workflow_run: + workflows: ["Run benchmarks for PR"] + types: [completed] + +jobs: + upload-pr-comment: + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + permissions: + issues: write + pull-requests: write + steps: + - name: Display Workflow Run Information + run: | + echo "Workflow Run ID: ${{ github.event.workflow_run.id }}" + echo "Head SHA: ${{ github.event.workflow_run.head_sha }}" + echo "Head Branch: ${{ github.event.workflow_run.head_branch }}" + echo "Conclusion: ${{ github.event.workflow_run.conclusion }}" + echo "Event: ${{ github.event.workflow_run.event }}" + - name: Download artifact + uses: dawidd6/action-download-artifact@v3 + with: + name: benchmark-artifacts + run_id: ${{ github.event.workflow_run.id }} + - name: Extract artifacts information + id: pr-info + run: | + printf "PR number: $(cat pr)\n" + printf "Output:\n$(cat output)" + printf "pr=$(cat pr)" >> $GITHUB_OUTPUT + - name: Find benchmarks comment + uses: peter-evans/find-comment@v3 + id: find-comment + with: + issue-number: ${{ steps.pr-info.outputs.pr }} + comment-author: 'github-actions[bot]' + body-includes: view all benchmarks + - name: Create or update benchmarks comment + uses: peter-evans/create-or-update-comment@v4 + with: + comment-id: ${{ steps.find-comment.outputs.comment-id }} + issue-number: ${{ steps.pr-info.outputs.pr }} + body-path: output + edit-mode: replace \ No newline at end of file diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml new file mode 100644 index 0000000..f7cecc2 --- /dev/null +++ b/.github/workflows/publish-to-pypi.yml @@ -0,0 +1,37 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://github.com/pypa/gh-action-pypi-publish#trusted-publishing + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + deploy: + + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/smoke-test.yml b/.github/workflows/smoke-test.yml new file mode 100644 index 0000000..95c085c --- /dev/null +++ b/.github/workflows/smoke-test.yml @@ -0,0 +1,42 @@ +# This workflow will run daily at 06:45. +# It will install Python dependencies and run tests with a variety of Python versions. 
+# See documentation for help debugging smoke test issues: +# https://lincc-ppt.readthedocs.io/en/latest/practices/ci_testing.html#version-culprit + +name: Unit test smoke test + +on: + + # Runs this workflow automatically + schedule: + - cron: 45 6 * * * + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.9', '3.10', '3.11', '3.12'] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install -e .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: List dependencies + run: | + pip list + - name: Run unit tests with pytest + run: | + python -m pytest \ No newline at end of file diff --git a/.github/workflows/testing-and-coverage.yml b/.github/workflows/testing-and-coverage.yml new file mode 100644 index 0000000..ee8e29b --- /dev/null +++ b/.github/workflows/testing-and-coverage.yml @@ -0,0 +1,38 @@ +# This workflow will install Python dependencies, run tests and report code coverage with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Unit test and code coverage + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.9', '3.10', '3.11', '3.12'] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install -e .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Run unit tests with pytest + run: | + python -m pytest --cov=kbmod_ml --cov-report=xml + - name: Upload coverage report to codecov + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..50990fe --- /dev/null +++ b/.gitignore @@ -0,0 +1,150 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +_version.py + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +_readthedocs/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# vscode +.vscode/ + +# dask +dask-worker-space/ + +# tmp directory +tmp/ + +# Mac OS +.DS_Store + +# Airspeed Velocity performance results +_results/ +_html/ + +# Project initialization script +.initialize_new_project.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..b8a8974 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,100 @@ +repos: + # Compare the local template version to the latest remote template version + # This hook should always pass. It will print a message if the local version + # is out of date. + - repo: https://github.com/lincc-frameworks/pre-commit-hooks + rev: v0.1.2 + hooks: + - id: check-lincc-frameworks-template-version + name: Check template version + description: Compare current template version against latest + verbose: true + # Clear output from jupyter notebooks so that only the input cells are committed. + - repo: local + hooks: + - id: jupyter-nb-clear-output + name: Clear output from Jupyter notebooks + description: Clear output from Jupyter notebooks. + files: \.ipynb$ + stages: [commit] + language: system + entry: jupyter nbconvert --clear-output + # Prevents committing directly branches named 'main' and 'master'. + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: no-commit-to-branch + name: Prevent main branch commits + description: Prevent the user from committing directly to the primary branch. + - id: check-added-large-files + name: Check for large files + description: Prevent the user from committing very large files. + args: ['--maxkb=500'] + # Verify that pyproject.toml is well formed + - repo: https://github.com/abravalheri/validate-pyproject + rev: v0.12.1 + hooks: + - id: validate-pyproject + name: Validate pyproject.toml + description: Verify that pyproject.toml adheres to the established schema. 
+ # Verify that GitHub workflows are well formed + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.0 + hooks: + - id: check-github-workflows + args: ["--verbose"] + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.2.1 + hooks: + - id: ruff + name: Lint code using ruff; sort and organize imports + types_or: [ python, pyi ] + args: ["--fix"] + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.2.1 + hooks: + - id: ruff-format + name: Format code using ruff + types_or: [ python, pyi, jupyter ] + # Make sure Sphinx can build the documentation while explicitly omitting + # notebooks from the docs, so users don't have to wait through the execution + # of each notebook or each commit. By default, these will be checked in the + # GitHub workflows. + - repo: local + hooks: + - id: sphinx-build + name: Build documentation with Sphinx + entry: sphinx-build + language: system + always_run: true + exclude_types: [file, symlink] + args: + [ + "-M", # Run sphinx in make mode, so we can use -D flag later + # Note: -M requires next 3 args to be builder, source, output + "html", # Specify builder + "./docs", # Source directory of documents + "./_readthedocs", # Output directory for rendered documents + "-T", # Show full trace back on exception + "-E", # Don't use saved env; always read all files + "-d", # Flag for cached environment and doctrees + "./docs/_build/doctrees", # Directory + "-D", # Flag to override settings in conf.py + "exclude_patterns=notebooks/*", # Exclude our notebooks from pre-commit + ] + # Run unit tests, verify that they pass. Note that coverage is run against + # the ./src directory here because that is what will be committed. In the + # github workflow script, the coverage is run against the installed package + # and uploaded to Codecov by calling pytest like so: + # `python -m pytest --cov= --cov-report=xml` + - repo: local + hooks: + - id: pytest-check + name: Run unit tests + description: Run unit tests with pytest. + entry: bash -c "if python -m pytest --co -qq; then python -m pytest --cov=./src --cov-report=html; fi" + language: system + pass_filenames: false + always_run: true diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..79bfc27 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,22 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: docs/requirements.txt + - method: pip + path: . diff --git a/.setup_dev.sh b/.setup_dev.sh new file mode 100644 index 0000000..d8cd955 --- /dev/null +++ b/.setup_dev.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash + +# This script should be run by new developers to install this package in +# editable mode and configure their local environment + +echo "Checking virtual environment" +if [ -z "${VIRTUAL_ENV}" ] && [ -z "${CONDA_PREFIX}" ]; then + echo 'No virtual environment detected: none of $VIRTUAL_ENV or $CONDA_PREFIX is set.' + echo + echo "=== This script is going to install the project in the system python environment ===" + echo "Proceed? 
[y/N]" + read -r RESPONCE + if [ "${RESPONCE}" != "y" ]; then + echo "See https://lincc-ppt.readthedocs.io/ for details." + echo "Exiting." + exit 1 + fi + +fi + +echo "Checking pip version" +MINIMUM_PIP_VERSION=22 +pipversion=( $(python -m pip --version | awk '{print $2}' | sed 's/\./ /g') ) +if let "${pipversion[0]}<${MINIMUM_PIP_VERSION}"; then + echo "Insufficient version of pip found. Requires at least version ${MINIMUM_PIP_VERSION}." + echo "See https://lincc-ppt.readthedocs.io/ for details." + exit 1 +fi + +echo "Installing package and runtime dependencies in local environment" +python -m pip install -e . > /dev/null + +echo "Installing developer dependencies in local environment" +python -m pip install -e .'[dev]' > /dev/null +if [ -f docs/requirements.txt ]; then python -m pip install -r docs/requirements.txt; fi + +echo "Installing pre-commit" +pre-commit install > /dev/null + +####################################################### +# Include any additional configurations below this line +####################################################### diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7ac9917 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 LINCC Frameworks + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..df0a4c3 --- /dev/null +++ b/README.md @@ -0,0 +1,50 @@ +# kbmod_ml + +[![Template](https://img.shields.io/badge/Template-LINCC%20Frameworks%20Python%20Project%20Template-brightgreen)](https://lincc-ppt.readthedocs.io/en/latest/) + +[![PyPI](https://img.shields.io/pypi/v/kbmod_ml?color=blue&logo=pypi&logoColor=white)](https://pypi.org/project/kbmod_ml/) +[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/dirac-institute/kbmod_ml/smoke-test.yml)](https://github.com/dirac-institute/kbmod_ml/actions/workflows/smoke-test.yml) +[![Codecov](https://codecov.io/gh/dirac-institute/kbmod_ml/branch/main/graph/badge.svg)](https://codecov.io/gh/dirac-institute/kbmod_ml) +[![Read The Docs](https://img.shields.io/readthedocs/kbmod-ml)](https://kbmod-ml.readthedocs.io/) +[![Benchmarks](https://img.shields.io/github/actions/workflow/status/dirac-institute/kbmod_ml/asv-main.yml?label=benchmarks)](https://dirac-institute.github.io/kbmod_ml/) + +This project was automatically generated using the LINCC-Frameworks +[python-project-template](https://github.com/lincc-frameworks/python-project-template). 
+ +A repository badge was added to show that this project uses the python-project-template, but it's up to +you whether or not you'd like to display it! + +For more information about the project template, see the +[documentation](https://lincc-ppt.readthedocs.io/en/latest/). + +## Dev Guide - Getting Started + +Before installing any dependencies or writing code, it's a great idea to create a +virtual environment. LINCC-Frameworks engineers primarily use `conda` to manage virtual +environments. If you have conda installed locally, you can run the following to +create and activate a new environment. + +``` +>> conda create -n <env_name> python=3.10 +>> conda activate <env_name> +``` + +Once you have created a new environment, you can install this project for local +development using the following commands: + +``` +>> pip install -e .'[dev]' +>> pre-commit install +>> conda install pandoc +``` + +Notes: +1. The single quotes around `'[dev]'` may not be required for your operating system. +2. `pre-commit install` will initialize pre-commit for this local repository, so + that a set of tests will be run prior to completing a local commit. For more + information, see the Python Project Template documentation on + [pre-commit](https://lincc-ppt.readthedocs.io/en/latest/practices/precommit.html) +3. Installing `pandoc` allows you to verify that automatic rendering of Jupyter notebooks + into documentation for ReadTheDocs works as expected. For more information, see + the Python Project Template documentation on + [Sphinx and Python Notebooks](https://lincc-ppt.readthedocs.io/en/latest/practices/sphinx.html#python-notebooks) diff --git a/benchmarks/__init__.py b/benchmarks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json new file mode 100644 index 0000000..6b9e80d --- /dev/null +++ b/benchmarks/asv.conf.json @@ -0,0 +1,80 @@ +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + // The name of the project being benchmarked. + "project": "kbmod_ml", + // The project's homepage. + "project_url": "https://github.com/dirac-institute/kbmod_ml", + // The URL or local path of the source code repository for the + // project being benchmarked. + "repo": "..", + // List of branches to benchmark. If not provided, defaults to "master" + // (for git) or "tip" (for mercurial). + "branches": [ + "HEAD" + ], + "install_command": [ + "python -m pip install {wheel_file}" + ], + "build_command": [ + "python -m build --wheel -o {build_cache_dir} {build_dir}" + ], + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + "dvcs": "git", + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "virtualenv", + // the base URL to show a commit for the project. + "show_commit_url": "https://github.com/dirac-institute/kbmod_ml/commit/", + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + "pythons": [ + "3.10" + ], + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers.
An empty + // list indicates to just test against the default (latest) + // version. + "matrix": { + "Cython": [], + "build": [], + "packaging": [] + }, + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks". + "benchmark_dir": ".", + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env". + "env_dir": "env", + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + "results_dir": "_results", + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + "html_dir": "_html", + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + // `asv` will cache wheels of the recent builds in each + // environment, making them faster to install next time. This is + // number of builds to keep, per environment. + "build_cache_size": 8 + // The commits after which the regression search in `asv publish` + // should start looking for regressions. Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. + // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // } +} \ No newline at end of file diff --git a/benchmarks/benchmarks.py b/benchmarks/benchmarks.py new file mode 100644 index 0000000..575155a --- /dev/null +++ b/benchmarks/benchmarks.py @@ -0,0 +1,16 @@ +"""Two sample benchmarks to compute runtime and memory usage. + +For more information on writing benchmarks: +https://asv.readthedocs.io/en/stable/writing_benchmarks.html.""" + +from kbmod_ml import example_benchmarks + + +def time_computation(): + """Time computations are prefixed with 'time'.""" + example_benchmarks.runtime_computation() + + +def mem_list(): + """Memory computations are prefixed with 'mem' or 'peakmem'.""" + return example_benchmarks.memory_computation() diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..a5622f1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,31 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= -T -E -d _build/doctrees -D language=en +EXCLUDENB ?= -D exclude_patterns="notebooks/*","_build","**.ipynb_checkpoints" +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = ../_readthedocs/ + +.PHONY: help clean Makefile no-nb no-notebooks + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# Build all Sphinx docs locally, except the notebooks +no-nb no-notebooks: + @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(EXCLUDENB) $(O) + +# Cleans up files generated by the build process +clean: + rm -r "_build/doctrees" + rm -r "$(BUILDDIR)" + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..109b574 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,58 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + + +import os +import sys +from importlib.metadata import version + +# Define path to the code to be documented **relative to where conf.py (this file) is kept** +sys.path.insert(0, os.path.abspath("../src/")) + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "kbmod_ml" +copyright = "2023, LINCC Frameworks" +author = "LINCC Frameworks" +release = version("kbmod_ml") +# for example take major/minor +version = ".".join(release.split(".")[:2]) + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx.ext.mathjax", "sphinx.ext.napoleon", "sphinx.ext.viewcode"] + +extensions.append("autoapi.extension") +extensions.append("nbsphinx") + +# -- sphinx-copybutton configuration ---------------------------------------- +extensions.append("sphinx_copybutton") +## sets up the expected prompt text from console blocks, and excludes it from +## the text that goes into the clipboard. +copybutton_exclude = ".linenos, .gp" +copybutton_prompt_text = ">> " + +## lets us suppress the copy button on select code blocks. +copybutton_selector = "div:not(.no-copybutton) > div.highlight > pre" +templates_path = [] +exclude_patterns = ["_build", "**.ipynb_checkpoints"] + +# This assumes that sphinx-build is called from the root directory +master_doc = "index" +# Remove 'view source code' from top of page (for html, not python) +html_show_sourcelink = False +# Remove namespaces from class/method signatures +add_module_names = False + +autoapi_type = "python" +autoapi_dirs = ["../src"] +autoapi_ignore = ["*/__main__.py", "*/_version.py"] +autoapi_add_toc_tree_entry = False +autoapi_member_order = "bysource" + +html_theme = "sphinx_rtd_theme" diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..95c8bb1 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,50 @@ +.. kbmod_ml documentation main file. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to kbmod_ml's documentation! +======================================================================================== + +Dev Guide - Getting Started +--------------------------- + +Before installing any dependencies or writing code, it's a great idea to create a +virtual environment. LINCC-Frameworks engineers primarily use `conda` to manage virtual +environments. If you have conda installed locally, you can run the following to +create and activate a new environment. + +.. code-block:: console + + >> conda create -n <env_name> python=3.10 + >> conda activate <env_name> + + +Once you have created a new environment, you can install this project for local +development using the following commands: + +.. code-block:: console + + >> pip install -e .'[dev]' + >> pre-commit install + >> conda install pandoc + + +Notes: + +1) The single quotes around ``'[dev]'`` may not be required for your operating system.
+2) ``pre-commit install`` will initialize pre-commit for this local repository, so + that a set of tests will be run prior to completing a local commit. For more + information, see the Python Project Template documentation on + `pre-commit `_. +3) Installing ``pandoc`` allows you to verify that automatic rendering of Jupyter notebooks + into documentation for ReadTheDocs works as expected. For more information, see + the Python Project Template documentation on + `Sphinx and Python Notebooks `_. + + +.. toctree:: + :hidden: + + Home page + API Reference + Notebooks diff --git a/docs/notebooks.rst b/docs/notebooks.rst new file mode 100644 index 0000000..7f7e544 --- /dev/null +++ b/docs/notebooks.rst @@ -0,0 +1,6 @@ +Notebooks +======================================================================================== + +.. toctree:: + + Introducing Jupyter Notebooks diff --git a/docs/notebooks/README.md b/docs/notebooks/README.md new file mode 100644 index 0000000..a521ae1 --- /dev/null +++ b/docs/notebooks/README.md @@ -0,0 +1 @@ +Put your Jupyter notebooks here :) diff --git a/docs/notebooks/intro_notebook.ipynb b/docs/notebooks/intro_notebook.ipynb new file mode 100644 index 0000000..0589b29 --- /dev/null +++ b/docs/notebooks/intro_notebook.ipynb @@ -0,0 +1,84 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "textblock1", + "metadata": { + "cell_marker": "\"\"\"" + }, + "source": [ + "# Introducing Jupyter Notebooks in Sphinx\n", + "\n", + "This notebook showcases very basic functionality of rendering your jupyter notebooks as tutorials inside your sphinx documentation.\n", + "\n", + "As part of the LINCC Frameworks python project template, your notebooks will be executed AND rendered at document build time.\n", + "\n", + "You can read more about Sphinx, ReadTheDocs, and building notebooks in [LINCC's documentation](https://lincc-ppt.readthedocs.io/en/latest/practices/sphinx.html)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "codeblock1", + "metadata": {}, + "outputs": [], + "source": [ + "def sierpinsky(order):\n", + " \"\"\"Define a method that will create a Sierpinsky triangle of given order,\n", + " and will print it out.\"\"\"\n", + " triangles = [\"*\"]\n", + " for i in range(order):\n", + " spaces = \" \" * (2**i)\n", + " triangles = [spaces + triangle + spaces for triangle in triangles] + [\n", + " triangle + \" \" + triangle for triangle in triangles\n", + " ]\n", + " print(f\"Printing order {order} triangle\")\n", + " print(\"\\n\".join(triangles))" + ] + }, + { + "cell_type": "markdown", + "id": "textblock2", + "metadata": { + "cell_marker": "\"\"\"", + "lines_to_next_cell": 1 + }, + "source": [ + "Then, call our method a few times. This will happen on the fly during notebook rendering." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "codeblock2", + "metadata": {}, + "outputs": [], + "source": [ + "for order in range(3):\n", + " sierpinsky(order)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "codeblock3", + "metadata": {}, + "outputs": [], + "source": [ + "sierpinsky(4)" + ] + } + ], + "metadata": { + "jupytext": { + "cell_markers": "\"\"\"" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..ee05654 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,10 @@ + +ipykernel +ipython +jupytext +nbconvert +nbsphinx +sphinx +sphinx-autoapi +sphinx-copybutton +sphinx-rtd-theme \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..f45ec2d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,105 @@ +[project] +name = "kbmod_ml" +license = {file = "LICENSE"} +readme = "README.md" +authors = [ + { name = "LINCC Frameworks", email = "awoldag@uw.edu" } +] +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Operating System :: OS Independent", + "Programming Language :: Python", +] +dynamic = ["version"] +requires-python = ">=3.9" +dependencies = [ +] + +[project.urls] +"Source Code" = "https://github.com/dirac-institute/kbmod_ml" + +# On a mac, install optional dependencies with `pip install '.[dev]'` (include the single quotes) +[project.optional-dependencies] +dev = [ + "asv==0.6.1", # Used to compute performance benchmarks + "jupyter", # Clears output from Jupyter notebooks + "pre-commit", # Used to run checks before finalizing a git commit + "pytest", + "pytest-cov", # Used to report total code coverage + "ruff", # Used for static linting of files +] + +[build-system] +requires = [ + "setuptools>=62", # Used to build and package the Python project + "setuptools_scm>=6.2", # Gets release version from git. 
Makes it available programmatically +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +write_to = "src/kbmod_ml/_version.py" + +[tool.pytest.ini_options] +testpaths = [ + "tests", +] + +[tool.black] +line-length = 110 +target-version = ["py39"] + +[tool.isort] +profile = "black" +line_length = 110 + +[tool.ruff] +line-length = 110 +target-version = "py39" + +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + "W", + # Pyflakes + "F", + # pep8-naming + "N", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", + # docstrings + "D101", + "D102", + "D103", + "D106", + "D206", + "D207", + "D208", + "D300", + "D417", + "D419", + # Numpy v2.0 compatibility + "NPY201", +] + +ignore = [ + "UP006", # Allow non standard library generics in type hints + "UP007", # Allow Union in type hints + "SIM114", # Allow if with same arms + "B028", # Allow default warning level + "SIM117", # Allow nested with + "UP015", # Allow redundant open parameters + "UP028", # Allow yield in for loop +] + +[tool.coverage.run] +omit=["src/kbmod_ml/_version.py"] diff --git a/src/kbmod_ml/__init__.py b/src/kbmod_ml/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/kbmod_ml/example_benchmarks.py b/src/kbmod_ml/example_benchmarks.py new file mode 100644 index 0000000..5a77b06 --- /dev/null +++ b/src/kbmod_ml/example_benchmarks.py @@ -0,0 +1,14 @@ +"""An example module containing simplistic methods under benchmarking.""" + +import random +import time + + +def runtime_computation(): + """Runtime computation consuming between 0 and 5 seconds.""" + time.sleep(random.uniform(0, 5)) + + +def memory_computation(): + """Memory computation for a random list up to 512 samples.""" + return [0] * random.randint(0, 512) diff --git a/tests/kbmod_ml/conftest.py b/tests/kbmod_ml/conftest.py new file mode 100644 index 0000000..e69de29
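Testing note: the smoke-test and testing-and-coverage workflows above invoke `python -m pytest --cov=kbmod_ml --cov-report=xml`, but the only test scaffolding this diff adds is an empty `tests/kbmod_ml/conftest.py`. As a minimal sketch (not part of the diff), a first test module exercising the generated `example_benchmarks` helpers could look like the following; the file name `test_example_benchmarks.py` and the asserted bounds are illustrative assumptions rather than template contents.

```python
# tests/kbmod_ml/test_example_benchmarks.py  (hypothetical; not included in the diff above)
"""Minimal tests for the template-generated example_benchmarks module."""

from kbmod_ml import example_benchmarks


def test_memory_computation_returns_bounded_list_of_zeros():
    # memory_computation() returns [0] * random.randint(0, 512), so the result
    # should be a list holding between 0 and 512 zeros (bounds inclusive).
    result = example_benchmarks.memory_computation()
    assert isinstance(result, list)
    assert 0 <= len(result) <= 512
    assert all(value == 0 for value in result)


def test_runtime_computation_returns_none():
    # runtime_computation() only sleeps for a random 0-5 second interval and
    # returns nothing, so this test may take a few seconds to run.
    assert example_benchmarks.runtime_computation() is None
```

Running `python -m pytest --cov=kbmod_ml --cov-report=xml` locally would then mirror the CI workflows, while the pre-commit `pytest-check` hook runs the equivalent `python -m pytest --cov=./src --cov-report=html`.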