From ce53dfbed2e3c3c491d05196bf324e77559cada1 Mon Sep 17 00:00:00 2001 From: Drew Oldag Date: Mon, 31 Jul 2023 16:39:36 -0700 Subject: [PATCH] Initial commit --- .copier-answers.yml | 16 ++ .github/pull_request_template.md | 63 ++++++++ .github/workflows/build-documentation.yml | 34 +++++ .github/workflows/linting.yml | 36 +++++ .github/workflows/publish-to-pypi.yml | 39 +++++ .github/workflows/smoke-test.yml | 38 +++++ .github/workflows/testing-and-coverage.yml | 37 +++++ .gitignore | 140 ++++++++++++++++++ .pre-commit-config.yaml | 100 +++++++++++++ .readthedocs.yml | 22 +++ README.md | 14 +- docs/Makefile | 31 ++++ docs/community_modules.rst | 6 + docs/community_modules/sinusoidal.rst | 6 + docs/conf.py | 47 ++++++ docs/contributing.rst | 33 +++++ docs/index.rst | 17 +++ docs/notebooks.rst | 6 + .../sinusoidal/sinusoidal_example.ipynb | 126 ++++++++++++++++ docs/requirements.txt | 9 ++ pyproject.toml | 59 ++++++++ .../sinusoidal/sinusoidal_lightcurve.py | 27 ++++ tests/conftest.py | 0 tests/sinusoidal/test_sinusoidal.py | 22 +++ 24 files changed, 927 insertions(+), 1 deletion(-) create mode 100644 .copier-answers.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/build-documentation.yml create mode 100644 .github/workflows/linting.yml create mode 100644 .github/workflows/publish-to-pypi.yml create mode 100644 .github/workflows/smoke-test.yml create mode 100644 .github/workflows/testing-and-coverage.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .readthedocs.yml create mode 100644 docs/Makefile create mode 100644 docs/community_modules.rst create mode 100644 docs/community_modules/sinusoidal.rst create mode 100644 docs/conf.py create mode 100644 docs/contributing.rst create mode 100644 docs/index.rst create mode 100644 docs/notebooks.rst create mode 100644 docs/notebooks/sinusoidal/sinusoidal_example.ipynb create mode 100644 docs/requirements.txt create mode 100644 pyproject.toml create mode 100644 src/sorcha_community_utils/sinusoidal/sinusoidal_lightcurve.py create mode 100644 tests/conftest.py create mode 100644 tests/sinusoidal/test_sinusoidal.py diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 0000000..1faeeea --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,16 @@ +# Changes here will be overwritten by Copier +_commit: v1.4.1 +_src_path: gh:lincc-frameworks/python-project-template +author_email: m.schwamb@qub.ac.uk +author_name: Meg Schwamb +create_example_module: false +custom_install: true +include_docs: true +include_notebooks: true +mypy_type_checking: none +package_name: sinusoidal +preferred_linter: black +project_license: none +project_name: sorcha_community_utils +use_gitlfs: none +use_isort: false diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..76e043c --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,63 @@ + + +## Change Description + +- [ ] My PR includes a link to the issue that I am addressing + + + +## Solution Description + + + + +## Code Quality +- [ ] I have read the Contribution Guide +- [ ] My code follows the code style of this project +- [ ] My code builds (or compiles) cleanly without any errors or warnings +- [ ] My code contains relevant comments and necessary documentation + +## Project-Specific Pull Request Checklists + + +### Bug Fix Checklist +- [ ] My fix includes a new test that breaks as a result of the bug (if possible) +- [ ] My change includes 
a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) + +### New Feature Checklist +- [ ] I have added or updated the docstrings associated with my feature using the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html) +- [ ] I have updated the tutorial to highlight my new feature (if appropriate) +- [ ] I have added unit/End-to-End (E2E) test cases to cover my new feature +- [ ] My change includes a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) + +### Documentation Change Checklist +- [ ] Any updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html) + +### Build/CI Change Checklist +- [ ] If required or optional dependencies have changed (including version numbers), I have updated the README to reflect this +- [ ] If this is a new CI setup, I have added the associated badge to the README + + + +### Other Change Checklist +- [ ] Any new or updated docstrings use the [NumPy docstring format](https://numpydoc.readthedocs.io/en/latest/format.html). +- [ ] I have updated the tutorial to highlight my new feature (if appropriate) +- [ ] I have added unit/End-to-End (E2E) test cases to cover any changes +- [ ] My change includes a breaking change + - [ ] My change includes backwards compatibility and deprecation warnings (if possible) diff --git a/.github/workflows/build-documentation.yml b/.github/workflows/build-documentation.yml new file mode 100644 index 0000000..e329f57 --- /dev/null +++ b/.github/workflows/build-documentation.yml @@ -0,0 +1,34 @@ +# This workflow will install Python dependencies, build the package and then build the documentation. + +name: Build documentation + + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + if [ -f docs/requirements.txt ]; then pip install -r docs/requirements.txt; fi + pip install . + - name: Install notebook requirements + run: | + sudo apt-get install pandoc + - name: Build docs + run: | + sphinx-build -T -E -b html -d docs/build/doctrees ./docs docs/build/html diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 0000000..dfcf56b --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,36 @@ +# This workflow will install Python dependencies, then perform static linting analysis. +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Lint + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10'] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install . 
+ pip install .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Analyze code with linter + + uses: psf/black@stable + with: + src: ./src diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml new file mode 100644 index 0000000..5367eb6 --- /dev/null +++ b/.github/workflows/publish-to-pypi.yml @@ -0,0 +1,39 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/smoke-test.yml b/.github/workflows/smoke-test.yml new file mode 100644 index 0000000..928fdca --- /dev/null +++ b/.github/workflows/smoke-test.yml @@ -0,0 +1,38 @@ +# This workflow will run daily at 06:45. +# It will install Python dependencies and run tests with a variety of Python versions. +# See documentation for help debugging smoke test issues: +# https://lincc-ppt.readthedocs.io/en/latest/practices/ci_testing.html#version-culprit + +name: Unit test smoke test + +on: + schedule: + - cron: 45 6 * * * + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10'] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install . 
+ pip install .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: List dependencies + run: | + pip list + - name: Run unit tests with pytest + run: | + python -m pytest tests diff --git a/.github/workflows/testing-and-coverage.yml b/.github/workflows/testing-and-coverage.yml new file mode 100644 index 0000000..66a8dcb --- /dev/null +++ b/.github/workflows/testing-and-coverage.yml @@ -0,0 +1,37 @@ +# This workflow will install Python dependencies, run tests and report code coverage with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Unit test and code coverage + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10'] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + python -m pip install --upgrade pip + pip install . + pip install .[dev] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Run unit tests with pytest + run: | + python -m pytest tests --cov=sinusoidal --cov-report=xml + - name: Upload coverage report to codecov + uses: codecov/codecov-action@v3 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..523b316 --- /dev/null +++ b/.gitignore @@ -0,0 +1,140 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +_version.py + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +_readthedocs/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# vscode +.vscode/ + +# dask +dask-worker-space/ + +# tmp directory +tmp/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..5c8c675 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,100 @@ +repos: + + # Compare the local template version to the latest remote template version + # This hook should always pass. It will print a message if the local version + # is out of date. + - repo: https://github.com/lincc-frameworks/pre-commit-hooks + rev: v0.1.1 + hooks: + - id: check-lincc-frameworks-template-version + name: Check template version + description: Compare current template version against latest + verbose: true + + # Clear output from jupyter notebooks so that only the input cells are committed. + - repo: local + hooks: + - id: jupyter-nb-clear-output + name: Clear output from Jupyter notebooks + description: Clear output from Jupyter notebooks. + files: \.ipynb$ + stages: [commit] + language: system + entry: jupyter nbconvert --clear-output + + # Run unit tests, verify that they pass. Note that coverage is run against + # the ./src directory here because that is what will be committed. In the + # github workflow script, the coverage is run against the installed package + # and uploaded to Codecov by calling pytest like so: + # `python -m pytest --cov= --cov-report=xml` + - repo: local + hooks: + - id: pytest-check + name: Run unit tests + description: Run unit tests with pytest. + entry: bash -c "if python -m pytest --co -qq; then python -m pytest --cov=./src --cov-report=html; fi" + language: system + pass_filenames: false + always_run: true + + # prevents committing directly branches named 'main' and 'master'. + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: no-commit-to-branch + name: Prevent main branch commits + description: Prevent the user from committing directly to the primary branch. + - id: check-added-large-files + name: Check for large files + description: Prevent the user from committing very large files. + args: ['--maxkb=500'] + + # verify that pyproject.toml is well formed + - repo: https://github.com/abravalheri/validate-pyproject + rev: v0.12.1 + hooks: + - id: validate-pyproject + name: Validate pyproject.toml + description: Verify that pyproject.toml adheres to the established schema. + + + # Analyze the code style and report code that doesn't adhere. + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + types: [python] + files: ^(src|tests)/ + # It is recommended to specify the latest version of Python + # supported by your project here, or alternatively use + # pre-commit's default_language_version, see + # https://pre-commit.com/#top_level-default_language_version + language_version: python3.10 + + + # Make sure Sphinx can build the documentation while explicitly omitting + # notebooks from the docs, so users don't have to wait through the execution + # of each notebook or each commit. By default, these will be checked in the + # GitHub workflows. 
+ - repo: local + hooks: + - id: sphinx-build + name: Build documentation with Sphinx + entry: sphinx-build + language: system + always_run: true + exclude_types: [file, symlink] + args: + [ + "-M", # Run sphinx in make mode, so we can use -D flag later + # Note: -M requires next 3 args to be builder, source, output + "html", # Specify builder + "./docs", # Source directory of documents + "./_readthedocs", # Output directory for rendered documents + "-T", # Show full trace back on exception + "-E", # Don't use saved env; always read all files + "-d", # Flag for cached environment and doctrees + "./docs/_build/doctrees", # Directory + "-D", # Flag to override settings in conf.py + "exclude_patterns=notebooks/*", # Exclude our notebooks from pre-commit + ] diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..79bfc27 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,22 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: docs/requirements.txt + - method: pip + path: . diff --git a/README.md b/README.md index 32d295f..f12e91b 100644 --- a/README.md +++ b/README.md @@ -1 +1,13 @@ -This is a repository for community generated light curve and cometary activity functions for the [sorcha](https://github.com/dirac-institute/sorcha) Solar System survey simulator software package. +# Sorcha Community Utilities + + +[![ci](https://github.com/dirac-institute/sorcha_community_utils/actions/workflows/smoke-test.yml/badge.svg)](https://github.com/dirac-institute/sorcha_community_utils/actions/workflows/smoke-test.yml) +[![pytest](https://github.com/dirac-institute/sorcha_community_utils/actions/workflows/testing-and-coverage.yml/badge.svg)](https://github.com/dirac-institute/sorcha_community_utils/actions/workflows/testing-and-coverage.yml) +[![Documentation Status](https://readthedocs.org/projects/sorcha_community_utils/badge/?version=latest)](https://sorcha_community_utils.readthedocs.io/en/latest/?badge=latest) + +[![Template](https://img.shields.io/badge/Template-LINCC%20Frameworks%20Python%20Project%20Template-brightgreen)](https://lincc-ppt.readthedocs.io/en/latest/) + +This is a repository for community generated light curve and cometary activity +functions for the [sorcha](https://github.com/dirac-institute/sorcha) Solar +System survey simulator software package. + diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..a5622f1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,31 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= -T -E -d _build/doctrees -D language=en +EXCLUDENB ?= -D exclude_patterns="notebooks/*","_build","**.ipynb_checkpoints" +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = ../_readthedocs/ + +.PHONY: help clean Makefile no-nb no-notebooks + +# Put it first so that "make" without argument is like "make help". 
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+# Build all Sphinx docs locally, except the notebooks
+no-nb no-notebooks:
+	@$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(EXCLUDENB) $(O)
+
+# Cleans up files generated by the build process
+clean:
+	rm -r "_build/doctrees"
+	rm -r "$(BUILDDIR)"
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
diff --git a/docs/community_modules.rst b/docs/community_modules.rst
new file mode 100644
index 0000000..785fc91
--- /dev/null
+++ b/docs/community_modules.rst
@@ -0,0 +1,6 @@
+Community Modules
+========================================================================================
+
+.. NOTE: Please add new modules alphabetically
+.. toctree::
+    Sinusoidal Lightcurve <community_modules/sinusoidal>
diff --git a/docs/community_modules/sinusoidal.rst b/docs/community_modules/sinusoidal.rst
new file mode 100644
index 0000000..c561d55
--- /dev/null
+++ b/docs/community_modules/sinusoidal.rst
@@ -0,0 +1,6 @@
+Sinusoidal Lightcurve
+=====================
+
+Basic implementation of a sinusoidal light curve.
+
+To cite this work: TODO
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..499ec9d
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,47 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+
+import os
+import sys
+
+import autoapi
+from importlib.metadata import version
+
+# Define path to the code to be documented **relative to where conf.py (this file) is kept**
+sys.path.insert(0, os.path.abspath('../src/'))
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = "sorcha_community_utils"
+copyright = "2023, Meg Schwamb"
+author = "Meg Schwamb"
+release = version("sorcha_community_utils")
+# for example take major/minor
+version = ".".join(release.split(".")[:2])
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["sphinx.ext.mathjax", "sphinx.ext.napoleon", "sphinx.ext.viewcode"]
+
+extensions.append("autoapi.extension")
+extensions.append("nbsphinx")
+
+templates_path = []
+exclude_patterns = ['_build', '**.ipynb_checkpoints']
+
+master_doc = "index"  # This assumes that sphinx-build is called from the root directory
+html_show_sourcelink = False  # Remove 'view source code' from top of page (for html, not python)
+add_module_names = False  # Remove namespaces from class/method signatures
+
+autoapi_type = "python"
+autoapi_dirs = ["../src"]
+autoapi_ignore = ["*/__main__.py", "*/_version.py"]
+autoapi_add_toc_tree_entry = False
+autoapi_member_order = "bysource"
+
+html_theme = "sphinx_rtd_theme"
diff --git a/docs/contributing.rst b/docs/contributing.rst
new file mode 100644
index 0000000..4512566
--- /dev/null
+++ b/docs/contributing.rst
@@ -0,0 +1,33 @@
+Contribution guide
+==================
+
+**Work in Progress**
+
+Create your environment
+-----------------------
+
+Clone the source code with ``git clone ...``.
+
+Install the package in developer mode with ``pip install -e .[dev]``.
+ +Initialize pre-commit with ``pre-commit install``. + + +Create your Contribution +------------------------ + +Add src code in new module under ``.../src/socha_community_utils`` + +Add tests in new folder under ``.../tests/`` + +Add example notebook in ``.../docs/notebooks/``. Update ``.../docs/notebooks.rst``. + +Add documentation page in ``.../docs/community_modules``. Update ``.../docs/community_modules.rst``. +Be sure to include information about how to cite your work. + +Create a PR for review +---------------------- + +Commit your changes to a new git branch and create a PR for review. + +Add as the reviewer. diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..6a2cd4d --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,17 @@ +.. sinusoidal documentation main file. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Sorcha's community utilities documentation! +======================================================================================== + + + +.. toctree:: + :hidden: + + Home page + Contributing + Community Modules + API Reference + Notebooks diff --git a/docs/notebooks.rst b/docs/notebooks.rst new file mode 100644 index 0000000..fcf0e0d --- /dev/null +++ b/docs/notebooks.rst @@ -0,0 +1,6 @@ +Notebooks +======================================================================================== + +.. NOTE: Please add new notebooks alphabetically +.. toctree:: + Sinusoidal Lightcurve diff --git a/docs/notebooks/sinusoidal/sinusoidal_example.ipynb b/docs/notebooks/sinusoidal/sinusoidal_example.ipynb new file mode 100644 index 0000000..4eabc8c --- /dev/null +++ b/docs/notebooks/sinusoidal/sinusoidal_example.ipynb @@ -0,0 +1,126 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Sinusoidal Lightcurve Example" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# Import the SinusoidalLightCurve class into the current scope\n", + "from sorcha_community_utils.sinusoidal.sinusoidal_lightcurve import SinusoidalLightCurve\n", + "from sorcha.lightcurves.lightcurve_registration import update_lc_subclasses, LC_METHODS" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# Update the `LC_METHODS` registration dictionary\n", + "update_lc_subclasses()\n", + "\n", + "# Show that SinusoidalLightcurve is now registered and available.\n", + "print(LC_METHODS['sinusoidal'])" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " FieldMJD LCA Period Time0\n", + "0 60277.351867 1 0.001 60277.351867\n", + "1 60289.319749 1 0.001 60277.351867\n", + "2 60289.330920 1 0.001 60277.351867\n", + "3 60292.334497 1 0.001 60277.351867\n", + "4 60292.346208 1 0.001 60277.351867\n" + ] + } + ], + "source": [ + "# make a small pandas dataframe with ~5 rows\n", + "import pandas as pd\n", + "\n", + "data_dict = {\n", + " 'FieldMJD': [60277.351867, 60289.319749, 60289.330920, 60292.334497, 60292.346208],\n", + " 'LCA': [1, 1, 1, 1, 1],\n", + " 'Period': [0.001, 0.001, 0.001, 0.001, 0.001],\n", + " 'Time0': [60277.351867, 60277.351867, 60277.351867, 60277.351867, 60277.351867],\n", + " }\n", + "\n", + "df = pd.DataFrame.from_dict(data_dict)\n", + "print(df)" + ] + }, + { + 
"cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 -0.340932\n", + "1 -0.934792\n", + "2 0.186596\n", + "3 -0.026723\n", + "4 0.737016\n", + "dtype: float64\n" + ] + } + ], + "source": [ + "# instantiate the sinusoidal class\n", + "lc_model = LC_METHODS['sinusoidal']()\n", + "\n", + "# compute the change in magnitude using the dataframe created above\n", + "output = lc_model.compute(df)\n", + "\n", + "print(output)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "sorcha_community_utils", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..5c0f7d8 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,9 @@ +sphinx==6.1.3 +sphinx-rtd-theme==1.2.0 +sphinx-autoapi==2.0.1 +nbsphinx +ipython +jupytext +jupyter +matplotlib +numpy diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..f6487c2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,59 @@ +[project] +name = "sorcha_community_utils" +license = {file = "LICENSE"} +readme = "README.md" +authors = [ + { name = "Meg Schwamb", email = "m.schwamb@qub.ac.uk" } +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Operating System :: OS Independent", + "Programming Language :: Python", +] +dynamic = ["version"] +dependencies = [ + "pandas", + "numpy", + "sorcha", +] + +# On a mac, install optional dependencies with `pip install '.[dev]'` (include the single quotes) +[project.optional-dependencies] +dev = [ + "pytest", + "pytest-cov", # Used to report total code coverage + "pre-commit", # Used to run checks before finalizing a git commit + "sphinx==6.1.3", # Used to automatically generate documentation + "sphinx-rtd-theme==1.2.0", # Used to render documentation + "sphinx-autoapi==2.0.1", # Used to automatically generate api documentation + "black", # Used for static linting of files + # if you add dependencies here while experimenting in a notebook and you + # want that notebook to render in your documentation, please add the + # dependencies to ./docs/requirements.txt as well. + "nbconvert", # Needed for pre-commit check to clear output from Python notebooks + "nbsphinx", # Used to integrate Python notebooks into Sphinx documentation + "ipython", # Also used in building notebooks into Sphinx + "matplotlib", # Used in sample notebook intro_notebook.ipynb + "numpy", # Used in sample notebook intro_notebook.ipynb +] + +[build-system] +requires = [ + "setuptools>=62", # Used to build and package the Python project + "setuptools_scm>=6.2", # Gets release version from git. 
Makes it available programmatically
+]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools_scm]
+write_to = "src/sorcha_community_utils/_version.py"
+
+[tool.pytest.ini_options]
+testpaths = [
+    "tests",
+]
+
+[tool.black]
+line-length = 110
+
diff --git a/src/sorcha_community_utils/sinusoidal/sinusoidal_lightcurve.py b/src/sorcha_community_utils/sinusoidal/sinusoidal_lightcurve.py
new file mode 100644
index 0000000..edbe265
--- /dev/null
+++ b/src/sorcha_community_utils/sinusoidal/sinusoidal_lightcurve.py
@@ -0,0 +1,27 @@
+from sorcha.lightcurves.base_lightcurve import AbstractLightCurve
+
+from typing import List
+import pandas as pd
+import numpy as np
+
+class SinusoidalLightCurve(AbstractLightCurve):
+    """
+    Note: assuming sinusoidal in magnitude instead of flux. Maybe not call LCA?
+    """
+
+    def __init__(self, required_column_names: List[str] = ["FieldMJD", "LCA", "Period", "Time0"]) -> None:
+        super().__init__(required_column_names)
+
+    def compute(self, df: pd.DataFrame) -> np.ndarray:
+        """
+        Compute the sinusoidal light-curve magnitude offset for each row of the input dataframe.
+        """
+
+        self._validate_column_names(df)
+
+        modtime = np.mod(df["FieldMJD"] / df["Period"] + df["Time0"], 2 * np.pi)
+        return df["LCA"] * np.sin(modtime)
+
+    @staticmethod
+    def name_id() -> str:
+        return "sinusoidal"
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/sinusoidal/test_sinusoidal.py b/tests/sinusoidal/test_sinusoidal.py
new file mode 100644
index 0000000..2183abc
--- /dev/null
+++ b/tests/sinusoidal/test_sinusoidal.py
@@ -0,0 +1,22 @@
+from sorcha_community_utils.sinusoidal.sinusoidal_lightcurve import SinusoidalLightCurve
+import numpy as np
+import pandas as pd
+
+def test_sinusoidal_lightcurve_name():
+    assert 'sinusoidal' == SinusoidalLightCurve.name_id()
+
+
+def test_compute_simple():
+    data_dict = {
+        'FieldMJD': [0],
+        'LCA': [1],
+        'Period': [1],
+        'Time0': [0],
+    }
+
+    df = pd.DataFrame.from_dict(data_dict)
+
+    model = SinusoidalLightCurve()
+    output = model.compute(df)
+
+    assert output.values[0] == 0
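
Quick-start sketch: a minimal usage example based on the example notebook and unit test added above. It assumes the sorcha registration helpers imported in the notebook (update_lc_subclasses and LC_METHODS) are available from the installed sorcha package.

    # Register all imported AbstractLightCurve subclasses with sorcha, then
    # look up the new model by the identifier returned by name_id().
    import pandas as pd

    from sorcha.lightcurves.lightcurve_registration import LC_METHODS, update_lc_subclasses
    from sorcha_community_utils.sinusoidal.sinusoidal_lightcurve import SinusoidalLightCurve

    update_lc_subclasses()
    lc_model = LC_METHODS[SinusoidalLightCurve.name_id()]()  # name_id() returns "sinusoidal"

    # compute() expects the four required columns declared in __init__.
    df = pd.DataFrame(
        {
            "FieldMJD": [60277.351867, 60289.319749],
            "LCA": [1.0, 1.0],
            "Period": [0.001, 0.001],
            "Time0": [60277.351867, 60277.351867],
        }
    )

    delta_mag = lc_model.compute(df)  # per-observation magnitude offsets
    print(delta_mag)

The model can also be instantiated directly, as the unit test does, when the sorcha registry is not needed.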