diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 26c23f9..5172e0a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -33,4 +33,5 @@ jobs: tes - name: Build docs - run: mkdocs build + run: mkdocs build && mkdocs gh-deploy --force + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8425a66..72b2116 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,7 +9,19 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + # TODO: Handle multiple TES versions here, currently we're only testing against TES 1.1 + # - py-tes v1.0 — TES 1.0 + # - py-tes v1.1 — TES 1.1 + # tes-version: + # - "1.0" + # - "1.1" + python-version: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + - "3.12" steps: - name: Check out code @@ -18,7 +30,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: ${{ matrix.version }} + python-version: ${{ matrix.python-version }} - name: Install requirements run: | @@ -42,6 +54,6 @@ jobs: - name: Run integration tests run: | - /bin/bash -c "$(curl -fsSL https://github.com/ohsu-comp-bio/funnel/releases/download/0.11.0-rc.5/install.sh)" -- 0.11.0-rc.5 + /bin/bash -c "$(curl -fsSL https://github.com/ohsu-comp-bio/funnel/releases/latest/download/install.sh)" funnel server --LocalStorage.AllowedDirs $HOME run & pytest tests/integration diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..5ad5180 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,34 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.12" + # You can also specify other tool versions: + # nodejs: "19" + # rust: "1.64" + # golang: "1.19" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +formats: + - pdf + - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - method: setuptools + path: . + - requirements: docs/requirements.txt diff --git a/README.md b/README.md index d35a00c..ff37eb3 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,58 @@ # py-tes 🐍 -[![GitHub Actions Test Status](https://img.shields.io/github/actions/workflow/status/ohsu-comp-bio/py-tes/tests.yml?logo=github)](https://github.com/ohsu-comp-bio/py-tes/actions) [![image](https://coveralls.io/repos/github/ohsu-comp-bio/py-tes/badge.svg?branch=master)](https://coveralls.io/github/ohsu-comp-bio/py-tes?branch=master) [![image](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +[![Build Status][build-badge]][build] -*py-tes* is a library for interacting with servers implementing the -[GA4GH Task Execution -Schema](https://github.com/ga4gh/task-execution-schemas). +[![Test Coverage][coverage-badge]][coverage] -## Install ⚡ +[![License][license-badge]][license] -Available on [PyPI](https://pypi.org/project/py-tes/). 
+[![PyPI][pypi-badge]][pypi] - pip install py-tes +[build-badge]: https://img.shields.io/github/actions/workflow/status/ohsu-comp-bio/py-tes/tests.yml?logo=github +[build]: https://github.com/ohsu-comp-bio/py-tes/actions +[coverage-badge]: https://coveralls.io/repos/github/ohsu-comp-bio/py-tes/badge.svg?branch=master +[coverage]: https://coveralls.io/github/ohsu-comp-bio/py-tes?branch=master +[license-badge]: https://img.shields.io/badge/License-MIT-yellow.svg +[license]: https://opensource.org/licenses/MIT +[pypi-badge]: https://img.shields.io/pypi/v/py-tes +[pypi]: https://pypi.org/project/py-tes/ -## Example ✍️ +_py-tes_ is a library for interacting with servers implementing the [GA4GH Task Execution Schema](https://github.com/ga4gh/task-execution-schemas). +# Quick Start ⚡ -``` python +| TES version | py-tes version | Example Notebook (_Coming soon!_) | +|-----------------|------------------------|-----------------------------------------------| +| [1.1][tes-v1.1] | [1.1.0][py-tes-v1.1.0] | [![Open in Colab][colab-badge]][colab-v1.1.0] | +| [1.0][tes-v1.1] | [1.0.0][py-tes-v1.0.0] | [![Open in Colab][colab-badge]][colab-v1.0.0] | + +[tes-v1.1]: https://github.com/ga4gh/task-execution-schemas/releases/tag/v1.1 +[tes-v1.0]: https://github.com/ga4gh/task-execution-schemas/releases/tag/v1.1 + +[py-tes-v1.1.0]: https://github.com/ohsu-comp-bio/py-tes/releases/tag/1.1.0 +[py-tes-v1.0.0]: https://github.com/ohsu-comp-bio/py-tes/releases/tag/1.0.0 + +[colab-badge]: https://colab.research.google.com/assets/colab-badge.svg +[colab-v1.1.0]: https://colab.research.google.com/github/ohsu-comp-bio/py-tes/blob/develop/examples/v1_1_0.ipynb +[colab-v1.0.0]: https://colab.research.google.com/github/ohsu-comp-bio/py-tes/blob/develop/examples/v1_0_0.ipynb + +# Installation 🌀 + +Install `py-tes` from [PyPI](https://pypi.org/project/py-tes/) and run it in your script: + +```sh +➜ pip install py-tes + +➜ python example.py +``` + +## example.py 🐍 + +```py import tes +import json -# define task +# Define task task = tes.Task( executors=[ tes.Executor( @@ -28,89 +62,90 @@ task = tes.Task( ] ) -# create client -cli = tes.HTTPClient("https://tes.example.com", timeout=5) +# Create client +cli = tes.HTTPClient("http://localhost:8000", timeout=5) -# access endpoints -service_info = cli.get_service_info() +# Create and run task task_id = cli.create_task(task) +cli.wait(task_id, timeout=5) + +# Fetch task info task_info = cli.get_task(task_id, view="BASIC") -cli.cancel_task(task_id) -tasks_list = cli.list_tasks(view="MINIMAL") # default view +j = json.loads(task_info.as_json()) + +# Pretty print task info +print(json.dumps(j, indent=2)) ``` -## How to... +# How to... > Makes use of the objects above... 
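The sections below refer to a `tasks_list` object that the quick-start example above no longer creates. As a minimal sketch (assuming a TES server such as Funnel is running locally on port 8000, as in the example above), the client calls shown in the previous version of this example — `get_service_info`, `list_tasks`, and `cancel_task` — can be used like this:

```python
import tes

# Assumes a TES server (e.g. Funnel) is listening on localhost:8000.
cli = tes.HTTPClient("http://localhost:8000", timeout=5)

# Inspect the service metadata.
service_info = cli.get_service_info()
print(service_info)

# Submit a task so there is something to list and cancel.
task = tes.Task(
    executors=[tes.Executor(image="alpine", command=["echo", "hello"])]
)
task_id = cli.create_task(task)

# List tasks; "MINIMAL" is the default view.
tasks_list = cli.list_tasks(view="MINIMAL")

# Cancel the task by id.
cli.cancel_task(task_id)
```

The `tasks_list` value returned here is the object used in the task-list examples that follow.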
-### ...export a model to a dictionary +## ...export a model to a dictionary -``` python +```python task_dict = task.as_dict(drop_empty=False) ``` `task_dict` contents: -``` console +```console {'id': None, 'state': None, 'name': None, 'description': None, 'inputs': None, 'outputs': None, 'resources': None, 'executors': [{'image': 'alpine', 'command': ['echo', 'hello'], 'workdir': None, 'stdin': None, 'stdout': None, 'stderr': None, 'env': None}], 'volumes': None, 'tags': None, 'logs': None, 'creation_time': None} ``` -### ...export a model to JSON +## ...export a model to JSON -``` python +```python task_json = task.as_json() # also accepts `drop_empty` arg ``` `task_json` contents: -``` console +```console {"executors": [{"image": "alpine", "command": ["echo", "hello"]}]} ``` -### ...pretty print a model +## ...pretty print a model -``` python +```python print(task.as_json(indent=3)) # keyword args are passed to `json.dumps()` ``` Output: -``` json +```json { - "executors": [ - { - "image": "alpine", - "command": [ - "echo", - "hello" - ] - } - ] + "executors": [ + { + "image": "alpine", + "command": ["echo", "hello"] + } + ] } ``` -### ...access a specific task from the task list +## ...access a specific task from the task list -``` python +```py specific_task = tasks_list.tasks[5] ``` `specific_task` contents: -``` console +```sh Task(id='393K43', state='COMPLETE', name=None, description=None, inputs=None, outputs=None, resources=None, executors=None, volumes=None, tags=None, logs=None, creation_time=None) ``` -### ...iterate over task list items +## ...iterate over task list items -``` python +```py for t in tasks_list[:3]: print(t.as_json(indent=3)) ``` Output: -``` console +```sh { "id": "task_A2GFS4", "state": "RUNNING" @@ -125,31 +160,31 @@ Output: } ``` -### ...instantiate a model from a JSON representation +## ...instantiate a model from a JSON representation -``` python +```py task_from_json = tes.client.unmarshal(task_json, tes.Task) ``` `task_from_json` contents: -``` console +```sh Task(id=None, state=None, name=None, description=None, inputs=None, outputs=None, resources=None, executors=[Executor(image='alpine', command=['echo', 'hello'], workdir=None, stdin=None, stdout=None, stderr=None, env=None)], volumes=None, tags=None, logs=None, creation_time=None) ``` Which is equivalent to `task`: -``` python +```py print(task_from_json == task) ``` Output: -``` console +```sh True ``` -## Additional Resources 📚 +# Additional Resources 📚 - [ga4gh-tes](https://github.com/microsoft/ga4gh-tes) : C# implementation of the GA4GH TES API; provides distributed batch task execution on Microsoft Azure @@ -161,8 +196,8 @@ True - [Nextflow](https://www.nextflow.io/): Nextflow enables scalable and reproducible scientific workflows using software containers. It allows the adaptation of pipelines written in the most common scripting languages. -- [GA4GH TES](https://www.ga4gh.org/product/task-execution-service-tes/): Main page for the Task Execution Schema — a standardized schema and API for describing batch execution tasks. +- [GA4GH TES](https://www.ga4gh.org/product/task-execution-service-tes/): Main page for the Task Execution Schema — a standardized schema and API for describing batch execution tasks. 
-- [TES GitHub](https://github.com/ga4gh/task-execution-schemas): Source repo for the Task Execution Schema +- [TES GitHub](https://github.com/ga4gh/task-execution-schemas): Source repo for the Task Execution Schema -- [Awesome TES](https://github.com/ohsu-comp-bio/awesome-tes): A curated list of awesome GA4GH TES projects and programs +- [Awesome TES](https://github.com/ohsu-comp-bio/awesome-tes): A curated list of awesome GA4GH TES projects and programs diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..35844d9 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
+ +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Snakemake.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Snakemake.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Snakemake" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Snakemake" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." 
+ +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/_static/custom.css b/docs/_static/custom.css new file mode 100644 index 0000000..99e61fe --- /dev/null +++ b/docs/_static/custom.css @@ -0,0 +1,83 @@ +.image-reference img { + display: inline; + background: none !important; +} + +.image-reference { + border-bottom: none !important; +} + +:root { + --color-links: #047857; + --color-brand: #047857; +} + +#content { + overflow-x: auto; +} + +.sticky.top-16 { + overflow: unset; + } + +/* Light theme styles (default) */ +body.dark-theme { + background-color: black !important; +} + +/* Dark theme styles */ +body.light-theme { + background-color: blue !important; +} + +pre.light-theme .kn { color: #2838b0 !important} /* Keyword.Namespace */ +pre.light-theme .kp { color: #2838b0 !important} /* Keyword.Pseudo */ +pre.light-theme .kr { color: #2838b0 !important} /* Keyword.Reserved */ +pre.light-theme .kt { color: #2838b0; font-style: italic !important} /* Keyword.Type */ +pre.light-theme .m { color: #444444 !important} /* Literal.Number */ +pre.light-theme .s { color: #b83838 !important} /* Literal.String */ +pre.light-theme .na { color: #388038 !important} /* Name.Attribute */ +pre.light-theme .nb { color: #388038 !important} /* Name.Builtin */ +pre.light-theme .nc { color: #287088 !important} /* Name.Class */ +pre.light-theme .no { color: #b85820 !important} /* Name.Constant */ +pre.light-theme .nd { color: #287088 !important} /* Name.Decorator */ +pre.light-theme .ni { color: #709030 !important} /* Name.Entity */ +pre.light-theme .ne { color: #908828 !important} /* Name.Exception */ +pre.light-theme .nf { color: #785840 !important} /* Name.Function */ +pre.light-theme .nl { color: #289870 !important} /* Name.Label */ +pre.light-theme .nn { color: #289870 !important} /* Name.Namespace */ +pre.light-theme .nt { color: #2838b0 !important} /* Name.Tag */ +pre.light-theme .nv { color: #b04040 !important} /* Name.Variable */ +pre.light-theme .ow { color: #a848a8 !important} /* Operator.Word */ +pre.light-theme .pm { color: #888888 !important} /* Punctuation.Marker */ +pre.light-theme .w { color: #a89028 !important} /* Text.Whitespace */ +pre.light-theme .mb { color: #444444 !important} /* Literal.Number.Bin */ +pre.light-theme .mf { color: #444444 !important} /* Literal.Number.Float */ +pre.light-theme .mh { color: #444444 !important} /* Literal.Number.Hex */ +pre.light-theme .mi { color: #444444 !important} /* Literal.Number.Integer */ +pre.light-theme .mo { color: #444444 !important} /* Literal.Number.Oct */ +pre.light-theme .sa { color: #444444 !important} /* Literal.String.Affix */ +pre.light-theme .sb { color: #b83838 !important} /* Literal.String.Backtick */ +pre.light-theme .sc { color: #a848a8 !important} /* Literal.String.Char */ +pre.light-theme .dl { color: #b85820 !important} /* Literal.String.Delimiter */ +pre.light-theme .sd { color: #b85820; font-style: italic !important} /* Literal.String.Doc */ +pre.light-theme .s2 { color: #b83838 !important} /* Literal.String.Double */ +pre.light-theme .se { color: #709030 !important} /* 
Literal.String.Escape */ +pre.light-theme .sh { color: #b83838 !important} /* Literal.String.Heredoc */ +pre.light-theme .si { color: #b83838; text-decoration: underline !important} /* Literal.String.Interpol */ +pre.light-theme .sx { color: #a848a8 !important} /* Literal.String.Other */ +pre.light-theme .sr { color: #a848a8 !important} /* Literal.String.Regex */ +pre.light-theme .s1 { color: #b83838 !important} /* Literal.String.Single */ +pre.light-theme .ss { color: #b83838 !important} /* Literal.String.Symbol */ +pre.light-theme .bp { color: #388038; font-style: italic !important} /* Name.Builtin.Pseudo */ +pre.light-theme .fm { color: #b85820 !important} /* Name.Function.Magic */ +pre.light-theme .vc { color: #b04040 !important} /* Name.Variable.Class */ +pre.light-theme .vg { color: #908828 !important} /* Name.Variable.Global */ +pre.light-theme .vi { color: #b04040 !important} /* Name.Variable.Instance */ +pre.light-theme .vm { color: #b85820 !important} /* Name.Variable.Magic */ +pre.light-theme .il { color: #444444 !important} /* Literal.Number.Integer.Long */ +pre.light-theme .n { color: #444444 !important} /* Misc */ +pre.light-theme .o { color: #444 !important} /* Misc */ +pre.light-theme .p { color: #444 !important} /* Misc */ +pre.light-theme .k { color: #a848a8 !important} /* Misc */ +pre.light-theme .kc { color: #2838b0 !important} /* Misc */ \ No newline at end of file diff --git a/docs/_static/custom.js b/docs/_static/custom.js new file mode 100644 index 0000000..295b9b5 --- /dev/null +++ b/docs/_static/custom.js @@ -0,0 +1,28 @@ +// Select the button using its class, assuming it's the only one with this class +const nav = document.querySelector(".flex.items-center.space-x-1"); +console.log(nav); + +const themeToggleButton = nav.childNodes[3]; +console.log(themeToggleButton); + +mode = localStorage.getItem('darkMode'); +setTheme(mode); + +// Add an event listener to the button +themeToggleButton.addEventListener('click', function() { + mode = mode === 'light' ? 
'dark' : 'light' + setTheme(mode); +}); + +function setTheme(mode) { + var pres = document.body.getElementsByTagName("pre"); + for (let pre of pres) { + if (mode === 'dark') { + pre.classList.add('dark-theme'); + pre.classList.remove('light-theme'); + } else { + pre.classList.add('light-theme'); + pre.classList.remove('dark-theme'); + } + } +} diff --git a/docs/_static/logo-ga4gh-dark.png b/docs/_static/logo-ga4gh-dark.png new file mode 100644 index 0000000..a18b470 Binary files /dev/null and b/docs/_static/logo-ga4gh-dark.png differ diff --git a/docs/_static/logo-ga4gh-light.png b/docs/_static/logo-ga4gh-light.png new file mode 100644 index 0000000..6e42686 Binary files /dev/null and b/docs/_static/logo-ga4gh-light.png differ diff --git a/docs/_static/logo-snake.svg b/docs/_static/logo-snake.svg new file mode 100644 index 0000000..de13355 --- /dev/null +++ b/docs/_static/logo-snake.svg @@ -0,0 +1,109 @@ + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/_static/sphinx-argparse.css b/docs/_static/sphinx-argparse.css new file mode 100644 index 0000000..70ce1ab --- /dev/null +++ b/docs/_static/sphinx-argparse.css @@ -0,0 +1,6 @@ +.wy-table-responsive table td { + white-space: normal !important; +} +.wy-table-responsive { + overflow: visible !important; +} diff --git a/docs/_templates/page.html b/docs/_templates/page.html new file mode 100644 index 0000000..453f385 --- /dev/null +++ b/docs/_templates/page.html @@ -0,0 +1,10 @@ +{% extends "!page.html" %} + +{{ super() }} + +{% block extrahead %} + +{% endblock %} diff --git a/docs/api/tes.rst b/docs/api/tes.rst new file mode 100644 index 0000000..77c2a26 --- /dev/null +++ b/docs/api/tes.rst @@ -0,0 +1,34 @@ +py-tes +=========== + +.. toctree:: + :caption: API Docs + :name: api_docs + :hidden: + :maxdepth: 1 + + api_docs/tes + +tes.client module +----------------- + +.. automodule:: tes.client + :members: + :undoc-members: + :show-inheritance: + +tes.models module +----------------- + +.. automodule:: tes.models + :members: + :undoc-members: + :show-inheritance: + +tes.utils module +---------------- + +.. automodule:: tes.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api_docs/tes.rst b/docs/api_docs/tes.rst new file mode 100644 index 0000000..77c2a26 --- /dev/null +++ b/docs/api_docs/tes.rst @@ -0,0 +1,34 @@ +py-tes +=========== + +.. toctree:: + :caption: API Docs + :name: api_docs + :hidden: + :maxdepth: 1 + + api_docs/tes + +tes.client module +----------------- + +.. automodule:: tes.client + :members: + :undoc-members: + :show-inheritance: + +tes.models module +----------------- + +.. automodule:: tes.models + :members: + :undoc-members: + :show-inheritance: + +tes.utils module +---------------- + +.. automodule:: tes.utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..8842ce3 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,371 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Snakemake documentation build configuration file, created by +# sphinx-quickstart on Sat Feb 1 16:01:02 2014. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +from sphinxawesome_theme.postprocess import Icons +import tes + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("../")) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.mathjax", + "sphinx.ext.viewcode", + "sphinx.ext.napoleon", + "sphinxarg.ext", + "sphinx.ext.autosectionlabel", + "myst_parser", + "sphinxawesome_theme.highlighting", +] + +html_css_files = ["custom.css"] +html_js_files = ["custom.js"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "py-tes" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = tes.__version__ + +if os.environ.get("READTHEDOCS") == "True": + # Because Read The Docs modifies conf.py, versioneer gives a "dirty" + # version like "5.10.0+0.g28674b1.dirty" that is cleaned here. + version = version.partition("+0.g")[0] + +# The full version, including alpha/beta/rc tags. +release = version + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build", "apidocs"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "material" +pygments_dark_style = "material" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = "sphinxawesome_theme" +html_theme_options = { + "show_breadcrumbs": True, + "logo_light": "logo-ga4gh-light.png", + "logo_dark": "logo-ga4gh-dark.png", + "main_nav_links": { + "About": "https://github.com/ohsu-comp-bio/py-tes", + "Docs": "https://github.com/ohsu-comp-bio/py-tes", + "GitHub": "https://github.com/ohsu-comp-bio/py-tes", + }, + "awesome_external_links": True, + "awesome_headerlinks": True, + "show_prev_next": False, + "extra_header_link_icons": { + "repository on GitHub": { + "link": "https://github.com/ohsu-comp-bio/py-tes", + "icon": ( + '' + '' + ), + }, + }, +} +html_permalinks_icon = Icons.permalinks_icon +# html_theme_options = { +# "show_nav_level": 2, +# "header_links_before_dropdown": 0, +# "external_links": [ +# { +# "text": "Snakemake plugin catalog", +# "alt": "Snakemake plugin catalog", +# "href": "https://snakemake.github.io/snakemake-plugin-catalog", +# }, +# { +# "text": "Snakemake workflow catalog", +# "alt": "Snakemake workflow catalog", +# "href": "https://snakemake.github.io/snakemake-workflow-catalog", +# }, +# { +# "text": "Snakemake wrappers", +# "alt": "Snakemake wrappers", +# "href": "https://snakemake-wrappers.readthedocs.io", +# }, +# ], +# } +# html_theme_options = { +# "primary_color": "emerald", +# "secondary_color": "emerald", +# "dark_logo": "logo-snake.svg", +# "light_logo": "logo-snake.svg", +# "navigation_style": "plain", +# "sidebar_links": [ +# { +# "text": "Snakemake plugin catalog", +# "alt": "Snakemake plugin catalog", +# "href": "https://snakemake.github.io/snakemake-plugin-catalog", +# }, +# { +# "text": "Snakemake workflow catalog", +# "alt": "Snakemake workflow catalog", +# "href": "https://snakemake.github.io/snakemake-workflow-catalog", +# }, +# { +# "text": "Snakemake wrappers", +# "alt": "Snakemake wrappers", +# "href": "https://snakemake-wrappers.readthedocs.io", +# }, +# ], +# } + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = ["_static/css"] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {"index": "index.html"} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +# htmlhelp_basename = "Snakemakedoc" + + +# -- Options for LaTeX output --------------------------------------------- + +# latex_elements = { +# # The paper size ('letterpaper' or 'a4paper'). +# #'papersize': 'letterpaper', +# # The font size ('10pt', '11pt' or '12pt'). +# #'pointsize': '10pt', +# # Additional stuff for the LaTeX preamble. +# #'preamble': '', +# } + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +# latex_documents = [ +# ("index", "Snakemake.tex", "Snakemake Documentation", "Johannes Koester", "manual"), +# ] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +# man_pages = [("index", "snakemake", "Snakemake Documentation", ["Johannes Koester"], 1)] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +# texinfo_documents = [ +# ( +# "index", +# "Snakemake", +# "Snakemake Documentation", +# "Johannes Koester", +# "Snakemake", +# "One line description of project.", +# "Miscellaneous", +# ), +# ] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +def setup(app): + app.add_css_file("sphinx-argparse.css") diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..db2e882 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,77 @@ +.. automodule:: tes + +.. _manual-main: + +====== +py-tes +====== + +.. image:: https://travis-ci.org/ohsu-comp-bio/py-tes.svg?branch=master + :target: https://travis-ci.org/ohsu-comp-bio/py-tes +.. image:: https://coveralls.io/repos/github/ohsu-comp-bio/py-tes/badge.svg?branch=master + :target: https://coveralls.io/github/ohsu-comp-bio/py-tes?branch=master +.. image:: https://img.shields.io/badge/License-MIT-yellow.svg + :target: https://opensource.org/licenses/MIT + +*py-tes* is a library for interacting with servers implementing the +`GA4GH Task Execution +Schema `__. + +Install +~~~~~~~ + +Available on `PyPI `__. + +:: + + pip install py-tes + +Example +~~~~~~~ + +:: + + import tes + + task = tes.Task( + executors=[ + tes.Executor( + image="alpine", + command=["echo", "hello"] + ) + ] + ) + + cli = tes.HTTPClient("http://funnel.example.com", timeout=5) + task_id = cli.create_task(task) + res = cli.get_task(task_id) + cli.cancel_task(task_id) + + +.. _main-support: + +------- +Support +------- + +* For releases, see :ref:`Changelog `. +* Check :ref:`frequently asked questions (FAQ) `. +* For **bugs and feature requests**, please use the `issue tracker `_. +* For **contributions**, visit py-tes on `Github `_ and read the :ref:`guidelines `. + +.. _main-resources: + +--------- +Resources +--------- + +`Snakemake Wrappers Repository `_ + The Snakemake Wrapper Repository is a collection of reusable wrappers that allow to quickly use popular tools from Snakemake rules and workflows. + +.. 
toctree:: + :caption: API + :name: api_docs + :hidden: + :maxdepth: 1 + + api_docs/tes diff --git a/docs/requirements.txt b/docs/requirements.txt index 1833352..639402e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,12 @@ -lazydocs>=0.4.8 -mkdocs>=1.4.2 -mkdocs-awesome-pages-plugin>=2.8.0 -mkdocs-material>=9.0.12 -pydocstyle>=6.3.0 +sphinx >=3 +sphinxcontrib-napoleon +sphinx-argparse +docutils +myst-parser +configargparse +appdirs +immutables +sphinxawesome-theme +snakemake-interface-common +snakemake-interface-executor-plugins +snakemake-interface-storage-plugins \ No newline at end of file diff --git a/examples/example.py b/examples/example.py new file mode 100644 index 0000000..47e2b04 --- /dev/null +++ b/examples/example.py @@ -0,0 +1,26 @@ +import tes +import json + +# Define task +task = tes.Task( + executors=[ + tes.Executor( + image="alpine", + command=["echo", "hello"] + ) + ] +) + +# Create client +cli = tes.HTTPClient("http://localhost:8000", timeout=5) + +# Create and run task +task_id = cli.create_task(task) +cli.wait(task_id, timeout=5) + +# Fetch task info +task_info = cli.get_task(task_id, view="BASIC") +j = json.loads(task_info.as_json()) + +# Pretty print task info +print(json.dumps(j, indent=2)) diff --git a/examples/v1_0_0.ipynb b/examples/v1_0_0.ipynb new file mode 100644 index 0000000..5b8f196 --- /dev/null +++ b/examples/v1_0_0.ipynb @@ -0,0 +1,137 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "collapsed": true, + "id": "LmPwTqX3JR5p", + "outputId": "08655348-909d-4be5-9473-195631aed7dd" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Collecting py-tes==1.1.0rc3\n", + " Downloading py_tes-1.1.0rc3-py3-none-any.whl.metadata (6.5 kB)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (24.2.0)\n", + "Requirement already satisfied: future>=0.16.0 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (1.0.0)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (2.8.2)\n", + "Requirement already satisfied: requests>=2.18.2 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (2.32.3)\n", + "Collecting sphinx_rtd_theme (from py-tes==1.1.0rc3)\n", + " Downloading sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl.metadata (4.4 kB)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.6.1->py-tes==1.1.0rc3) (1.16.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (3.4.0)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (2024.8.30)\n", + "Requirement already satisfied: sphinx<9,>=6 in /usr/local/lib/python3.10/dist-packages (from sphinx_rtd_theme->py-tes==1.1.0rc3) (8.1.3)\n", + "Requirement already satisfied: docutils<0.22,>0.18 in /usr/local/lib/python3.10/dist-packages (from sphinx_rtd_theme->py-tes==1.1.0rc3) 
(0.21.2)\n", + "Collecting sphinxcontrib-jquery<5,>=4 (from sphinx_rtd_theme->py-tes==1.1.0rc3)\n", + " Downloading sphinxcontrib_jquery-4.1-py2.py3-none-any.whl.metadata (2.6 kB)\n", + "Requirement already satisfied: sphinxcontrib-applehelp>=1.0.7 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-devhelp>=1.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-htmlhelp>=2.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.1.0)\n", + "Requirement already satisfied: sphinxcontrib-jsmath>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.0.1)\n", + "Requirement already satisfied: sphinxcontrib-qthelp>=1.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-serializinghtml>=1.1.9 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: Jinja2>=3.1 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (3.1.4)\n", + "Requirement already satisfied: Pygments>=2.17 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.18.0)\n", + "Requirement already satisfied: snowballstemmer>=2.2 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.2.0)\n", + "Requirement already satisfied: babel>=2.13 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.16.0)\n", + "Requirement already satisfied: alabaster>=0.7.14 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.0.0)\n", + "Requirement already satisfied: imagesize>=1.3 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.4.1)\n", + "Requirement already satisfied: packaging>=23.0 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (24.2)\n", + "Requirement already satisfied: tomli>=2 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.2.1)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from Jinja2>=3.1->sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (3.0.2)\n", + "Downloading py_tes-1.1.0rc3-py3-none-any.whl (12 kB)\n", + "Downloading sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl (7.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.7/7.7 MB\u001b[0m \u001b[31m37.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading sphinxcontrib_jquery-4.1-py2.py3-none-any.whl (121 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.1/121.1 kB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hInstalling collected packages: sphinxcontrib-jquery, sphinx_rtd_theme, py-tes\n", + "Successfully installed py-tes-1.1.0rc3 sphinx_rtd_theme-3.0.2 sphinxcontrib-jquery-4.1\n" + ] + } + ], + "source": [ + "!pip install py-tes==1.0.0" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + 
"metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 373 + }, + "id": "nhMb-oGFKTms", + "outputId": "d4a059f6-0384-4051-eabb-b1d628cb3499" + }, + "outputs": [ + { + "ename": "HTTPError", + "evalue": "404 Client Error: Not Found for url: https://development.aced-idp.org/funnel/tasks", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mHTTPError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;31m# Create and run task\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m \u001b[0mtask_id\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcli\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate_task\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtask\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 19\u001b[0m \u001b[0mcli\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtask_id\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/tes/client.py\u001b[0m in \u001b[0;36mcreate_task\u001b[0;34m(self, task)\u001b[0m\n\u001b[1;32m 168\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mDict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_request_params\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmsg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[0mpaths\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mappend_suffixes_to_url\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0murls\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"/tasks\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 170\u001b[0;31m response = send_request(paths=paths, method='post',\n\u001b[0m\u001b[1;32m 171\u001b[0m kwargs_requests=kwargs)\n\u001b[1;32m 172\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0munmarshal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresponse\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjson\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCreateTaskResponse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mid\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/tes/client.py\u001b[0m in \u001b[0;36msend_request\u001b[0;34m(paths, method, kwargs_requests, **kwargs)\u001b[0m\n\u001b[1;32m 90\u001b[0m raise requests.exceptions.HTTPError(\n\u001b[1;32m 91\u001b[0m f\"No response received; HTTP Exceptions: {http_exceptions}\")\n\u001b[0;32m---> 92\u001b[0;31m \u001b[0mresponse\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraise_for_status\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 93\u001b[0m \u001b[0;32mreturn\u001b[0m 
\u001b[0mresponse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 94\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/requests/models.py\u001b[0m in \u001b[0;36mraise_for_status\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1022\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1023\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhttp_error_msg\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1024\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mHTTPError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhttp_error_msg\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresponse\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1025\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1026\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mHTTPError\u001b[0m: 404 Client Error: Not Found for url: https://development.aced-idp.org/funnel/tasks" + ] + } + ], + "source": [ + "import tes\n", + "import json\n", + "\n", + "# Define task\n", + "task = tes.Task(\n", + " executors=[\n", + " tes.Executor(\n", + " image=\"alpine\",\n", + " command=[\"echo\", \"hello\"]\n", + " )\n", + " ]\n", + ")\n", + "\n", + "# Create client\n", + "# TODO: Replace https://tes.example.com with live TES API site\n", + "cli = tes.HTTPClient(\"https://tes.example.com\", timeout=5)\n", + "\n", + "# Create and run task\n", + "task_id = cli.create_task(task)\n", + "cli.wait(task_id, timeout=5)\n", + "\n", + "# Fetch task info\n", + "task_info = cli.get_task(task_id, view=\"BASIC\")\n", + "j = json.loads(task_info.as_json())\n", + "\n", + "# Pretty print task info\n", + "print(json.dumps(j, indent=2))" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/v1_1_0.ipynb b/examples/v1_1_0.ipynb new file mode 100644 index 0000000..95b2cdd --- /dev/null +++ b/examples/v1_1_0.ipynb @@ -0,0 +1,137 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "LmPwTqX3JR5p", + "outputId": "08655348-909d-4be5-9473-195631aed7dd", + "collapsed": true + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting py-tes==1.1.0rc3\n", + " Downloading py_tes-1.1.0rc3-py3-none-any.whl.metadata (6.5 kB)\n", + "Requirement already satisfied: attrs>=17.4.0 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (24.2.0)\n", + "Requirement already satisfied: future>=0.16.0 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (1.0.0)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (2.8.2)\n", + "Requirement already satisfied: requests>=2.18.2 in /usr/local/lib/python3.10/dist-packages (from py-tes==1.1.0rc3) (2.32.3)\n", + "Collecting sphinx_rtd_theme (from 
py-tes==1.1.0rc3)\n", + " Downloading sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl.metadata (4.4 kB)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.6.1->py-tes==1.1.0rc3) (1.16.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (3.4.0)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.18.2->py-tes==1.1.0rc3) (2024.8.30)\n", + "Requirement already satisfied: sphinx<9,>=6 in /usr/local/lib/python3.10/dist-packages (from sphinx_rtd_theme->py-tes==1.1.0rc3) (8.1.3)\n", + "Requirement already satisfied: docutils<0.22,>0.18 in /usr/local/lib/python3.10/dist-packages (from sphinx_rtd_theme->py-tes==1.1.0rc3) (0.21.2)\n", + "Collecting sphinxcontrib-jquery<5,>=4 (from sphinx_rtd_theme->py-tes==1.1.0rc3)\n", + " Downloading sphinxcontrib_jquery-4.1-py2.py3-none-any.whl.metadata (2.6 kB)\n", + "Requirement already satisfied: sphinxcontrib-applehelp>=1.0.7 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-devhelp>=1.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-htmlhelp>=2.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.1.0)\n", + "Requirement already satisfied: sphinxcontrib-jsmath>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.0.1)\n", + "Requirement already satisfied: sphinxcontrib-qthelp>=1.0.6 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: sphinxcontrib-serializinghtml>=1.1.9 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.0.0)\n", + "Requirement already satisfied: Jinja2>=3.1 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (3.1.4)\n", + "Requirement already satisfied: Pygments>=2.17 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.18.0)\n", + "Requirement already satisfied: snowballstemmer>=2.2 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.2.0)\n", + "Requirement already satisfied: babel>=2.13 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.16.0)\n", + "Requirement already satisfied: alabaster>=0.7.14 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.0.0)\n", + "Requirement already satisfied: imagesize>=1.3 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (1.4.1)\n", + "Requirement already satisfied: packaging>=23.0 in /usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (24.2)\n", + "Requirement already satisfied: tomli>=2 in 
/usr/local/lib/python3.10/dist-packages (from sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (2.2.1)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from Jinja2>=3.1->sphinx<9,>=6->sphinx_rtd_theme->py-tes==1.1.0rc3) (3.0.2)\n", + "Downloading py_tes-1.1.0rc3-py3-none-any.whl (12 kB)\n", + "Downloading sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl (7.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.7/7.7 MB\u001b[0m \u001b[31m37.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading sphinxcontrib_jquery-4.1-py2.py3-none-any.whl (121 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.1/121.1 kB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hInstalling collected packages: sphinxcontrib-jquery, sphinx_rtd_theme, py-tes\n", + "Successfully installed py-tes-1.1.0rc3 sphinx_rtd_theme-3.0.2 sphinxcontrib-jquery-4.1\n" + ] + } + ], + "source": [ + "!pip install py-tes==1.1.0" + ] + }, + { + "cell_type": "code", + "source": [ + "import tes\n", + "import json\n", + "\n", + "# Define task\n", + "task = tes.Task(\n", + " executors=[\n", + " tes.Executor(\n", + " image=\"alpine\",\n", + " command=[\"echo\", \"hello\"]\n", + " )\n", + " ]\n", + ")\n", + "\n", + "# Create client\n", + "# TODO: Replace https://tes.example.com with live TES API site\n", + "cli = tes.HTTPClient(\"https://tes.example.com\", timeout=5)\n", + "\n", + "# Create and run task\n", + "task_id = cli.create_task(task)\n", + "cli.wait(task_id, timeout=5)\n", + "\n", + "# Fetch task info\n", + "task_info = cli.get_task(task_id, view=\"BASIC\")\n", + "j = json.loads(task_info.as_json())\n", + "\n", + "# Pretty print task info\n", + "print(json.dumps(j, indent=2))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 373 + }, + "id": "nhMb-oGFKTms", + "outputId": "d4a059f6-0384-4051-eabb-b1d628cb3499" + }, + "execution_count": 26, + "outputs": [ + { + "output_type": "error", + "ename": "HTTPError", + "evalue": "404 Client Error: Not Found for url: https://development.aced-idp.org/funnel/tasks", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mHTTPError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;31m# Create and run task\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m \u001b[0mtask_id\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcli\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate_task\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtask\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 19\u001b[0m \u001b[0mcli\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtask_id\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/tes/client.py\u001b[0m in \u001b[0;36mcreate_task\u001b[0;34m(self, task)\u001b[0m\n\u001b[1;32m 168\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0mDict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_request_params\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmsg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[0mpaths\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mappend_suffixes_to_url\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0murls\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"/tasks\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 170\u001b[0;31m response = send_request(paths=paths, method='post',\n\u001b[0m\u001b[1;32m 171\u001b[0m kwargs_requests=kwargs)\n\u001b[1;32m 172\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0munmarshal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresponse\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjson\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mCreateTaskResponse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mid\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/tes/client.py\u001b[0m in \u001b[0;36msend_request\u001b[0;34m(paths, method, kwargs_requests, **kwargs)\u001b[0m\n\u001b[1;32m 90\u001b[0m raise requests.exceptions.HTTPError(\n\u001b[1;32m 91\u001b[0m f\"No response received; HTTP Exceptions: {http_exceptions}\")\n\u001b[0;32m---> 92\u001b[0;31m \u001b[0mresponse\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraise_for_status\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 93\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresponse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 94\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/requests/models.py\u001b[0m in \u001b[0;36mraise_for_status\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1022\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1023\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhttp_error_msg\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1024\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mHTTPError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhttp_error_msg\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresponse\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1025\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1026\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mHTTPError\u001b[0m: 404 Client Error: Not Found for url: https://development.aced-idp.org/funnel/tasks" + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 907fd76..c815130 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ -attrs>=22.2.0 -python-dateutil>=2.8.2 -requests>=2.28.2 +attrs>=17.4.0 +future>=0.16.0 +python-dateutil>=2.6.1 +requests>=2.18.2 +sphinx_rtd_theme diff --git a/setup.py b/setup.py index 3b0898b..ee1af33 100644 --- a/setup.py +++ b/setup.py @@ 
-39,6 +39,7 @@ def find_version(*file_paths): packages=find_packages(exclude=["tests*"]), python_requires=">=3.7, <4", install_requires=read("requirements.txt").splitlines(), + tests_require=read("tests/requirements.txt").splitlines(), zip_safe=True, classifiers=[ "Development Status :: 4 - Beta", @@ -49,8 +50,8 @@ def find_version(*file_paths): "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9" - "Programming Language :: Python :: 3.10" + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11" ], ) diff --git a/tes/__init__.py b/tes/__init__.py index 28efe4e..4583576 100644 --- a/tes/__init__.py +++ b/tes/__init__.py @@ -36,4 +36,4 @@ "ServiceInfo" ] -__version__ = "1.1.0-rc.1" +__version__ = "1.1.0" diff --git a/tes/models.py b/tes/models.py index e5cdf6e..873389f 100644 --- a/tes/models.py +++ b/tes/models.py @@ -74,7 +74,7 @@ def strconv(value: Any) -> Any: """ if isinstance(value, (tuple, list)): if all([isinstance(n, str) for n in value]): - return [str(n) for n in value] + return type(value)(str(n) for n in value) else: return value elif isinstance(value, str): @@ -110,6 +110,8 @@ def timestampconv(value: Optional[str]) -> Optional[datetime]: """ if value is None: return value + if isinstance(value, datetime): + return value return dateutil.parser.parse(value) diff --git a/tes/utils.py b/tes/utils.py index 1f0f4f4..5031589 100644 --- a/tes/utils.py +++ b/tes/utils.py @@ -56,7 +56,7 @@ def unmarshal(j: Any, o: Type, convert_camel_case=True) -> Any: try: m = json.loads(j) except json.decoder.JSONDecodeError: - pass + raise UnmarshalError("Unable to decode JSON string: %s" % j) elif j is None: return None else: diff --git a/tests/integration/test_funnel.py b/tests/integration/test_funnel.py index b56be4c..aa7631f 100644 --- a/tests/integration/test_funnel.py +++ b/tests/integration/test_funnel.py @@ -1,35 +1,55 @@ -import unittest +import pytest import tes -class TestTESClient(unittest.TestCase): - def setUp(self): - self.cli = tes.HTTPClient("http://localhost:8000", timeout=5) - self.task = tes.Task( - executors=[ - tes.Executor( - image="alpine", - command=["echo", "hello"] - ) - ] - ) +@pytest.fixture +def tes_client(): + return tes.HTTPClient("http://localhost:8000", timeout=5) - def test_task_creation(self): - # Test service info retrieval - service_info = self.cli.get_service_info() - self.assertIsNotNone(service_info) - # Test task creation - task_id = self.cli.create_task(self.task) - self.assertIsNotNone(task_id) +@pytest.fixture +def task(): + return tes.Task(executors=[tes.Executor(image="alpine", command=["echo", "hello"])]) - # Wait for task to complete - _ = self.cli.wait(task_id) - # Test task info retrieval - task_info = self.cli.get_task(task_id, view="BASIC") - self.assertIsNotNone(task_info) +def test_service_info(tes_client): + service_info = tes_client.get_service_info() + assert service_info is not None -if __name__ == '__main__': - unittest.main() +def test_task_creation(tes_client, task): + task_id = tes_client.create_task(task) + assert task_id is not None + + _ = tes_client.wait(task_id) + + task_info = tes_client.get_task(task_id, view="BASIC") + assert task_info is not None + + +def test_task_status(tes_client, task): + task_id = tes_client.create_task(task) + assert task_id is not None + + status = tes_client.get_task(task_id, view="MINIMAL").state + assert status in [ + 
"QUEUED", + "INITIALIZING", + "RUNNING", + "COMPLETE", + "CANCELED", + "EXECUTOR_ERROR", + "SYSTEM_ERROR", + "UNKNOWN", + ] + + +def test_task_logs(tes_client, task): + task_id = tes_client.create_task(task) + assert task_id is not None + + _ = tes_client.wait(task_id) + + logs = tes_client.get_task(task_id, view="FULL").logs + assert logs is not None + assert len(logs) > 0 diff --git a/tests/test_client.py b/tests/test_client.py index 45cd9dc..a6a22b7 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,7 +1,6 @@ import pytest import requests import requests_mock -import unittest import uuid from tes.client import append_suffixes_to_url, HTTPClient, send_request @@ -9,346 +8,309 @@ from tes.utils import TimeoutError -class TestHTTPClient(unittest.TestCase): - task = Task( - executors=[ - Executor( - image="alpine", - command=["echo", "hello"] - ) - ] - ) - mock_id = str(uuid.uuid4()) - mock_url = "http://fakehost:8000" - cli = HTTPClient(mock_url, timeout=5) - - def test_cli(self): - cli = HTTPClient(url="http://fakehost:8000//", timeout=5) - self.assertEqual(cli.url, "http://fakehost:8000") - self.assertEqual(cli.urls, [ - "http://fakehost:8000/ga4gh/tes/v1", - "http://fakehost:8000/v1", - "http://fakehost:8000"] - ) - self.assertEqual(cli.timeout, 5) +@pytest.fixture +def task(): + return Task(executors=[Executor(image="alpine", command=["echo", "hello"])]) - with self.assertRaises(TypeError): - cli = HTTPClient(url=8000, timeout=5) # type: ignore - with self.assertRaises(TypeError): - HTTPClient(url="http://fakehost:8000", timeout="5") # type: ignore +@pytest.fixture +def mock_id(): + return str(uuid.uuid4()) - with self.assertRaises(ValueError): - HTTPClient(url="fakehost:8000", timeout=5) - with self.assertRaises(ValueError): - HTTPClient(url="htpp://fakehost:8000", timeout=5) # type: ignore +@pytest.fixture +def mock_url(): + return "http://fakehost:8000" - def test_create_task(self): - with requests_mock.Mocker() as m: - m.post( - "%s/ga4gh/tes/v1/tasks" % (self.mock_url), - status_code=200, - json={"id": self.mock_id} - ) - self.cli.create_task(self.task) - self.assertEqual(m.last_request.text, self.task.as_json()) - self.assertAlmostEqual(m.last_request.timeout, self.cli.timeout) - m.post( - "%s/ga4gh/tes/v1/tasks" % (self.mock_url), - status_code=500 - ) - with self.assertRaises(requests.HTTPError): - self.cli.create_task(self.task) +@pytest.fixture +def cli(mock_url): + return HTTPClient(mock_url, timeout=5) - with self.assertRaises(TypeError): - self.cli.create_task('not_a_task_object') # type: ignore - def test_get_task(self): - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks/%s" % (self.mock_url, self.mock_id), - status_code=200, - json={ - "id": self.mock_id, - "state": "RUNNING", - } - ) - self.cli.get_task(self.mock_id, "MINIMAL") - self.assertEqual( - m.last_request.url, - "%s/ga4gh/tes/v1/tasks/%s?view=MINIMAL" % ( - self.mock_url, self.mock_id - ) - ) - self.assertAlmostEqual(m.last_request.timeout, self.cli.timeout) +def test_cli(): + cli = HTTPClient(url="http://fakehost:8000//", timeout=5) + assert cli.url == "http://fakehost:8000" + assert cli.urls == [ + "http://fakehost:8000/ga4gh/tes/v1", + "http://fakehost:8000/v1", + "http://fakehost:8000", + ] + assert cli.timeout == 5 - m.get( - requests_mock.ANY, - status_code=404 - ) - with self.assertRaises(requests.HTTPError): - self.cli.get_task(self.mock_id) + with pytest.raises(TypeError): + HTTPClient(url=8000, timeout=5) # type: ignore - def test_list_tasks(self): - with 
requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks" % (self.mock_url), - status_code=200, - json={ - "tasks": [] - } - ) - self.cli.list_tasks() - self.assertEqual( - m.last_request.url, - "%s/ga4gh/tes/v1/tasks?view=MINIMAL" % (self.mock_url) - ) - self.assertAlmostEqual(m.last_request.timeout, self.cli.timeout) + with pytest.raises(TypeError): + HTTPClient(url="http://fakehost:8000", timeout="5") # type: ignore - # empty response - m.get( - "%s/ga4gh/tes/v1/tasks" % (self.mock_url), - status_code=200, - json={} - ) - self.cli.list_tasks() - self.assertEqual( - m.last_request.url, - "%s/ga4gh/tes/v1/tasks?view=MINIMAL" % (self.mock_url) - ) + with pytest.raises(ValueError): + HTTPClient(url="fakehost:8000", timeout=5) - m.get( - "%s/ga4gh/tes/v1/tasks" % (self.mock_url), - status_code=500 - ) - with self.assertRaises(requests.HTTPError): - self.cli.list_tasks() + with pytest.raises(ValueError): + HTTPClient(url="htpp://fakehost:8000", timeout=5) # type: ignore - def test_cancel_task(self): - with requests_mock.Mocker() as m: - m.post( - "%s/ga4gh/tes/v1/tasks/%s:cancel" % ( - self.mock_url, self.mock_id), - status_code=200, - json={} - ) - self.cli.cancel_task(self.mock_id) - self.assertEqual( - m.last_request.url, - "%s/ga4gh/tes/v1/tasks/%s:cancel" % ( - self.mock_url, self.mock_id) - ) - self.assertAlmostEqual(m.last_request.timeout, self.cli.timeout) - m.post( - "%s/ga4gh/tes/v1/tasks/%s:cancel" % ( - self.mock_url, self.mock_id), - status_code=500 - ) - with self.assertRaises(requests.HTTPError): - self.cli.cancel_task(self.mock_id) +def test_create_task(cli, task, mock_id, mock_url): + with requests_mock.Mocker() as m: + m.post(f"{mock_url}/ga4gh/tes/v1/tasks", status_code=200, json={"id": mock_id}) + cli.create_task(task) + assert m.last_request.text == task.as_json() + assert m.last_request.timeout == cli.timeout - m.post( - requests_mock.ANY, - status_code=404, - json={} - ) - with self.assertRaises(requests.HTTPError): - self.cli.cancel_task(self.mock_id) + m.post(f"{mock_url}/ga4gh/tes/v1/tasks", status_code=500) + with pytest.raises(requests.HTTPError): + cli.create_task(task) - def test_get_service_info(self): - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/service-info" % (self.mock_url), - status_code=200, - json={} - ) - self.cli.get_service_info() - self.assertEqual( - m.last_request.url, - "%s/ga4gh/tes/v1/service-info" % (self.mock_url) - ) - self.assertAlmostEqual(m.last_request.timeout, self.cli.timeout) + with pytest.raises(TypeError): + cli.create_task("not_a_task_object") # type: ignore - m.get( - "%s/ga4gh/tes/v1/service-info" % (self.mock_url), - status_code=500 - ) - with self.assertRaises(requests.HTTPError): - self.cli.get_service_info() - - def test_wait(self): - with self.assertRaises(TimeoutError): - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks/%s" % (self.mock_url, self.mock_id), - status_code=200, - json={ - "id": self.mock_id, - "state": "RUNNING", - } - ) - self.cli.wait(self.mock_id, timeout=1) - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks/%s" % (self.mock_url, self.mock_id), - [ - {"status_code": 200, - "json": {"id": self.mock_id, "state": "INITIALIZING"}}, - {"status_code": 200, - "json": {"id": self.mock_id, "state": "RUNNING"}}, - {"status_code": 200, - "json": {"id": self.mock_id, "state": "COMPLETE"}} - ] - ) - self.cli.wait(self.mock_id, timeout=2) +def test_get_task(cli, mock_id, mock_url): + with requests_mock.Mocker() as m: + m.get( + 
f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}", + status_code=200, + json={ + "id": mock_id, + "state": "RUNNING", + }, + ) + cli.get_task(mock_id, "MINIMAL") + assert ( + m.last_request.url + == f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}?view=MINIMAL" + ) + assert m.last_request.timeout == cli.timeout - def test_wait_exception(self): - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks/%s" % (self.mock_url, self.mock_id), - status_code=200, - json={ - "Error": "Error", - } - ) - with self.assertRaises(Exception): - self.cli.wait(self.mock_id, timeout=2) + m.get(requests_mock.ANY, status_code=404) + with pytest.raises(requests.HTTPError): + cli.get_task(mock_id) - def test_wait_no_state_change(self): - with requests_mock.Mocker() as m: - m.get( - "%s/ga4gh/tes/v1/tasks/%s" % (self.mock_url, self.mock_id), - [ - {"status_code": 200, "json": {"id": self.mock_id, "state": "RUNNING"}}, - {"status_code": 200, "json": {"id": self.mock_id, "state": "RUNNING"}}, - # Continues to return RUNNING state - ] - ) - with self.assertRaises(TimeoutError): - self.cli.wait(self.mock_id, timeout=2) - - def test_request_params(self): - - cli = HTTPClient(url="http://fakehost:8000", timeout=5) - vals = cli._request_params() - self.assertAlmostEqual(vals["timeout"], 5) - self.assertEqual(vals["headers"]["Content-type"], "application/json") - self.assertRaises(KeyError, lambda: vals["headers"]["Authorization"]) - self.assertRaises(KeyError, lambda: vals["auth"]) - self.assertRaises(KeyError, lambda: vals["data"]) - self.assertRaises(KeyError, lambda: vals["params"]) - - cli = HTTPClient(url="http://fakehost:8000", user="user", - password="password", token="token") - vals = cli._request_params(data='{"json": "string"}', - params={"query_param": "value"}) - self.assertAlmostEqual(vals["timeout"], 10) - self.assertEqual(vals["headers"]["Content-type"], "application/json") - self.assertEqual(vals["headers"]["Authorization"], "Bearer token") - self.assertEqual(vals["auth"], ("user", "password")) - self.assertEqual(vals["data"], '{"json": "string"}') - self.assertEqual(vals["params"], {"query_param": "value"}) - - def test_append_suffixes_to_url(self): - urls = ["http://example.com", "http://example.com/"] - urls_order = ["http://example1.com", "http://example2.com"] - suffixes = ["foo", "/foo", "foo/", "/foo/"] - no_suffixes = ["", "/", "//", "///"] - suffixes_order = ["1", "2"] - - results = append_suffixes_to_url(urls=urls, suffixes=suffixes) - assert len(results) == len(urls) * len(suffixes) - assert all(url == 'http://example.com/foo' for url in results) - - results = append_suffixes_to_url(urls=urls, suffixes=no_suffixes) - assert len(results) == len(urls) * len(no_suffixes) - assert all(url == 'http://example.com' for url in results) - - results = append_suffixes_to_url(urls=urls_order, suffixes=suffixes_order) - assert len(results) == len(urls_order) * len(suffixes_order) - assert results[0] == 'http://example1.com/1' - assert results[1] == 'http://example1.com/2' - assert results[2] == 'http://example2.com/1' - assert results[3] == 'http://example2.com/2' - - def test_send_request(self): - mock_url = "http://example.com" - mock_id = "mock_id" - mock_urls = append_suffixes_to_url([mock_url], ["/suffix", "/"]) - - # invalid method - with pytest.raises(ValueError): - send_request(paths=mock_urls, method="invalid") - - # errors for all paths - with requests_mock.Mocker() as m: - m.get(requests_mock.ANY, exc=requests.exceptions.ConnectTimeout) - with pytest.raises(requests.HTTPError): - 
send_request(paths=mock_urls) - # error on first path, 200 on second - with requests_mock.Mocker() as m: - m.get(mock_urls[0], exc=requests.exceptions.ConnectTimeout) - m.get(mock_urls[1], status_code=200) - response = send_request(paths=mock_urls) - assert response.status_code == 200 - assert m.last_request.url.rstrip('/') == f"{mock_url}" +def test_list_tasks(cli, mock_url): + with requests_mock.Mocker() as m: + m.get(f"{mock_url}/ga4gh/tes/v1/tasks", status_code=200, json={"tasks": []}) + cli.list_tasks() + assert m.last_request.url == f"{mock_url}/ga4gh/tes/v1/tasks?view=MINIMAL" + assert m.last_request.timeout == cli.timeout - # error on first path, 404 on second - with requests_mock.Mocker() as m: - m.get(mock_urls[0], exc=requests.exceptions.ConnectTimeout) - m.get(mock_urls[1], status_code=404) - with pytest.raises(requests.HTTPError): - send_request(paths=mock_urls) + # empty response + m.get(f"{mock_url}/ga4gh/tes/v1/tasks", status_code=200, json={}) + cli.list_tasks() + assert m.last_request.url == f"{mock_url}/ga4gh/tes/v1/tasks?view=MINIMAL" - # 404 on first path, error on second - with requests_mock.Mocker() as m: - m.get(mock_urls[0], status_code=404) - m.get(mock_urls[1], exc=requests.exceptions.ConnectTimeout) - with pytest.raises(requests.HTTPError): - send_request(paths=mock_urls) + m.get(f"{mock_url}/ga4gh/tes/v1/tasks", status_code=500) + with pytest.raises(requests.HTTPError): + cli.list_tasks() - # 404 on first path, 200 on second - with requests_mock.Mocker() as m: - m.get(mock_urls[0], status_code=404) - m.get(mock_urls[1], status_code=200) - response = send_request(paths=mock_urls) - assert response.status_code == 200 - assert m.last_request.url.rstrip('/') == f"{mock_url}" - # POST 200 - with requests_mock.Mocker() as m: - m.post(f"{mock_url}/suffix/foo/{mock_id}:bar", status_code=200) - paths = append_suffixes_to_url(mock_urls, ["/foo/{id}:bar"]) - response = send_request(paths=paths, method="post", json={}, - id=mock_id) - assert response.status_code == 200 - assert m.last_request.url == f"{mock_url}/suffix/foo/{mock_id}:bar" - - # GET 200 - with requests_mock.Mocker() as m: - m.get(f"{mock_url}/suffix/foo/{mock_id}", status_code=200) - paths = append_suffixes_to_url(mock_urls, ["/foo/{id}"]) - response = send_request(paths=paths, id=mock_id) - assert response.status_code == 200 - assert m.last_request.url == f"{mock_url}/suffix/foo/{mock_id}" +def test_cancel_task(cli, mock_id, mock_url): + with requests_mock.Mocker() as m: + m.post( + f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}:cancel", status_code=200, json={} + ) + cli.cancel_task(mock_id) + assert m.last_request.url == f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}:cancel" + assert m.last_request.timeout == cli.timeout + + m.post(f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}:cancel", status_code=500) + with pytest.raises(requests.HTTPError): + cli.cancel_task(mock_id) + + m.post(requests_mock.ANY, status_code=404, json={}) + with pytest.raises(requests.HTTPError): + cli.cancel_task(mock_id) - # POST 404 - with requests_mock.Mocker() as m: - m.post(requests_mock.ANY, status_code=404, json={}) - paths = append_suffixes_to_url(mock_urls, ["/foo"]) - with pytest.raises(requests.HTTPError): - send_request(paths=paths, method="post", json={}) - assert m.last_request.url == f"{mock_url}/foo" - # GET 500 +def test_get_service_info(cli, mock_url): + with requests_mock.Mocker() as m: + m.get(f"{mock_url}/ga4gh/tes/v1/service-info", status_code=200, json={}) + cli.get_service_info() + assert m.last_request.url == 
f"{mock_url}/ga4gh/tes/v1/service-info" + assert m.last_request.timeout == cli.timeout + + m.get(f"{mock_url}/ga4gh/tes/v1/service-info", status_code=500) + with pytest.raises(requests.HTTPError): + cli.get_service_info() + + +def test_wait(cli, mock_id, mock_url): + with pytest.raises(TimeoutError): with requests_mock.Mocker() as m: - m.get(f"{mock_url}/suffix/foo", status_code=500) - paths = append_suffixes_to_url(mock_urls, ["/foo"]) - with pytest.raises(requests.HTTPError): - send_request(paths=paths) - assert m.last_request.url == f"{mock_url}/suffix/foo" + m.get( + f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}", + status_code=200, + json={ + "id": mock_id, + "state": "RUNNING", + }, + ) + cli.wait(mock_id, timeout=1) + + with requests_mock.Mocker() as m: + m.get( + f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}", + [ + {"status_code": 200, "json": {"id": mock_id, "state": "INITIALIZING"}}, + {"status_code": 200, "json": {"id": mock_id, "state": "RUNNING"}}, + {"status_code": 200, "json": {"id": mock_id, "state": "COMPLETE"}}, + ], + ) + cli.wait(mock_id, timeout=2) + + +def test_wait_exception(cli, mock_id, mock_url): + with requests_mock.Mocker() as m: + m.get( + f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}", + status_code=200, + json={ + "Error": "Error", + }, + ) + with pytest.raises(Exception): + cli.wait(mock_id, timeout=2) + + +def test_wait_no_state_change(cli, mock_id, mock_url): + with requests_mock.Mocker() as m: + m.get( + f"{mock_url}/ga4gh/tes/v1/tasks/{mock_id}", + [ + {"status_code": 200, "json": {"id": mock_id, "state": "RUNNING"}}, + {"status_code": 200, "json": {"id": mock_id, "state": "RUNNING"}}, + # Continues to return RUNNING state + ], + ) + with pytest.raises(TimeoutError): + cli.wait(mock_id, timeout=2) + + +def test_request_params(): + cli = HTTPClient(url="http://fakehost:8000", timeout=5) + vals = cli._request_params() + assert vals["timeout"] == 5 + assert vals["headers"]["Content-type"] == "application/json" + with pytest.raises(KeyError): + _ = vals["headers"]["Authorization"] + with pytest.raises(KeyError): + _ = vals["auth"] + with pytest.raises(KeyError): + _ = vals["data"] + with pytest.raises(KeyError): + _ = vals["params"] + + cli = HTTPClient( + url="http://fakehost:8000", user="user", password="password", token="token" + ) + vals = cli._request_params( + data='{"json": "string"}', params={"query_param": "value"} + ) + assert vals["timeout"] == 10 + assert vals["headers"]["Content-type"] == "application/json" + assert vals["headers"]["Authorization"] == "Bearer token" + assert vals["auth"] == ("user", "password") + assert vals["data"] == '{"json": "string"}' + assert vals["params"] == {"query_param": "value"} + + +def test_append_suffixes_to_url(): + urls = ["http://example.com", "http://example.com/"] + urls_order = ["http://example1.com", "http://example2.com"] + suffixes = ["foo", "/foo", "foo/", "/foo/"] + no_suffixes = ["", "/", "//", "///"] + suffixes_order = ["1", "2"] + + results = append_suffixes_to_url(urls=urls, suffixes=suffixes) + assert len(results) == len(urls) * len(suffixes) + assert all(url == "http://example.com/foo" for url in results) + + results = append_suffixes_to_url(urls=urls, suffixes=no_suffixes) + assert len(results) == len(urls) * len(no_suffixes) + assert all(url == "http://example.com" for url in results) + + results = append_suffixes_to_url(urls=urls_order, suffixes=suffixes_order) + assert len(results) == len(urls_order) * len(suffixes_order) + assert results[0] == "http://example1.com/1" + assert results[1] == 
"http://example1.com/2" + assert results[2] == "http://example2.com/1" + assert results[3] == "http://example2.com/2" + + +def test_send_request(): + mock_url = "http://example.com" + mock_id = "mock_id" + mock_urls = append_suffixes_to_url([mock_url], ["/suffix", "/"]) + + # invalid method + with pytest.raises(ValueError): + send_request(paths=mock_urls, method="invalid") + + # errors for all paths + with requests_mock.Mocker() as m: + m.get(requests_mock.ANY, exc=requests.exceptions.ConnectTimeout) + with pytest.raises(requests.HTTPError): + send_request(paths=mock_urls) + + # error on first path, 200 on second + with requests_mock.Mocker() as m: + m.get(mock_urls[0], exc=requests.exceptions.ConnectTimeout) + m.get(mock_urls[1], status_code=200) + response = send_request(paths=mock_urls) + assert response.status_code == 200 + assert m.last_request.url.rstrip("/") == f"{mock_url}" + + # error on first path, 404 on second + with requests_mock.Mocker() as m: + m.get(mock_urls[0], exc=requests.exceptions.ConnectTimeout) + m.get(mock_urls[1], status_code=404) + with pytest.raises(requests.HTTPError): + send_request(paths=mock_urls) + + # 404 on first path, error on second + with requests_mock.Mocker() as m: + m.get(mock_urls[0], status_code=404) + m.get(mock_urls[1], exc=requests.exceptions.ConnectTimeout) + with pytest.raises(requests.HTTPError): + send_request(paths=mock_urls) + + # 404 on first path, 200 on second + with requests_mock.Mocker() as m: + m.get(mock_urls[0], status_code=404) + m.get(mock_urls[1], status_code=200) + response = send_request(paths=mock_urls) + assert response.status_code == 200 + assert m.last_request.url.rstrip("/") == f"{mock_url}" + + # POST 200 + with requests_mock.Mocker() as m: + m.post(f"{mock_url}/suffix/foo/{mock_id}:bar", status_code=200) + paths = append_suffixes_to_url(mock_urls, ["/foo/{id}:bar"]) + response = send_request(paths=paths, method="post", json={}, id=mock_id) + assert response.status_code == 200 + assert m.last_request.url == f"{mock_url}/suffix/foo/{mock_id}:bar" + + # GET 200 + with requests_mock.Mocker() as m: + m.get(f"{mock_url}/suffix/foo/{mock_id}", status_code=200) + paths = append_suffixes_to_url(mock_urls, ["/foo/{id}"]) + response = send_request(paths=paths, id=mock_id) + assert response.status_code == 200 + assert m.last_request.url == f"{mock_url}/suffix/foo/{mock_id}" + + # POST 404 + with requests_mock.Mocker() as m: + m.post(requests_mock.ANY, status_code=404, json={}) + paths = append_suffixes_to_url(mock_urls, ["/foo"]) + with pytest.raises(requests.HTTPError): + send_request(paths=paths, method="post", json={}) + assert m.last_request.url == f"{mock_url}/foo" + + # GET 500 + with requests_mock.Mocker() as m: + m.get(f"{mock_url}/suffix/foo", status_code=500) + paths = append_suffixes_to_url(mock_urls, ["/foo"]) + with pytest.raises(requests.HTTPError): + send_request(paths=paths) + assert m.last_request.url == f"{mock_url}/suffix/foo" diff --git a/tests/test_models.py b/tests/test_models.py index c1cebd7..fc64f5a 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,7 +1,8 @@ import json -import unittest - from copy import deepcopy +from datetime import datetime + +import pytest from tes.models import ( Executor, @@ -21,14 +22,7 @@ ) -task_valid = Task( - executors=[ - Executor( - image="alpine", - command=["echo", "hello"] - ) - ] -) +task_valid = Task(executors=[Executor(image="alpine", command=["echo", "hello"])]) datetm = "2018-01-01T00:00:00Z" @@ -52,29 +46,15 @@ stdin="/abs/path", stdout="/abs/path", 
stderr="/abs/path", - env={"VAR": "value"} + env={"VAR": "value"}, ), - Executor( - image="alpine", - command=["echo", "worls"] - ) + Executor(image="alpine", command=["echo", "worls"]), ], inputs=[ - Input( - url="s3:/some/path", - path="/abs/path" - ), - Input( - content="foo", - path="/abs/path" - ) - ], - outputs=[ - Output( - url="s3:/some/path", - path="/abs/path" - ) + Input(url="s3:/some/path", path="/abs/path"), + Input(content="foo", path="/abs/path"), ], + outputs=[Output(url="s3:/some/path", path="/abs/path")], volumes=[], tags={"key": "value", "key2": "value2"}, logs=[ @@ -88,23 +68,20 @@ end_time=datetm, # type: ignore exit_code=0, stdout="hello", - stderr="world" + stderr="world", ) ], outputs=[ OutputFileLog( url="s3:/some/path", path="/abs/path", - size_bytes=int64conv(123) # type: ignore + size_bytes=int64conv(123), # type: ignore ) ], - system_logs=[ - "some system log message", - "some other system log message" - ] + system_logs=["some system log message", "some other system log message"], ) ], - creation_time=datetm # type: ignore + creation_time=datetm, # type: ignore ) task_invalid = Task( @@ -115,136 +92,146 @@ stdin="relative/path", stdout="relative/path", stderr="relative/path", - env={1: 2} - ) - ], - inputs=[ - Input( - url="s3:/some/path", - content="foo" - ), - Input( - path="relative/path" - ) - ], - outputs=[ - Output(), - Output( - url="s3:/some/path", - path="relative/path" + env={1: 2}, ) ], - volumes=['/abs/path', 'relative/path'], - tags={1: 2} + inputs=[Input(url="s3:/some/path", content="foo"), Input(path="relative/path")], + outputs=[Output(), Output(url="s3:/some/path", path="relative/path")], + volumes=["/abs/path", "relative/path"], + tags={1: 2}, ) -expected = { - "executors": [ - { - "image": "alpine", - "command": ["echo", "hello"] - } - ] -} +expected = {"executors": [{"image": "alpine", "command": ["echo", "hello"]}]} -class TestModels(unittest.TestCase): - - def test_list_of(self): - validator = list_of(str) - self.assertEqual(list_of(str), validator) - self.assertEqual( - repr(validator), - ">" +def test_list_of(): + validator = list_of(str) + assert list_of(str) == validator + assert repr(validator) == ">" + with pytest.raises(TypeError): + Input(url="s3:/some/path", path="/opt/foo", content=123) # type: ignore + with pytest.raises(TypeError): + Task( + inputs=[Input(url="s3:/some/path", path="/opt/foo"), "foo"] # type: ignore ) - with self.assertRaises(TypeError): - Input( - url="s3:/some/path", - path="/opt/foo", - content=123 # type: ignore - ) - with self.assertRaises(TypeError): - Task( - inputs=[ - Input( - url="s3:/some/path", path="/opt/foo" - ), - "foo" # type: ignore - ] - ) - - def test_drop_none(self): - self.assertEqual(_drop_none({}), {}) - self.assertEqual(_drop_none({"foo": None}), {}) - self.assertEqual(_drop_none({"foo": 1}), {"foo": 1}) - self.assertEqual(_drop_none({"foo": None, "bar": 1}), {"bar": 1}) - self.assertEqual(_drop_none({"foo": [1, None, 2]}), {"foo": [1, 2]}) - self.assertEqual(_drop_none({"foo": {"bar": None}}), {"foo": {}}) - self.assertEqual( - _drop_none({"foo": {"bar": None}, "baz": 1}), - {"foo": {}, "baz": 1} + + +def test_drop_none(): + assert _drop_none({}) == {} + assert _drop_none({"foo": None}) == {} + assert _drop_none({"foo": 1}) == {"foo": 1} + assert _drop_none({"foo": None, "bar": 1}) == {"bar": 1} + assert _drop_none({"foo": [1, None, 2]}) == {"foo": [1, 2]} + assert _drop_none({"foo": {"bar": None}}) == {"foo": {}} + assert _drop_none({"foo": {"bar": None}, "baz": 1}) == {"foo": {}, 
"baz": 1} + + +def test_strconv(): + assert strconv("foo") == "foo" + assert strconv(["foo", "bar"]) == ["foo", "bar"] + assert strconv(("foo", "bar")) == ("foo", "bar") + assert strconv(1) == 1 + assert strconv([1]) == [1] + + +def test_int64conv(): + assert int64conv("1") == 1 + assert int64conv("-1") == -1 + assert int64conv(None) is None + + +def test_timestampconv(): + tm = timestampconv("2018-02-01T00:00:00Z") + assert tm is not None + assert tm.year == 2018 + assert tm.month == 2 + assert tm.day == 1 + assert tm.hour == 0 + assert tm.timestamp() == 1517443200.0 + assert timestampconv(None) is None + + +def test_datetime_json_handler(): + tm = timestampconv("2018-02-01T00:00:00Z") + tm_iso = "2018-02-01T00:00:00+00:00" + assert tm is not None + assert datetime_json_handler(tm) == tm_iso + with pytest.raises(TypeError): + datetime_json_handler(None) + with pytest.raises(TypeError): + datetime_json_handler("abc") + with pytest.raises(TypeError): + datetime_json_handler(2001) + with pytest.raises(TypeError): + datetime_json_handler(tm_iso) + + +def test_as_dict(): + task = deepcopy(task_valid) + assert task.as_dict() == expected + with pytest.raises(KeyError): + task.as_dict()["inputs"] + assert task.as_dict(drop_empty=False)["inputs"] is None + + +def test_as_json(): + task = deepcopy(task_valid) + assert task.as_json() == json.dumps(expected) + + +def test_is_valid(): + task = deepcopy(task_valid) + assert task.is_valid()[0] + + task = deepcopy(task_valid_full) + assert task.is_valid()[0] + + task = deepcopy(task_invalid) + task.executors[0].image = None # type: ignore + task.executors[0].command = None # type: ignore + assert not task.is_valid()[0] + + task = deepcopy(task_invalid) + task.executors = None + assert not task.is_valid()[0] + + +def test_task_creation(): + task = Task( + id="test_id", + state="RUNNING", + name="test_task", + description="test description", + executors=[Executor(image="python:3.8", command=["python", "--version"])], + creation_time=datetime.now(), + ) + assert task.id == "test_id" + assert task.state == "RUNNING" + assert task.name == "test_task" + assert task.description == "test description" + assert len(task.executors) == 1 + assert task.executors[0].image == "python:3.8" + assert task.executors[0].command == ["python", "--version"] + assert task.creation_time is not None + + +def test_task_invalid_state(): + with pytest.raises(ValueError): + Task( + id="test_id", + state="INVALID_STATE", # type: ignore + name="test_task", + description="test description", + executors=[Executor(image="python:3.8", command=["python", "--version"])], + creation_time=datetime.now(), ) - def test_strconv(self): - self.assertTrue(strconv("foo"), u"foo") - self.assertTrue(strconv(["foo", "bar"]), [u"foo", u"bar"]) - self.assertTrue(strconv(("foo", "bar")), (u"foo", u"bar")) - self.assertTrue(strconv(1), 1) - self.assertTrue(strconv([1]), [1]) - - def test_int64conv(self): - self.assertEqual(int64conv("1"), 1) - self.assertEqual(int64conv("-1"), -1) - self.assertIsNone(int64conv(None)) - - def test_timestampconv(self): - tm = timestampconv("2018-02-01T00:00:00Z") - self.assertIsNotNone(tm) - assert tm is not None - self.assertAlmostEqual(tm.year, 2018) - self.assertAlmostEqual(tm.month, 2) - self.assertAlmostEqual(tm.day, 1) - self.assertAlmostEqual(tm.hour, 0) - self.assertAlmostEqual(tm.timestamp(), 1517443200.0) - self.assertIsNone(timestampconv(None)) - - def test_datetime_json_handler(self): - tm = timestampconv("2018-02-01T00:00:00Z") - tm_iso = 
'2018-02-01T00:00:00+00:00' - assert tm is not None - self.assertEqual(datetime_json_handler(tm), tm_iso) - with self.assertRaises(TypeError): - datetime_json_handler(None) - with self.assertRaises(TypeError): - datetime_json_handler("abc") - with self.assertRaises(TypeError): - datetime_json_handler(2001) - with self.assertRaises(TypeError): - datetime_json_handler(tm_iso) - - def test_as_dict(self): - task = deepcopy(task_valid) - self.assertEqual(task.as_dict(), expected) - with self.assertRaises(KeyError): - task.as_dict()['inputs'] - self.assertIsNone(task.as_dict(drop_empty=False)['inputs']) - - def test_as_json(self): - task = deepcopy(task_valid) - self.assertEqual(task.as_json(), json.dumps(expected)) - - def test_is_valid(self): - task = deepcopy(task_valid) - self.assertTrue(task.is_valid()[0]) - - task = deepcopy(task_valid_full) - self.assertTrue(task.is_valid()[0]) - - task = deepcopy(task_invalid) - task.executors[0].image = None # type: ignore - task.executors[0].command = None # type: ignore - self.assertFalse(task.is_valid()[0]) - - task = deepcopy(task_invalid) - task.executors = None - self.assertFalse(task.is_valid()[0]) + +def test_executor_missing_image(): + with pytest.raises(TypeError): + Executor(command=["python", "--version"]) + + +def test_executor_missing_command(): + with pytest.raises(TypeError): + Executor(image="python:3.8") diff --git a/tests/test_utils.py b/tests/test_utils.py index b7fe5ed..0a8849c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,9 +1,6 @@ -from __future__ import absolute_import, print_function, unicode_literals - -import dateutil.parser import json -import unittest - +import dateutil.parser +import pytest from tes.utils import camel_to_snake, unmarshal, UnmarshalError from tes.models import ( CancelTaskRequest, @@ -24,257 +21,255 @@ ) -class TestUtils(unittest.TestCase): - - def test_camel_to_snake(self): - case1 = "FooBar" - case2 = "fooBar" - case3 = "foo_bar" - self.assertEqual(camel_to_snake(case1), "foo_bar") - self.assertEqual(camel_to_snake(case2), "foo_bar") - self.assertEqual(camel_to_snake(case3), "foo_bar") - - def test_unmarshal(self): - - # test unmarshalling with no or minimal contents - try: - unmarshal( - CancelTaskRequest(id="foo").as_json(), - CancelTaskRequest - ) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(CancelTaskResponse().as_json(), CancelTaskResponse) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal( - CreateTaskResponse(id="foo").as_json(), - CreateTaskResponse - ) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(Executor( - image="alpine", command=["echo", "hello"]).as_json(), - Executor - ) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(ExecutorLog().as_json(), ExecutorLog) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal( - GetTaskRequest(id="foo", view="BASIC").as_json(), - GetTaskRequest - ) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(Input().as_json(), Input) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(ListTasksRequest().as_json(), ListTasksRequest) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(ListTasksResponse().as_json(), ListTasksResponse) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - 
unmarshal(Output().as_json(), Output) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(OutputFileLog().as_json(), OutputFileLog) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(Resources().as_json(), Resources) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(ServiceInfo().as_json(), ServiceInfo) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(Task().as_json(), Task) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - try: - unmarshal(TaskLog().as_json(), TaskLog) - except Exception: - self.fail("Raised ExceptionType unexpectedly!") - - # test special cases - self.assertIsNone(unmarshal(None, Input)) - with self.assertRaises(TypeError): - unmarshal([], Input) - with self.assertRaises(TypeError): - unmarshal(1, Input) - with self.assertRaises(TypeError): - unmarshal(1.3, Input) - with self.assertRaises(TypeError): - unmarshal(True, Input) - with self.assertRaises(TypeError): - unmarshal('foo', Input) - - # test with some interesting contents - test_invalid_dict = {"foo": "bar"} - test_invalid_str = json.dumps(test_invalid_dict) - with self.assertRaises(UnmarshalError): - unmarshal(test_invalid_dict, CreateTaskResponse) - with self.assertRaises(UnmarshalError): - unmarshal(test_invalid_str, CreateTaskResponse) - - test_simple_dict = { - "url": "file://test_file", - "path": "/mnt/test_file", - "type": "FILE" - } - test_simple_str = json.dumps(test_simple_dict) - o1 = unmarshal(test_simple_dict, Input) - o2 = unmarshal(test_simple_str, Input, convert_camel_case=False) - self.assertTrue(isinstance(o1, Input)) - self.assertTrue(isinstance(o2, Input)) - self.assertEqual(o1, o2) - self.assertEqual(o1.as_dict(), test_simple_dict) - self.assertEqual(o1.as_json(), test_simple_str) - - test_complex_dict = { - "name": "test", - "inputs": [ - { - "url": "file:///storage/inputs/test_file", - "path": "/mnt/test_file", - "type": "FILE" - } - ], - "outputs": [ - { - "url": "file:///storage/outputs/test_outputfile", - "path": "/mnt/test_outputfile", - "type": "FILE" - } - ], - "executors": [ - { - "image": "alpine", - "command": ["echo", "hello"], - "env": {"HOME": "/home/"} - } - ], - "logs": [ - { - "start_time": "2017-10-09T17:05:00.0Z", - "end_time": "2017-10-09T17:40:30.0Z", - "metadata": {"testmeta": "testvalue"}, - "logs": [ - { - "start_time": "2017-10-09T17:06:30.0Z", - "end_time": "2017-10-09T17:39:50.0Z", - "exit_code": 0, - "stdout": "hello", - "stderr": "", - } - ], - "outputs": [ - { - "url": "file:///storage/outputs/test_outputfile", - "path": "/mnt/test_outputfile", - "size_bytes": "3333" - } - ], - "system_logs": [ - "level='info' msg='Download started' \ - timestamp='2018-05-04T09:12:42.391262682-07:00' \ - task_attempt='0' executor_index='0' \ - url='swift://biostream/protograph'" - ] - } - ], - "resources": { - "cpu_cores": 1, - "ram_gb": 2, - "disk_gb": 3, - "preemptible": True, - "zones": ["us-east-1", "us-west-1"] - }, - "creation_time": "2017-10-09T17:00:00.0Z" - } - - test_complex_str = json.dumps(test_complex_dict) - o1 = unmarshal(test_complex_dict, Task) - o2 = unmarshal(test_complex_str, Task) - self.assertTrue(isinstance(o1, Task)) - self.assertTrue(isinstance(o2, Task)) - self.assertAlmostEqual(o1, o2) - expected = test_complex_dict.copy() - - # handle expected conversions - expected["logs"][0]["outputs"][0]["size_bytes"] = int( - expected["logs"][0]["outputs"][0]["size_bytes"] - ) - 
expected["logs"][0]["start_time"] = dateutil.parser.parse( - expected["logs"][0]["start_time"] - ) - expected["logs"][0]["end_time"] = dateutil.parser.parse( - expected["logs"][0]["end_time"] - ) - expected["logs"][0]["logs"][0]["start_time"] = dateutil.parser.parse( - expected["logs"][0]["logs"][0]["start_time"] - ) - expected["logs"][0]["logs"][0]["end_time"] = dateutil.parser.parse( - expected["logs"][0]["logs"][0]["end_time"] - ) - expected["creation_time"] = dateutil.parser.parse( - expected["creation_time"] - ) - self.assertEqual(o1.as_dict(), expected) - - def test_unmarshal_types(self): - - empty_log_dict = { - 'id': 'c55qjplpsjir0oo1kdj0', - 'state': 'QUEUED', - 'name': 'toil-bbc72af7-e11a-4831-9392-669ea6c309a1-0', - 'executors': [{ - 'image': - 'testImage', - 'command': [ - '_toil_kubernetes_executor', - 'gAWVGwAAAAAAAAB9lIwHY29tbWFuZJSMCnNsZWVwIDEwMDCUcy4=' - ] - }], - 'logs': [{}], - 'creation_time': "2017-10-09T17:00:00" - } - - expected = empty_log_dict.copy() - expected["creation_time"] = dateutil.parser.parse( - expected["creation_time"] - ) +def test_camel_to_snake(): + assert camel_to_snake("FooBar") == "foo_bar" + assert camel_to_snake("fooBar") == "foo_bar" + assert camel_to_snake("foo_bar") == "foo_bar" + - empty_log_str = json.dumps(empty_log_dict) - o1 = unmarshal(empty_log_dict, Task) +def test_unmarshal(): + # test unmarshalling with no or minimal contents + try: + unmarshal(CancelTaskRequest(id="foo").as_json(), CancelTaskRequest) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") - self.assertEqual(o1.as_dict(), expected) - self.assertEqual(o1.as_json(), empty_log_str) + try: + unmarshal(CancelTaskResponse().as_json(), CancelTaskResponse) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(CreateTaskResponse(id="foo").as_json(), CreateTaskResponse) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal( + Executor(image="alpine", command=["echo", "hello"]).as_json(), Executor + ) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(ExecutorLog().as_json(), ExecutorLog) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(GetTaskRequest(id="foo", view="BASIC").as_json(), GetTaskRequest) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(Input().as_json(), Input) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(ListTasksRequest().as_json(), ListTasksRequest) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(ListTasksResponse().as_json(), ListTasksResponse) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(Output().as_json(), Output) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(OutputFileLog().as_json(), OutputFileLog) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(Resources().as_json(), Resources) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(ServiceInfo().as_json(), ServiceInfo) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(Task().as_json(), Task) + except Exception: + pytest.fail("Raised ExceptionType unexpectedly!") + + try: + unmarshal(TaskLog().as_json(), TaskLog) + except Exception: + pytest.fail("Raised ExceptionType 
unexpectedly!") + + # test special cases + assert unmarshal(None, Input) is None + with pytest.raises(TypeError): + unmarshal([], Input) + with pytest.raises(TypeError): + unmarshal(1, Input) + with pytest.raises(TypeError): + unmarshal(1.3, Input) + with pytest.raises(TypeError): + unmarshal(True, Input) + with pytest.raises(UnmarshalError): + unmarshal("foo", Input) + + # test with some interesting contents + test_invalid_dict = {"foo": "bar"} + test_invalid_str = json.dumps(test_invalid_dict) + with pytest.raises(UnmarshalError): + unmarshal(test_invalid_dict, CreateTaskResponse) + with pytest.raises(UnmarshalError): + unmarshal(test_invalid_str, CreateTaskResponse) + + test_simple_dict = { + "url": "file://test_file", + "path": "/mnt/test_file", + "type": "FILE", + } + test_simple_str = json.dumps(test_simple_dict) + o1 = unmarshal(test_simple_dict, Input) + o2 = unmarshal(test_simple_str, Input, convert_camel_case=False) + assert isinstance(o1, Input) + assert isinstance(o2, Input) + assert o1 == o2 + assert o1.as_dict() == test_simple_dict + assert o1.as_json() == test_simple_str + + test_complex_dict = { + "name": "test", + "inputs": [ + { + "url": "file:///storage/inputs/test_file", + "path": "/mnt/test_file", + "type": "FILE", + } + ], + "outputs": [ + { + "url": "file:///storage/outputs/test_outputfile", + "path": "/mnt/test_outputfile", + "type": "FILE", + } + ], + "executors": [ + {"image": "alpine", "command": ["echo", "hello"], "env": {"HOME": "/home/"}} + ], + "logs": [ + { + "start_time": "2017-10-09T17:05:00.0Z", + "end_time": "2017-10-09T17:40:30.0Z", + "metadata": {"testmeta": "testvalue"}, + "logs": [ + { + "start_time": "2017-10-09T17:06:30.0Z", + "end_time": "2017-10-09T17:39:50.0Z", + "exit_code": 0, + "stdout": "hello", + "stderr": "", + } + ], + "outputs": [ + { + "url": "file:///storage/outputs/test_outputfile", + "path": "/mnt/test_outputfile", + "size_bytes": "3333", + } + ], + "system_logs": [ + "level='info' msg='Download started' \ + timestamp='2018-05-04T09:12:42.391262682-07:00' \ + task_attempt='0' executor_index='0' \ + url='swift://biostream/protograph'" + ], + } + ], + "resources": { + "cpu_cores": 1, + "ram_gb": 2, + "disk_gb": 3, + "preemptible": True, + "zones": ["us-east-1", "us-west-1"], + }, + "creation_time": "2017-10-09T17:00:00.0Z", + } + + test_complex_str = json.dumps(test_complex_dict) + o1 = unmarshal(test_complex_dict, Task) + o2 = unmarshal(test_complex_str, Task) + assert isinstance(o1, Task) + assert isinstance(o2, Task) + assert o1 == o2 + expected = test_complex_dict.copy() + + # handle expected conversions + expected["logs"][0]["outputs"][0]["size_bytes"] = int( + expected["logs"][0]["outputs"][0]["size_bytes"] + ) + expected["logs"][0]["start_time"] = dateutil.parser.parse( + expected["logs"][0]["start_time"] + ) + expected["logs"][0]["end_time"] = dateutil.parser.parse( + expected["logs"][0]["end_time"] + ) + expected["logs"][0]["logs"][0]["start_time"] = dateutil.parser.parse( + expected["logs"][0]["logs"][0]["start_time"] + ) + expected["logs"][0]["logs"][0]["end_time"] = dateutil.parser.parse( + expected["logs"][0]["logs"][0]["end_time"] + ) + expected["creation_time"] = dateutil.parser.parse(expected["creation_time"]) + assert o1.as_dict() == expected + + +def test_unmarshal_types(): + empty_log_dict = { + "id": "c55qjplpsjir0oo1kdj0", + "state": "QUEUED", + "name": "toil-bbc72af7-e11a-4831-9392-669ea6c309a1-0", + "executors": [ + { + "image": "testImage", + "command": [ + "_toil_kubernetes_executor", + 
"gAWVGwAAAAAAAAB9lIwHY29tbWFuZJSMCnNsZWVwIDEwMDCUcy4=", + ], + } + ], + "logs": [{}], + "creation_time": "2017-10-09T17:00:00", + } + + expected = empty_log_dict.copy() + expected["creation_time"] = dateutil.parser.parse(expected["creation_time"]) + + empty_log_str = json.dumps(empty_log_dict) + o1 = unmarshal(empty_log_dict, Task) + + assert o1.as_dict() == expected + assert o1.as_json() == empty_log_str + + +def test_unmarshal_additional_cases(): + # Additional test cases for more coverage + test_dict_with_extra_fields = {"id": "foo", "extra_field": "extra_value"} + with pytest.raises(UnmarshalError): + unmarshal(test_dict_with_extra_fields, CancelTaskRequest) + + test_dict_with_nested_objects = { + "id": "foo", + "executors": [{"image": "alpine", "command": ["echo", "hello"]}], + } + result = unmarshal(test_dict_with_nested_objects, Task) + assert isinstance(result, Task) + assert result.executors[0].image == "alpine" + assert result.executors[0].command == ["echo", "hello"] + + test_dict_with_invalid_json = '{"id": "foo", "invalid_json": }' + with pytest.raises(UnmarshalError): + unmarshal(test_dict_with_invalid_json, CancelTaskRequest)