From 4816d702a6f425080d05c928527bd04e0aff4198 Mon Sep 17 00:00:00 2001 From: Jacob Tomlinson Date: Mon, 9 Nov 2020 15:59:20 +0000 Subject: [PATCH 1/3] Migrate to GitHub Actions --- .github/workflows/ci-build.yaml | 38 +++++++++++++++++++++++++++++ .travis.yml | 43 --------------------------------- README.md | 23 +++++++++--------- 3 files changed, 50 insertions(+), 54 deletions(-) create mode 100644 .github/workflows/ci-build.yaml delete mode 100644 .travis.yml diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml new file mode 100644 index 0000000..beb819c --- /dev/null +++ b/.github/workflows/ci-build.yaml @@ -0,0 +1,38 @@ +name: CI +on: [push, pull_request] + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - name: Checkout source + uses: actions/checkout@v2 + + - name: Setup Conda Environment + uses: goanpeca/setup-miniconda@v1 + with: + miniconda-version: "latest" + python-version: "3.7" + environment-file: binder/environment.yml + activate-environment: dask-examples + auto-activate-base: false + + - name: Install testing and docs dependencies + shell: bash -l {0} + run: | + conda install -c conda-forge nbconvert nbformat jupyter_client ipykernel + pip install nbsphinx dask-sphinx-theme sphinx + - name: Build + shell: bash -l {0} + run: | + python prep.py --small + sphinx-build -M html . _build -vsphinx-build -M html . _build -v + + - name: Deploy + if: ${{ github.ref == 'refs/heads/master' && github.event_name != 'pull_request'}} + uses: JamesIves/github-pages-deploy-action@3.7.1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BRANCH: gh-pages + FOLDER: _build/html + CLEAN: true diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 19de5cc..0000000 --- a/.travis.yml +++ /dev/null @@ -1,43 +0,0 @@ -language: python - -python: - - "3.6" - -env: - global: - # Doctr deploy key for dask/dask-tutorial - - secure: "ZG40FBzwgbJx8dKbdtMXVS6+oWMqw1i/dkz0MhB5RWotM+buAlFzTFGaal7FeWNf/oBa4pPw9zgAeGo2tjnNxqJIN47j+8K43vikwJp7fjtEp1ru/IS+50MCWBAARSLIh2qtymEMwIpMYLZes5JU9ZPPagEOtaWFE5IDSKUKKNBTgNihuGxRaikm9ApeP2r/6gOjEEj+Qslk+o3SzX2xwteKRSkVIPPocrPnZc2IOsuP5lCrSadO0wrawVhHUlYraPNMyZEk3ChIUUW0aWyXF8OFjN1uQ0LkAgtj+O2IcshvO8ZieRUUJuHMWTWQnv9lao/mmqiOWPsG3Sf3WPuNG8oEnMjBrlMH2yYGifpgEhxkis/mebpucaQtdtptW8Btv5O+yN8MNCf2+Y3A+QHHD8yCEVGU3V/PdZbEt9ZVjCYIwQ02k/SZ9zes9NirNP2G8Ff3agQuI7LHP1nkbTzUSBvshT3XsPkRi0Q31ZnCnJJ7/NZMNTJyL1x1OIWweV60fncoJI0n3JSvzVH76wQszg+7eagE1Tudw7n0qS24Tem4hcLjLkeOdfEl8khYCgj/aI8pezzNNc5ogL3qdFvXn5JP5zs/OuxxlpqToBBurQtujxp1JI/dwgVy392avzizimHfalcrRski94qJli8jvPnSDSbL1OzAhzw+zHIv68U=" - - -before_install: - - sudo apt-get update - - sudo apt-get install graphviz - -install: - # Install conda - - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - - bash miniconda.sh -b -p $HOME/miniconda - - export PATH="$HOME/miniconda/bin:$PATH" - - conda config --set always_yes yes --set changeps1 no - - conda update conda - - # Install dependencies - - conda env create -n test -f binder/environment.yml - - source activate test - - # Install testing dependencies - - conda install -c conda-forge nbconvert nbformat jupyter_client ipykernel - - # Install documentation dependencies - - pip install nbsphinx dask-sphinx-theme sphinx - -script: - - set -e - - source activate test - - python prep.py --small - - sphinx-build -M html . _build -v - - pip install doctr - - doctr deploy --built-docs _build/html . 
- -notifications: - email: false diff --git a/README.md b/README.md index 360d4be..4b8bb2a 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ This tutorial was last given at SciPy 2020 which was a virtual conference. [A video of the SciPy 2020 tutorial is available online](https://www.youtube.com/watch?v=EybGGLbLipI). [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/dask/dask-tutorial/master?urlpath=lab) +[![Build Status](https://github.com/dask/dask-tutorial/workflows/CI/badge.svg)](https://github.com/dask/dask-tutorial/actions?query=workflow%3ACI) Dask provides multi-core execution on larger-than-memory datasets. @@ -35,13 +36,13 @@ schedulers (odd sections.) and then install necessary packages. There are three different ways to achieve this, pick the one that best suits you, and ***only pick one option***. -They are, in order of preference: +They are, in order of preference: #### 2a) Create a conda environment (preferred) In the main repo directory - conda env create -f binder/environment.yml + conda env create -f binder/environment.yml conda activate dask-tutorial jupyter labextension install @jupyter-widgets/jupyterlab-manager jupyter labextension install @bokeh/jupyter_bokeh @@ -55,10 +56,10 @@ You will need the following core libraries You may find the following libraries helpful for some exercises conda install python-graphviz -c conda-forge - -Note that this options will alter your existing environment, potentially changing the versions of packages you already -have installed. - + +Note that this options will alter your existing environment, potentially changing the versions of packages you already +have installed. + #### 2c) Use Dockerfile You can build a docker image out of the provided Dockerfile. @@ -69,7 +70,7 @@ Run a container, replacing the ID with the output of the previous command $ docker run -it -p 8888:8888 -p 8787:8787 -The above command will give an URL (`Like http://(container_id or 127.0.0.1):8888/?token=`) which +The above command will give an URL (`Like http://(container_id or 127.0.0.1):8888/?token=`) which can be used to access the notebook from browser. You may need to replace the given hostname with "localhost" or "127.0.0.1". @@ -79,7 +80,7 @@ can be used to access the notebook from browser. You may need to replace the giv From the repo directory - jupyter notebook + jupyter notebook Or @@ -110,8 +111,8 @@ This was already done for method c) and does not need repeating. 2. [Bag](02_bag.ipynb) - the first high-level collection: a generalized iterator for use with a functional programming style and to clean messy data. - -3. [Array](03_array.ipynb) - blocked numpy-like functionality with a collection of + +3. [Array](03_array.ipynb) - blocked numpy-like functionality with a collection of numpy arrays spread across your cluster. 7. [Dataframe](04_dataframe.ipynb) - parallelized operations on many pandas dataframes @@ -120,7 +121,7 @@ spread across your cluster. 5. [Distributed](05_distributed.ipynb) - Dask's scheduler for clusters, with details of how to view the UI. -6. [Advanced Distributed](06_distributed_advanced.ipynb) - further details on distributed +6. [Advanced Distributed](06_distributed_advanced.ipynb) - further details on distributed computing, including how to debug. 7. 
[Dataframe Storage](07_dataframe_storage.ipynb) - efficient ways to read and write From 687ca1a85e833fb7af4a93f9e685dc3ae048ebc4 Mon Sep 17 00:00:00 2001 From: James Bourbeau Date: Mon, 9 Nov 2020 11:08:30 -0600 Subject: [PATCH 2/3] Update conda env name --- .github/workflows/ci-build.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml index beb819c..a0fcef7 100644 --- a/.github/workflows/ci-build.yaml +++ b/.github/workflows/ci-build.yaml @@ -14,7 +14,7 @@ jobs: miniconda-version: "latest" python-version: "3.7" environment-file: binder/environment.yml - activate-environment: dask-examples + activate-environment: dask-tutorial auto-activate-base: false - name: Install testing and docs dependencies From 75d2cebb6adc6a88c831d70f90bce2c82991f189 Mon Sep 17 00:00:00 2001 From: James Bourbeau Date: Mon, 9 Nov 2020 11:15:51 -0600 Subject: [PATCH 3/3] Update sphinx-build command --- .github/workflows/ci-build.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-build.yaml b/.github/workflows/ci-build.yaml index a0fcef7..dbed448 100644 --- a/.github/workflows/ci-build.yaml +++ b/.github/workflows/ci-build.yaml @@ -26,7 +26,7 @@ jobs: shell: bash -l {0} run: | python prep.py --small - sphinx-build -M html . _build -vsphinx-build -M html . _build -v + sphinx-build -M html . _build -v - name: Deploy if: ${{ github.ref == 'refs/heads/master' && github.event_name != 'pull_request'}}
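
For anyone reviewing these patches who wants to check the docs build locally, the commands below are a minimal sketch of what the CI "Build" job ends up running once all three patches are applied. It is not part of the patch series itself; it assumes conda is installed, the commands are run from the dask-tutorial repository root, and the environment defined in binder/environment.yml is named dask-tutorial (the name PATCH 2/3 sets for the workflow and the name the README activates).

    # Local sketch of the CI build (assumption: conda available, run from the repo root)
    conda env create -f binder/environment.yml
    conda activate dask-tutorial

    # Testing and docs dependencies, matching the workflow's install step
    conda install -c conda-forge nbconvert nbformat jupyter_client ipykernel
    pip install nbsphinx dask-sphinx-theme sphinx

    # Build the rendered notebooks, matching the "Build" step as fixed by PATCH 3/3
    python prep.py --small
    sphinx-build -M html . _build -v

If the build succeeds, the HTML output lands in _build/html, which is the folder the Deploy step publishes to the gh-pages branch.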