From 242c749f210f3627a8ef9761a79b4ca9dbebb65f Mon Sep 17 00:00:00 2001
From: Remi Gau
Date: Tue, 22 Oct 2024 10:07:18 +0200
Subject: [PATCH] [DOC][MAINT] build doc with minimum version of python and dependencies (#4650)

* test doc on minimum
* [full doc] request full build
* let tox build the doc
* [full doc] request full build
* tight layout
* escape rich
* [full doc] request full build
* fix
* restore tests
* use old layout in examples
* fix
---
 .github/workflows/README.md                  |  4 +-
 .github/workflows/build-docs.yml             | 44 +++++++++----------
 .github/workflows/release-docs.yml           | 34 +++++++-------
 .gitignore                                   |  3 ++
 CONTRIBUTING.rst                             |  3 +-
 doc/Makefile                                 |  2 +-
 doc/maintenance.rst                          | 13 ++++--
 .../plot_haxby_different_estimators.py       |  2 +-
 .../02_decoding/plot_haxby_full_analysis.py  |  2 +-
 .../plot_compare_decomposition.py            |  3 +-
 .../plot_group_level_connectivity.py         |  3 +-
 .../plot_seed_to_voxel_correlation.py        |  6 +--
 .../plot_sphere_based_connectome.py          |  3 +-
 .../04_glm_first_level/plot_design_matrix.py | 21 +++------
 .../plot_predictions_residuals.py            |  2 +-
 .../plot_second_level_design_matrix.py       |  3 +-
 .../plot_second_level_two_sample_test.py     |  4 +-
 .../plot_age_group_prediction_cross_val.py   |  3 +-
 nilearn/_utils/logger.py                     |  3 +-
 tox.ini                                      | 20 +++++++--
 20 files changed, 97 insertions(+), 81 deletions(-)

diff --git a/.github/workflows/README.md b/.github/workflows/README.md
index 1de3017170..8dd8957dc3 100644
--- a/.github/workflows/README.md
+++ b/.github/workflows/README.md
@@ -27,7 +27,7 @@ Automatically comments on a newly open pull request to provide some guidelines,
 
 #### Full and partial doc builds
 
-This workflow configuration is based on what is done in [scikit-learn](https://github.com/scikit-learn/scikit-learn).
+This workflow was originally derived from what is done in [scikit-learn](https://github.com/scikit-learn/scikit-learn).
 
 On Pull Requests, Actions run "partial builds" by default which render all the rst files,
 but only build examples modified in the Pull Request.
@@ -55,7 +55,7 @@ $ git commit -m "[example] plot_nilearn_101.py"
 However for quick checks to do yourself you should always opt for local builds
 following the instructions here: [building-documentation](https://nilearn.github.io/stable/development.html#building-documentation).
 
-Note: setuptools needs to be installed to run the doc build with python 3.12.
+Note: setuptools needs to be installed to run the doc build with python >=3.12.
 
 Upon a successful build of the doc, it is zipped and uploaded as an artifact.
 A circle-ci workflow is then triggered. See below.
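For readers who have not looked at the workflow itself: the "[full doc]" and "[example] <name>" directives described above are picked up by the "Find build type" step of build-docs.yml, which records the requested build in the build.txt and pattern.txt files that the tox-based "Build docs" step (modified further down in this patch) then reads. That step is not reproduced here, so the Python sketch below is only a hypothetical illustration of the kind of logic involved; the file names build.txt and pattern.txt come from this patch, while the parsing details and the "html" target are assumptions.

# Hypothetical sketch only: the real "Find build type" step is shell code in
# build-docs.yml, and the exact make targets are not shown in this patch.
import re
import sys

commit_message = sys.argv[1] if len(sys.argv) > 1 else ""

if "[full doc]" in commit_message:
    # full build: render every example ("html" target is an assumption)
    build_target, pattern = "html", ""
elif match := re.search(r"\[example\]\s+(\S+\.py)", commit_message):
    # build only the example named in the commit message
    build_target, pattern = "html", match.group(1)
else:
    # default partial build; the real step derives the pattern from the
    # files modified in the pull request rather than falling back to the
    # plot-free html-noplot target used for quick local checks
    build_target, pattern = "html-noplot", ""

# Both files are consumed later by the "Build docs" step:
#   PATTERN=$(cat pattern.txt)
#   tox run ... -e doc -- $(cat build.txt)
with open("build.txt", "w") as f:
    f.write(build_target)
with open("pattern.txt", "w") as f:
    f.write(pattern)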
diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index 54d1fa6abe..3fe2342eae 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -36,6 +36,7 @@ env:
   BROWSER: /usr/bin/firefox
   DISPLAY: :99.0
   NILEARN_DATA: /home/runner/work/nilearn/nilearn/nilearn_data
+  MIN_PYTHON_VERSION: '3.9'
 
 jobs:
 
@@ -78,7 +79,7 @@ jobs:
       - name: Setup python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.12'
+          python-version: ${{ env.MIN_PYTHON_VERSION }}
 
       - name: Install packages
         run: |
@@ -192,20 +193,6 @@ jobs:
           sudo -E apt-get -yq update
           sudo -E apt-get -yq --no-install-suggests --no-install-recommends install \
             dvipng texlive-latex-base texlive-latex-extra
-      - name: Setup python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.12'
-      # Install the local version of the library, along with both standard and testing-related dependencies
-      # The `doc` dependency group is included because the build_docs job uses this script.
-      # See pyproject.toml for dependency group options
-      - name: Install packages
-        run: |
-          python -m pip install --user --upgrade pip setuptools
-          python -m pip install .[plotting,doc]
-
-      - name: List dependencies
-        run: pip freeze
 
       # Check if we are doing a full or partial build
       - name: Find build type
@@ -228,18 +215,24 @@
           restore-keys: |
             data_cache-${{ github.workflow }}_ref-${{ github.ref }}_run-${{ github.run_number }}
 
-      # Update the authors file and the names file
-      # in case a contributor has been added to citation.cff
-      # but did not run the maint_tools/citation_cff_maint.py script.
-      - name: update AUTHORS.rst and doc/changes/names.rst
-        run: python maint_tools/citation_cff_maint.py
-
       # Set up and launch a virtual browser needed for one example to run
       # without stalling the job. The example launches an html in the browser.
       - name: Set up display server for virtual browser
         run: Xvfb -ac :99 -screen 0 1280x1024x16 > /dev/null 2>&1 &
 
-      # Run the doc build.
+      - name: Setup python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.MIN_PYTHON_VERSION }}
+
+      - name: Install tox
+        run: python -m pip install tox
+
+      - name: Show tox config
+        run: tox c
+
+      # Run the doc build
+      # We let tox handle creating the virtual environment and installing dependencies.
       # If no data is restored in previous steps,
       # the data will be downloaded during the build
       # (this only applies for full builds;
@@ -247,9 +240,12 @@
       - name: Build docs
         id: build-docs
         run: |
-          cd doc;
           set -o pipefail;
-          PATTERN=$(cat ../pattern.txt) make $(cat ../build.txt) 2>&1 | tee log.txt;
+          PATTERN=$(cat pattern.txt)
+          tox run \
+              --colored yes \
+              --list-dependencies \
+              -e doc -- $(cat build.txt) 2>&1 | tee log.txt;
 
       - name: Check for unreplaced argument in docstrings
         if: always()
diff --git a/.github/workflows/release-docs.yml b/.github/workflows/release-docs.yml
index 4a9cdfc45a..ebc6629e65 100644
--- a/.github/workflows/release-docs.yml
+++ b/.github/workflows/release-docs.yml
@@ -13,6 +13,7 @@ env:
   BROWSER: /usr/bin/firefox
   DISPLAY: :99.0
   NILEARN_DATA: /home/runner/work/nilearn/nilearn/nilearn_data
+  MIN_PYTHON_VERSION: '3.9'
 
 jobs:
 
@@ -35,7 +36,7 @@ jobs:
       - name: Setup python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.12'
+          python-version: ${{ env.MIN_PYTHON_VERSION }}
       - name: Install packages
         run: |
           python -m pip install --user --upgrade pip setuptools
@@ -93,22 +94,6 @@ jobs:
           sudo -E apt-get -yq --no-install-suggests --no-install-recommends install \
             dvipng texlive-latex-base texlive-latex-extra
-      - name: Setup python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.12'
-
-      - name: Install packages
-        run: |
-          python -m pip install --user --upgrade pip setuptools
-          python -m pip install .[plotting,doc]
-
-      # Update the authors file and the names file
-      # in case a contributor has been added to citation.cff
-      # but did not run the maint_tools/citation_cff_maint.py script.
-      - name: update AUTHORS.rst and doc/changes/names.rst
-        run: python maint_tools/citation_cff_maint.py
-
       - name: Get data from the get_data job
         uses: actions/cache@v4
         with:
@@ -122,13 +107,24 @@
       - name: Set up display server for virtual browser
         run: Xvfb -ac :99 -screen 0 1280x1024x16 > /dev/null 2>&1 &
 
+      - name: Setup python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ env.MIN_PYTHON_VERSION }}
+
+      - name: Install tox
+        run: python -m pip install pip tox
+
       - name: Build docs
+        # We let tox handle creating the virtual environment and installing dependencies.
         env:
           SSH_AUTH_SOCK: /tmp/ssh_agent.sock
         run: |
           git config --global user.email "actions@github.com"
           git config --global user.name "GitHub actions"
-          cd doc
           set -o pipefail
           export VERSIONTAG=$(git describe --tags --abbrev=0)
-          make install
+          tox run \
+              --colored yes \
+              --list-dependencies \
+              -e doc -- install
diff --git a/.gitignore b/.gitignore
index 46f7c93e2d..1965ca8ad5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,6 +71,9 @@ doc/modules/generated_reports/*.html
 doc/modules/description
 doc/sg_execution_times.rst
 doc/themes/nilearn/static/jquery.js
+log.txt
+build.txt
+pattern.txt
 
 # examples
 results/
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index e4da8fe029..5c582f724b 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -791,7 +791,8 @@ or, if you do not have make install (for instance under Windows):
 
     python3 -m sphinx -b html -d _build/doctrees . _build/html
 
-if you don't need the plots, a quicker option is:
+The full build can take a very long time.
+So if you don't need the plots, a quicker option is:
 
 .. code-block:: bash
diff --git a/doc/Makefile b/doc/Makefile
index 61b5369d7e..c930073f3a 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -14,7 +14,7 @@ export BROWSER_PYSCRIPT
 BROWSER := python -c "$$BROWSER_PYSCRIPT"
 
 # You can set these variables from the command line.
-SPHINXOPTS = -v
+SPHINXOPTS = -v --color
 SPHINXBUILD = sphinx-build
 PAPER =
 BUILDDIR = _build
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index d15630aa09..d73302787f 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -389,8 +389,8 @@ Build and deploy the documentation manually
 
 .. note::
 
-    This step is now automated as described above. If there is a need to run it
-    manually please follow the instructions below.
+    This step is now automated as described above.
+    If there is a need to run it manually, please follow the instructions below.
 
 
 Before building the documentation, make sure that the following LaTeX
@@ -426,12 +426,17 @@ See available linux distributions of
 texlive-latex-base and texlive-latex-extra:
 - https://pkgs.org/search/?q=texlive-latex-extra
 
 We now need to update the documentation.
+We let tox handle creating the virtual environment and installing dependencies.
+
+.. warning::
+
+    The doc build is done with the minimum python version supported by Nilearn.
 
 .. code-block:: bash
 
-    cd doc
     export VERSIONTAG=$(git describe --tags --abbrev=0)
-    make install
+    pip install tox
+    tox run --colored yes --list-dependencies -e doc -- install
 
 This will build the documentation (beware, this is time consuming...)
diff --git a/examples/02_decoding/plot_haxby_different_estimators.py b/examples/02_decoding/plot_haxby_different_estimators.py
index 7261d51466..481df4e7bc 100644
--- a/examples/02_decoding/plot_haxby_different_estimators.py
+++ b/examples/02_decoding/plot_haxby_different_estimators.py
@@ -118,7 +118,7 @@
 # Then we make a rudimentary diagram
 import matplotlib.pyplot as plt
 
-plt.subplots(figsize=(8, 6), layout="constrained")
+plt.subplots(figsize=(8, 6), constrained_layout=True)
 
 all_categories = np.sort(np.hstack([categories, "AVERAGE"]))
 tick_position = np.arange(len(all_categories))
diff --git a/examples/02_decoding/plot_haxby_full_analysis.py b/examples/02_decoding/plot_haxby_full_analysis.py
index 0b6165ed18..fb5fa8d6e5 100644
--- a/examples/02_decoding/plot_haxby_full_analysis.py
+++ b/examples/02_decoding/plot_haxby_full_analysis.py
@@ -129,7 +129,7 @@
 # --------------------------------------------------
 import matplotlib.pyplot as plt
 
-plt.figure(layout="constrained")
+plt.figure(constrained_layout=True)
 
 tick_position = np.arange(len(categories))
 plt.xticks(tick_position, categories, rotation=45)
diff --git a/examples/03_connectivity/plot_compare_decomposition.py b/examples/03_connectivity/plot_compare_decomposition.py
index 11cdda3c03..b2334a5a30 100644
--- a/examples/03_connectivity/plot_compare_decomposition.py
+++ b/examples/03_connectivity/plot_compare_decomposition.py
@@ -165,7 +165,8 @@
 from matplotlib import pyplot as plt
 from matplotlib.ticker import FormatStrFormatter
 
-plt.figure(figsize=(4, 4), layout="constrained")
+plt.figure(figsize=(4, 4), constrained_layout=True)
+
 positions = np.arange(len(scores))
 plt.barh(positions, scores)
 plt.ylabel("Component #", size=12)
diff --git a/examples/03_connectivity/plot_group_level_connectivity.py b/examples/03_connectivity/plot_group_level_connectivity.py
index 6ea74dc00d..09d43a4cb5 100644
--- a/examples/03_connectivity/plot_group_level_connectivity.py
+++ b/examples/03_connectivity/plot_group_level_connectivity.py
@@ -243,7 +243,8 @@
 mean_scores = [np.mean(scores[kind]) for kind in kinds]
 scores_std = [np.std(scores[kind]) for kind in kinds]
 
-plt.figure(figsize=(6, 4), layout="constrained")
+plt.figure(figsize=(6, 4), constrained_layout=True)
+
 positions = np.arange(len(kinds)) * 0.1 + 0.1
 plt.barh(positions, mean_scores, align="center", height=0.05, xerr=scores_std)
 yticks = [k.replace(" ", "\n") for k in kinds]
diff --git a/examples/03_connectivity/plot_seed_to_voxel_correlation.py b/examples/03_connectivity/plot_seed_to_voxel_correlation.py
index f8d163471f..f4474c501e 100644
--- a/examples/03_connectivity/plot_seed_to_voxel_correlation.py
+++ b/examples/03_connectivity/plot_seed_to_voxel_correlation.py
@@ -121,10 +121,10 @@
 # %%
 # We can plot the **seed time series**.
-
 import matplotlib.pyplot as plt
 
-plt.figure(layout="constrained")
+plt.figure(constrained_layout=True)
+
 plt.plot(seed_time_series)
 plt.title("Seed time series (Posterior cingulate cortex)")
 plt.xlabel("Scan number")
@@ -133,8 +133,8 @@
 # %%
 # Exemplarily, we can also select 5 random voxels from the **brain-wide
 # data** and plot the time series from.
+plt.figure(constrained_layout=True)
 
-plt.figure(layout="constrained")
 plt.plot(brain_time_series[:, [10, 45, 100, 5000, 10000]])
 plt.title("Time series from 5 random voxels")
 plt.xlabel("Scan number")
diff --git a/examples/03_connectivity/plot_sphere_based_connectome.py b/examples/03_connectivity/plot_sphere_based_connectome.py
index 12e4e16889..57cda350e1 100644
--- a/examples/03_connectivity/plot_sphere_based_connectome.py
+++ b/examples/03_connectivity/plot_sphere_based_connectome.py
@@ -94,7 +94,8 @@
 # -------------------
 import matplotlib.pyplot as plt
 
-plt.figure(layout="constrained")
+plt.figure(constrained_layout=True)
+
 for time_serie, label in zip(time_series.T, labels):
     plt.plot(time_serie, label=label)
diff --git a/examples/04_glm_first_level/plot_design_matrix.py b/examples/04_glm_first_level/plot_design_matrix.py
index b1a6151d89..7682882245 100644
--- a/examples/04_glm_first_level/plot_design_matrix.py
+++ b/examples/04_glm_first_level/plot_design_matrix.py
@@ -108,11 +108,9 @@
 import matplotlib.pyplot as plt
 
 fig, (ax1, ax2, ax3) = plt.subplots(
-    figsize=(10, 6),
-    nrows=1,
-    ncols=3,
-    layout="constrained",
+    figsize=(10, 6), nrows=1, ncols=3, constrained_layout=True
 )
+
 plot_design_matrix(X1, axes=ax1)
 ax1.set_title("Event-related design matrix", fontsize=12)
 plot_design_matrix(X2, axes=ax2)
@@ -130,15 +128,12 @@
 # the effficieny of
 # `your design `_. # noqa: E501
 #
-
 from nilearn.plotting import plot_design_matrix_correlation
 
-fig3, (ax1, ax2, ax3) = plt.subplots(
-    figsize=(16, 5),
-    nrows=1,
-    ncols=3,
-    layout="constrained",
+fig, (ax1, ax2, ax3) = plt.subplots(
+    figsize=(16, 5), nrows=1, ncols=3, constrained_layout=True
 )
+
 plot_design_matrix_correlation(X1, axes=ax1)
 ax1.set_title("Event-related correlation matrix", fontsize=12)
 plot_design_matrix_correlation(X2, axes=ax2)
@@ -189,11 +184,9 @@
 # Let's compare it to the unmodulated block design
 
 fig, (ax1, ax2) = plt.subplots(
-    figsize=(10, 6),
-    nrows=1,
-    ncols=2,
-    layout="constrained",
+    figsize=(10, 6), nrows=1, ncols=2, constrained_layout=True
 )
+
 plot_design_matrix(X2, axes=ax1)
 ax1.set_title("Block design matrix", fontsize=12)
 plot_design_matrix(X4, axes=ax2)
diff --git a/examples/04_glm_first_level/plot_predictions_residuals.py b/examples/04_glm_first_level/plot_predictions_residuals.py
index a8892aba82..f0e3d59daa 100644
--- a/examples/04_glm_first_level/plot_predictions_residuals.py
+++ b/examples/04_glm_first_level/plot_predictions_residuals.py
@@ -139,8 +139,8 @@
 # Plot distribution of residuals
 # ------------------------------
 # Note that residuals are not really distributed normally.
+fig2, axs2 = plt.subplots(2, 3, constrained_layout=True)
 
-fig2, axs2 = plt.subplots(2, 3, layout="constrained")
 axs2 = axs2.flatten()
 for i in range(6):
     axs2[i].set_title(f"Cluster peak {coords[i]}\n")
diff --git a/examples/05_glm_second_level/plot_second_level_design_matrix.py b/examples/05_glm_second_level/plot_second_level_design_matrix.py
index 492ed2c551..3dbc900196 100644
--- a/examples/05_glm_second_level/plot_second_level_design_matrix.py
+++ b/examples/05_glm_second_level/plot_second_level_design_matrix.py
@@ -53,7 +53,8 @@
 # Let's plot it.
 from nilearn.plotting import plot_design_matrix
 
-fig, ax1 = plt.subplots(1, 1, figsize=(3, 4), layout="constrained")
+fig, ax1 = plt.subplots(1, 1, figsize=(3, 4), constrained_layout=True)
+
 ax = plot_design_matrix(design_matrix, axes=ax1)
 ax.set_ylabel("maps")
 ax.set_title("Second level design matrix", fontsize=12)
diff --git a/examples/05_glm_second_level/plot_second_level_two_sample_test.py b/examples/05_glm_second_level/plot_second_level_two_sample_test.py
index 2beef48b5c..f8bd35d2bf 100644
--- a/examples/05_glm_second_level/plot_second_level_two_sample_test.py
+++ b/examples/05_glm_second_level/plot_second_level_two_sample_test.py
@@ -93,8 +93,10 @@
     1,
     2,
     gridspec_kw={"width_ratios": [1, 17]},
-    layout="constrained",
+    constrained_layout=True,
 )
+
+
 plotting.plot_design_matrix(
     unpaired_design_matrix, rescale=False, axes=ax_unpaired
 )
diff --git a/examples/07_advanced/plot_age_group_prediction_cross_val.py b/examples/07_advanced/plot_age_group_prediction_cross_val.py
index f6b19f90e7..a5b046db18 100644
--- a/examples/07_advanced/plot_age_group_prediction_cross_val.py
+++ b/examples/07_advanced/plot_age_group_prediction_cross_val.py
@@ -119,7 +119,8 @@
 
 # %%
 # display the results
-plt.figure(figsize=(6, 4), layout="constrained")
+plt.figure(figsize=(6, 4), constrained_layout=True)
+
 positions = [0.1, 0.2, 0.3, 0.4]
 plt.barh(positions, mean_scores, align="center", height=0.05, xerr=scores_std)
 yticks = ["dummy", *list(gs.cv_results_["param_connectivity__kind"].data[1:])]
diff --git a/nilearn/_utils/logger.py b/nilearn/_utils/logger.py
index 22a19e0f87..9a02e2e1da 100644
--- a/nilearn/_utils/logger.py
+++ b/nilearn/_utils/logger.py
@@ -21,6 +21,7 @@ def _has_rich():
 
 if _has_rich():
     from rich import print
+    from rich.markup import escape
 
 
 # The technique used in the log() function only applies to CPython, because
@@ -94,7 +95,7 @@ def log(
         func_name = f"{object_self.__class__.__name__}.{func_name}"
 
     if _has_rich():
-        print(f"[blue]\\[{func_name}][/blue] {msg}")
+        print(f"[blue]\\[{func_name}][/blue] {escape(msg)}")
     else:
         print(f"[{func_name}] {msg}")
diff --git a/tox.ini b/tox.ini
index 2c4d35b96b..64f5ca7b16 100644
--- a/tox.ini
+++ b/tox.ini
@@ -152,11 +152,25 @@ commands =
     pytest {posargs:}
 
 [testenv:doc]
-description = build doc
+description = build doc with the minimum supported version of python and all dependencies (plotting included).
+base_python = 3.9
 extras = doc
-passenv = {[global_var]passenv}
+deps =
+    {[min]deps}
+    {[plotmin]deps}
+    rich
+    plotly
+    kaleido
+passenv =
+    {[global_var]passenv}
+    PATTERN
+    VERSIONTAG
 allowlist_externals = make
 commands =
    make -C doc clean
-    make -C doc html-noplot
+    ; Update the authors file and the names file
+    ; in case a contributor has been added to citation.cff
+    ; but did not run the maint_tools/citation_cff_maint.py script.
+    python maint_tools/citation_cff_maint.py
+    make -C doc {posargs:}
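A word on the one-line change in nilearn/_utils/logger.py above: rich treats anything inside square brackets as console markup, so a log message that itself contains brackets can be mangled or can raise a markup error when passed straight to rich's print. The snippet below is a minimal, self-contained illustration of what the added escape() call guards against; the function and message names are made up for the example.

from rich import print  # shadows the builtin print, as in nilearn's logger
from rich.markup import escape

func_name = "NiftiMasker.fit"
msg = "loading [mask_img] from disk"  # the brackets look like rich markup

# Without escape(), rich would try to interpret "[mask_img]" as a style tag,
# which can swallow the bracketed text or raise rich.errors.MarkupError.
# escape() backslash-escapes the brackets so the message prints verbatim.
print(f"[blue]\\[{func_name}][/blue] {escape(msg)}")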