Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add rust to should_use_compilers lint #1018

Merged
merged 2 commits into from
Nov 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 40 additions & 26 deletions bioconda_utils/lint/check_build_help.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import os

from . import LintCheck, ERROR, WARNING, INFO
from . import INFO, WARNING, LintCheck


class should_use_compilers(LintCheck):
Expand All @@ -21,16 +21,25 @@ class should_use_compilers(LintCheck):
build:
- {{ compiler('language') }}

Where language is one of ``c``, ``cxx``, ``fortran``, ``go`` or
Where language is one of ``c``, ``cxx``, ``fortran``, ``rust``, ``go`` or
``cgo``. You can specify multiple compilers if needed.

There is no need to add ``libgfortran``, ``libgcc``, or
``toolchain`` to the dependencies as this will be handled by
conda-build itself.

"""
compilers = ('gcc', 'llvm', 'libgfortran', 'libgcc', 'go', 'cgo',
'toolchain')

compilers = (
"gcc",
"llvm",
"libgfortran",
"libgcc",
"go",
"cgo",
"toolchain",
"rust",
)

def check_deps(self, deps):
for compiler in self.compilers:
Expand All @@ -45,15 +54,15 @@ class compilers_must_be_in_build(LintCheck):
``requirements: build:`` section.

"""

def check_deps(self, deps):
for dep in deps:
if dep.startswith('compiler_'):
if dep.startswith("compiler_"):
for location in deps[dep]:
if 'run' in location or 'host' in location:
if "run" in location or "host" in location:
self.message(section=location)



class uses_setuptools(LintCheck):
"""The recipe uses setuptools in run depends

Expand All @@ -62,10 +71,11 @@ class uses_setuptools(LintCheck):
pkg_resources or setuptools console scripts).

"""

severity = INFO

def check_recipe(self, recipe):
if 'setuptools' in recipe.get_deps('run'):
if "setuptools" in recipe.get_deps("run"):
self.message()


Expand All @@ -81,27 +91,28 @@ class setup_py_install_args(LintCheck):
requires defines entrypoints in its ``setup.py``.

"""

@staticmethod
def _check_line(line: str) -> bool:
"""Check a line for a broken call to setup.py"""
if 'setup.py install' not in line:
if "setup.py install" not in line:
return True
if '--single-version-externally-managed' in line:
if "--single-version-externally-managed" in line:
return True
return False

def check_deps(self, deps):
if 'setuptools' not in deps:
if "setuptools" not in deps:
return # no setuptools, no problem

if not self._check_line(self.recipe.get('build/script', '')):
self.message(section='build/script')
if not self._check_line(self.recipe.get("build/script", "")):
self.message(section="build/script")

try:
with open(os.path.join(self.recipe.dir, 'build.sh')) as buildsh:
with open(os.path.join(self.recipe.dir, "build.sh")) as buildsh:
for num, line in enumerate(buildsh):
if not self._check_line(line):
self.message(fname='build.sh', line=num)
self.message(fname="build.sh", line=num)
except FileNotFoundError:
pass

Expand All @@ -115,10 +126,10 @@ class cython_must_be_in_host(LintCheck):
host:
- cython
"""

def check_deps(self, deps):
if 'cython' in deps:
if any('host' not in location
for location in deps['cython']):
if "cython" in deps:
if any("host" not in location for location in deps["cython"]):
self.message()


Expand All @@ -132,9 +143,11 @@ class cython_needs_compiler(LintCheck):
- {{ compiler('c') }}

"""

severity = WARNING

def check_deps(self, deps):
if 'cython' in deps and 'compiler_c' not in deps and 'compiler_cxx' not in deps:
if "cython" in deps and "compiler_c" not in deps and "compiler_cxx" not in deps:
self.message()


Expand All @@ -143,23 +156,23 @@ class missing_run_exports(LintCheck):

This ensures that the package is automatically pinned to a compatible version if
it is used as a dependency in another recipe.
This is a conservative strategy to avoid breakage. We came to the
This is a conservative strategy to avoid breakage. We came to the
conclusion that it is better to require this little overhead instead
of trying to fix things when they break later on.
This holds for compiled packages (in particular those with shared
libraries) but also for e.g. Python packages, as those might also
introduce breaking changes in their APIs or command line interfaces.

We distinguish between four cases.
We distinguish between four cases.

**Case 1:** If the software follows semantic versioning (or it has at least a normal version string (like 1.2.3) and the actual strategy of the devs is unknown), add run_exports to the recipe like this::

build:
run_exports:
- {{ pin_subpackage('myrecipe', max_pin="x") }}

with ``myrecipe`` being the name of the recipe (you can also use the name variable).
This will by default pin the package to ``>=1.2.0,<2.0.0`` where ``1.2.0`` is the
This will by default pin the package to ``>=1.2.0,<2.0.0`` where ``1.2.0`` is the
version of the package at build time of the one depending on it and ``<2.0.0`` constrains
it to be less than the next major (i.e. potentially not backward compatible) version.

Expand All @@ -168,15 +181,15 @@ class missing_run_exports(LintCheck):
build:
run_exports:
- {{ pin_subpackage('myrecipe', max_pin="x.x") }}

**Case 3:** If the software has a normal versioning (like 1.2.3) but reportedly does not follow semantic versioning, please choose the ``max_pin`` argument such that it captures the potential next version that will introduce a breaking change.
E.g. if you expect breaking changes to occur with the next minor release, choose ``max_pin="x.x"``, if they even can occur with the next patch release, choose ``max_pin="x.x.x"``.

**Case 4:** If the software does have a non-standard versioning (e.g. calendar versioning like 20220602), we cannot really protect well against breakages. However, we can at least pin to the current version as a minimum and skip the max_pin constraint. This works by setting ``max_pin=None``.

In the recipe depending on this one, one just needs to specify the package name
and no version at all.

Also check out the possible arguments of `pin_subpackage` here:
https://docs.conda.io/projects/conda-build/en/stable/resources/define-metadata.html#export-runtime-requirements

Expand All @@ -195,6 +208,7 @@ class missing_run_exports(LintCheck):
Usually, there is no need to use ``pin_compatible``, just use ``pin_subpackage`` as shown above, and fix
run_exports in upstream packages as well if needed.
"""

def check_recipe(self, recipe):
build = recipe.meta.get("build", dict())
if "run_exports" not in build:
Expand Down
3 changes: 3 additions & 0 deletions test/lint_cases.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,9 @@ tests:
- name: compiler_old_3
expect: should_use_compilers
add: { requirements: { build: ["llvm # [osx]"] } }
- name: compiler_old_4
expect: should_use_compilers
add: { requirements: { build: ["rust >=1.56"] } }
- name: compiler_in_host
expect: compilers_must_be_in_build
remove: build/noarch
Expand Down