Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into jlap-two-file-cache
Browse files Browse the repository at this point in the history
  • Loading branch information
dholth committed Nov 16, 2023
2 parents 0cf6b34 + a5036a8 commit 1b5f85d
Show file tree
Hide file tree
Showing 22 changed files with 823 additions and 1,799 deletions.
4 changes: 0 additions & 4 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,6 @@ jobs:
--file /opt/conda-libmamba-solver-src/dev/requirements.txt \
--file /opt/conda-libmamba-solver-src/tests/requirements.txt &&
sudo /opt/conda/bin/python -m pip install /opt/conda-libmamba-solver-src --no-deps -vvv &&
sudo /opt/conda/bin/python -m pip install /opt/conda-libmamba-solver-src/dev/collect_upstream_conda_tests/ -vvv &&
source /opt/conda-src/dev/linux/bashrc.sh &&
/opt/conda/bin/python -m pytest /opt/conda-libmamba-solver-src -vv -m 'not slow'"
Expand Down Expand Up @@ -167,7 +166,6 @@ jobs:
--file ../conda-libmamba-solver/tests/requirements.txt \
python=${{ matrix.python-version }}
conda update openssl ca-certificates certifi
python -m pip install ../conda-libmamba-solver/dev/collect_upstream_conda_tests -vv
conda info
python -c "from importlib.metadata import version; print('libmambapy', version('libmambapy'))"
Expand Down Expand Up @@ -263,8 +261,6 @@ jobs:
run: |
call .\dev-init.bat
if errorlevel 1 exit 1
python -m pip install -vv "%GITHUB_WORKSPACE%\conda-libmamba-solver\dev\collect_upstream_conda_tests"
if errorlevel 1 exit 1
python -m pip install --no-deps -vv "%GITHUB_WORKSPACE%\conda-libmamba-solver"
if errorlevel 1 exit 1
Expand Down
214 changes: 29 additions & 185 deletions .github/workflows/upstream_tests.yml

Large diffs are not rendered by default.

5 changes: 2 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ ci:
exclude: |
(?x)^(
tests/data/ |
tests/_reposerver\.py |
conda_libmamba_solver/mamba_utils\.py
)/
repos:
Expand All @@ -29,7 +28,7 @@ repos:
args: ["--py38-plus"]
exclude: ^conda/exports.py
- repo: https://github.com/psf/black
rev: 23.10.1
rev: 23.11.0
hooks:
- id: black
exclude: tests/_reposerver\.py
Expand Down Expand Up @@ -65,4 +64,4 @@ repos:
- id: insert-license
files: \.py$
args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
exclude: ^(tests/repodata_time_machine.py|mamba_utils\.py) # extend global exclude
exclude: ^(tests/repodata_time_machine.py|mamba_utils\.py|tests/channel_testing/helpers\.py|tests/channel_testing/reposerver\.py) # extend global exclude
37 changes: 22 additions & 15 deletions conda_libmamba_solver/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,12 +156,14 @@ def reload_local_channels(self):
"""
Reload a channel that was previously loaded from a local directory.
"""
for url, info in self._index.items():
if url.startswith("file://"):
url, json_path, overlay_path = self._fetch_channel(url)
new = self._json_path_to_repo_info(url, json_path, overlay_path)
self._repos[self._repos.index(info.repo)] = new.repo
self._index[url] = new
for noauth_url, info in self._index.items():
if noauth_url.startswith("file://") or info.channel.scheme == "file":
url, json_path = self._fetch_channel(info.full_url)
repo_position = self._repos.index(info.repo)
info.repo.clear(True)
new = self._json_path_to_repo_info(url, json_path, try_solv=False)
self._repos[repo_position] = new.repo
self._index[noauth_url] = new
set_channel_priorities(self._index)

def _repo_from_records(
Expand Down Expand Up @@ -245,21 +247,25 @@ def _fetch_channel(self, url: str) -> tuple[str, Path, Path | None]:
return url, json_path, overlay_path

def _json_path_to_repo_info(
self, url: str, json_path: str | Path, overlay_path: Path | None = None
) -> _ChannelRepoInfo | None:
self, url: str, json_path: str, try_solv: bool = False
) -> Optional[_ChannelRepoInfo]:
channel = Channel.from_url(url)
noauth_url = channel.urls(with_credentials=False, subdirs=(channel.subdir,))[0]
json_path = Path(json_path)
solv_path = json_path.parent / f"{json_path.stem}.solv"
try:
json_stat = json_path.stat()
except OSError as exc:
log.debug("Failed to stat %s", json_path, exc_info=exc)
json_stat = None
try:
solv_stat = solv_path.stat()
except OSError as exc:
log.debug("Failed to stat %s", solv_path, exc_info=exc)
if try_solv:
try:
solv_path = json_path.parent / f"{json_path.stem}.solv"
solv_stat = solv_path.stat()
except OSError as exc:
log.debug("Failed to stat %s", solv_path, exc_info=exc)
solv_stat = None
else:
solv_path = None
solv_stat = None

if solv_stat is None and json_stat is None:
Expand Down Expand Up @@ -304,8 +310,9 @@ def _load_channels(self) -> dict[str, _ChannelRepoInfo]:
noauth_urls = c.urls(with_credentials=False, subdirs=self._subdirs)
if seen_noauth.issuperset(noauth_urls):
continue
if c.auth or c.token: # authed channel always takes precedence
urls += Channel(c).urls(with_credentials=True, subdirs=self._subdirs)
auth_urls = c.urls(with_credentials=True, subdirs=self._subdirs)
if noauth_urls != auth_urls: # authed channel always takes precedence
urls += auth_urls
seen_noauth.update(noauth_urls)
continue
# at this point, we are handling an unauthed channel; in some edge cases,
Expand Down
49 changes: 37 additions & 12 deletions conda_libmamba_solver/solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
REPODATA_FN,
UNKNOWN_CHANNEL,
ChannelPriority,
on_win,
)
from conda.base.context import context
from conda.common.compat import on_win
from conda.common.constants import NULL
from conda.common.io import Spinner, timeout
from conda.common.path import paths_equal
Expand Down Expand Up @@ -160,7 +160,6 @@ def solve_final_state(
# From now on we _do_ require a solver and the index
init_api_context()
subdirs = self.subdirs
conda_bld_channels = ()
if self._called_from_conda_build():
log.info("Using solver via 'conda.plan.install_actions' (probably conda build)")
# Problem: Conda build generates a custom index which happens to "forget" about
Expand All @@ -179,6 +178,7 @@ def solve_final_state(
IndexHelper = _CachedLibMambaIndexHelper
else:
IndexHelper = LibMambaIndexHelper
conda_bld_channels = ()

all_channels = [
*conda_bld_channels,
Expand Down Expand Up @@ -402,10 +402,16 @@ def _solve_attempt(
def _specs_to_tasks(self, in_state: SolverInputState, out_state: SolverOutputState):
log.debug("Creating tasks for %s specs", len(out_state.specs))
if in_state.is_removing:
return self._specs_to_tasks_remove(in_state, out_state)
if self._called_from_conda_build():
return self._specs_to_tasks_conda_build(in_state, out_state)
return self._specs_to_tasks_add(in_state, out_state)
tasks = self._specs_to_tasks_remove(in_state, out_state)
elif self._called_from_conda_build():
tasks = self._specs_to_tasks_conda_build(in_state, out_state)
else:
tasks = self._specs_to_tasks_add(in_state, out_state)
log.debug(
"Created following tasks:\n%s",
json.dumps({k[0]: v for k, v in tasks.items()}, indent=2),
)
return tasks

@staticmethod
def _spec_to_str(spec):
Expand Down Expand Up @@ -456,7 +462,7 @@ def _specs_to_tasks_add(self, in_state: SolverInputState, out_state: SolverOutpu
# logic considers should be the target version for each package in the environment
# and requested changes. We are _not_ following those targets here, but we do iterate
# over the list to decide what to do with each package.
for name, _classic_logic_spec in out_state.specs.items():
for name, _classic_logic_spec in sorted(out_state.specs.items()):
if name.startswith("__"):
continue # ignore virtual packages
installed: PackageRecord = in_state.installed.get(name)
Expand Down Expand Up @@ -820,8 +826,11 @@ def _export_solved_records(
else:
log.warn("Tried to unlink %s but it is not installed or manageable?", filename)

for_conda_build = self._called_from_conda_build()
for channel, filename, json_payload in to_link:
record = self._package_record_from_json_payload(index, channel, filename, json_payload)
record = self._package_record_from_json_payload(
index, channel, filename, json_payload, for_conda_build=for_conda_build
)
# We need this check below to make sure noarch package get reinstalled
# record metadata coming from libmamba is incomplete and won't pass the
# noarch checks -- to fix it, we swap the metadata-only record with its locally
Expand All @@ -842,20 +851,28 @@ def _export_solved_records(
)

# Fixes conda-build tests/test_api_build.py::test_croot_with_spaces
if on_win and self._called_from_conda_build():
if on_win and for_conda_build:
for record in out_state.records.values():
record.channel.location = percent_decode(record.channel.location)
if "%" not in str(record):
continue
if record.channel.location: # multichannels like 'defaults' have no location
record.channel.location = percent_decode(record.channel.location)
record.channel.name = percent_decode(record.channel.name)

def _package_record_from_json_payload(
self, index: LibMambaIndexHelper, channel: str, pkg_filename: str, json_payload: str
self,
index: LibMambaIndexHelper,
channel: str,
pkg_filename: str,
json_payload: str,
for_conda_build: bool = False,
) -> PackageRecord:
"""
The libmamba transactions cannot return full-blown objects from the C/C++ side.
Instead, it returns the instructions to build one on the Python side:
channel_info: dict
Channel data, as built in .index.LibmambaIndexHelper._fetch_channel()
Channel data, as built in .index.LibmambaIndexHelper._fetch_channel()
This is retrieved from the .index._index mapping, keyed by channel URLs
pkg_filename: str
The filename (.tar.bz2 or .conda) of the selected record.
Expand All @@ -881,6 +898,14 @@ def _package_record_from_json_payload(
# Otherwise, these are records from the index
kwargs["fn"] = pkg_filename
kwargs["channel"] = channel_info.channel
if for_conda_build:
# conda-build expects multichannel instances in the Dist->PackageRecord mapping
# see https://github.com/conda/conda-libmamba-solver/issues/363
for multichannel_name, mc_channels in context.custom_multichannels.items():
urls = [url for c in mc_channels for url in c.urls(with_credentials=False)]
if channel_info.noauth_url in urls:
kwargs["channel"] = multichannel_name
break
kwargs["url"] = join_url(channel_info.full_url, pkg_filename)
if not kwargs.get("subdir"): # missing in old channels
kwargs["subdir"] = channel_info.channel.subdir
Expand Down
13 changes: 8 additions & 5 deletions conda_libmamba_solver/state.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,8 +231,9 @@ def installed(self) -> Mapping[str, PackageRecord]:
"""
This exposes the installed packages in the prefix. Note that a ``PackageRecord``
can generate an equivalent ``MatchSpec`` object with ``.to_match_spec()``.
Records are toposorted.
"""
return MappingProxyType(self.prefix_data._prefix_records)
return MappingProxyType(dict(sorted(self.prefix_data._prefix_records.items())))

@property
def history(self) -> Mapping[str, MatchSpec]:
Expand Down Expand Up @@ -261,7 +262,7 @@ def virtual(self) -> Mapping[str, MatchSpec]:
cannot be (un)installed, they only represent constrains for other packages. By convention,
their names start with a double underscore.
"""
return MappingProxyType(self._virtual)
return MappingProxyType(dict(sorted(self._virtual.items())))

@property
def aggressive_updates(self) -> Mapping[str, MatchSpec]:
Expand All @@ -281,12 +282,14 @@ def always_update(self) -> Mapping[str, MatchSpec]:
- almost all packages if update_all is true
- etc
"""
pkgs = {pkg: MatchSpec(pkg) for pkg in self.aggressive_updates if pkg in self.installed}
installed = self.installed
pinned = self.pinned
pkgs = {pkg: MatchSpec(pkg) for pkg in self.aggressive_updates if pkg in installed}
if context.auto_update_conda and paths_equal(self.prefix, context.root_prefix):
pkgs.setdefault("conda", MatchSpec("conda"))
if self.update_modifier.UPDATE_ALL:
for pkg in self.installed:
if pkg != "python" and pkg not in self.pinned:
for pkg in installed:
if pkg != "python" and pkg not in pinned:
pkgs.setdefault(pkg, MatchSpec(pkg))
return MappingProxyType(pkgs)

Expand Down
124 changes: 0 additions & 124 deletions dev/collect_upstream_conda_tests/collect_upstream_conda_tests.py

This file was deleted.

Loading

0 comments on commit 1b5f85d

Please sign in to comment.