Skip to content

Commit

Permalink
Enable --offline installs from pkgs_dirs (#423)
Browse files Browse the repository at this point in the history
  • Loading branch information
jaimergp authored Jan 25, 2024
1 parent 52adf73 commit d16e1b3
Show file tree
Hide file tree
Showing 4 changed files with 105 additions and 7 deletions.
25 changes: 23 additions & 2 deletions conda_libmamba_solver/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@
from conda.common.io import DummyExecutor, ThreadLimitedThreadPoolExecutor, env_var
from conda.common.serialize import json_dump, json_load
from conda.common.url import percent_decode, remove_auth, split_anaconda_token
from conda.core.package_cache_data import PackageCacheData
from conda.core.subdir_data import SubdirData
from conda.models.channel import Channel
from conda.models.match_spec import MatchSpec
Expand Down Expand Up @@ -117,6 +118,7 @@ def __init__(
subdirs: Iterable[str] | None = None,
repodata_fn: str = REPODATA_FN,
query_format=api.QueryFormat.JSON,
load_pkgs_cache: bool = False,
):
self._channels = context.channels if channels is None else channels
self._subdirs = context.subdirs if subdirs is None else subdirs
Expand All @@ -128,6 +130,9 @@ def __init__(
installed_repo = self._load_installed(installed_records)
self._repos.append(installed_repo)

if load_pkgs_cache:
self._repos.extend(self._load_pkgs_cache())

self._index = self._load_channels()
self._repos += [info.repo for info in self._index.values()]

Expand Down Expand Up @@ -212,7 +217,7 @@ def _repo_from_records(
info.noarch = record.noarch.value
if record.channel and record.channel.subdir_url:
info.repo_url = record.channel.subdir_url
additional_infos[record.name] = info
additional_infos[record.fn] = info

with NamedTemporaryFile(suffix=".json", delete=False, mode="w") as f:
f.write(json_dump(exported))
Expand Down Expand Up @@ -241,7 +246,12 @@ def _fetch_channel(self, url: str) -> tuple[str, os.PathLike]:

log.debug("Fetching %s with SubdirData.repo_fetch", channel)
subdir_data = SubdirData(channel, repodata_fn=self._repodata_fn)
json_path, _ = subdir_data.repo_fetch.fetch_latest_path()
if context.offline or context.use_index_cache:
# This might not exist (yet, anymore), but that's ok because we'll check
# for existence later and safely ignore if needed
json_path = subdir_data.cache_path_json
else:
json_path, _ = subdir_data.repo_fetch.fetch_latest_path()

return url, json_path

Expand Down Expand Up @@ -333,6 +343,17 @@ def _load_channels(self) -> dict[str, _ChannelRepoInfo]:

return index

def _load_pkgs_cache(self, pkgs_dirs=None) -> Iterable[api.Repo]:
    """Build one libmamba repo per local package cache directory.

    Parameters
    ----------
    pkgs_dirs:
        Cache directories to load; defaults to ``context.pkgs_dirs``.

    Returns
    -------
    A list of repos, one per cache directory, built from the cached records.
    """
    cache_dirs = context.pkgs_dirs if pkgs_dirs is None else pkgs_dirs
    repos = []
    for cache_dir in cache_dirs:
        pcd = PackageCacheData(cache_dir)
        # Populate the cache's record index before exporting its contents.
        pcd.load()
        repos.append(self._repo_from_records(self._pool, cache_dir, pcd.values()))
    return repos

def _load_installed(self, records: Iterable[PackageRecord]) -> api.Repo:
repo = self._repo_from_records(self._pool, "installed", records)
repo.set_installed()
Expand Down
30 changes: 25 additions & 5 deletions conda_libmamba_solver/solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,15 @@
from conda.common.io import Spinner, timeout
from conda.common.path import paths_equal
from conda.common.url import join_url, percent_decode
from conda.core.package_cache_data import PackageCacheData
from conda.core.prefix_data import PrefixData
from conda.core.solve import Solver
from conda.exceptions import (
CondaValueError,
InvalidMatchSpec,
InvalidSpec,
PackagesNotFoundError,
ParseError,
UnsatisfiableError,
)
from conda.models.channel import Channel
Expand Down Expand Up @@ -207,6 +209,7 @@ def solve_final_state(
channels=all_channels,
subdirs=subdirs,
repodata_fn=self._repodata_fn,
load_pkgs_cache=context.offline,
)
index.reload_local_channels()

Expand Down Expand Up @@ -879,21 +882,38 @@ def _package_record_from_json_payload(
json_payload: str
A str-encoded JSON payload with the PackageRecord kwargs.
"""
try:
kwargs = json.loads(json_payload)
except (TypeError, ValueError, json.JSONDecodeError) as exc:
channel_name = Channel(channel).canonical_name
msg = f"Could not parse JSON payload for {channel_name}::{pkg_filename}"
raise ParseError(msg) from exc

# conda-lock will inject virtual packages, but these are not in the index
if pkg_filename.startswith("__") and "/@/" in channel:
return PackageRecord(**json.loads(json_payload))
return PackageRecord(**kwargs)

kwargs = json.loads(json_payload)
try:
channel_info = index.get_info(channel)
except KeyError:
# this channel was never used to build the index, which
# means we obtained an already installed PackageRecord
# this channel was never used to build the remote index, which
# can mean two things: it comes from pkgs_dirs (offline)
# or we obtained an already installed PackageRecord
# whose metadata contains a channel that doesn't exist
# in both cases, we can return the record from the correct object
if context.offline:
for path in context.pkgs_dirs:
pcd = PackageCacheData(path)
pcd.load()
record = next((r for r in pcd.values() if r.fn == pkg_filename), None)
if record:
return record
pd = PrefixData(self.prefix)
record = pd.get(kwargs["name"])
record = pd.get(kwargs["name"], default=None)
if record and record.fn == pkg_filename:
return record
# No luck? Cross our fingers and return the record from the JSON payload straight
return PackageRecord(**kwargs)

# Otherwise, these are records from the index
kwargs["fn"] = pkg_filename
Expand Down
19 changes: 19 additions & 0 deletions news/423-offline-cache
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
### Enhancements

* Load `pkgs_dirs` records when called in offline mode. (#396 via #423)

### Bug fixes

* Do not crash if a stateless repodata cache is accessed with `--offline` or `--use-index-cache`. (#396 via #423)

### Deprecations

* <news item>

### Docs

* <news item>

### Other

* <news item>
38 changes: 38 additions & 0 deletions tests/test_channels.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import shutil
import sys
from pathlib import Path
from subprocess import check_call
from urllib.request import urlretrieve

import pytest
from conda.base.context import reset_context
Expand Down Expand Up @@ -345,3 +347,39 @@ def test_unknown_channels_do_not_crash(tmp_path):
assert package_is_installed(prefix, "test-package")
conda_inprocess("install", prefix, "zlib")
assert package_is_installed(prefix, "zlib")


@pytest.mark.skipif(not on_linux, reason="Only run on Linux")
def test_use_cache_works_offline_fresh_install_keep(tmp_path):
    """
    https://github.com/conda/conda-libmamba-solver/issues/396
    constructor installers have a `-k` switch (keep) to leave the
    pkgs/ cache prepopulated. Offline updating from the cache should be a
    harmless no-op, not a hard crash.
    """
    installer = tmp_path / "miniforge.sh"
    prefix = tmp_path / "miniforge"
    miniforge_url = (
        "https://github.com/conda-forge/miniforge/releases/"
        f"latest/download/Miniforge3-Linux-{os.uname().machine}.sh"
    )
    urlretrieve(miniforge_url, installer)
    # bkfp: batch, keep, force, prefix
    check_call(["bash", str(installer), "-bkfp", prefix])
    # Point conda entirely at the fresh install and ignore ~/.condarc via HOME.
    env = {
        **os.environ,
        "CONDA_ROOT_PREFIX": str(prefix),
        "CONDA_PKGS_DIRS": str(prefix / "pkgs"),
        "CONDA_ENVS_DIRS": str(prefix / "envs"),
        "HOME": str(tmp_path),  # ignore ~/.condarc
    }
    args = (
        "update",
        "-p",
        prefix,
        "--all",
        "--dry-run",
        "--override-channels",
        "--channel=conda-forge",
    )
    kwargs = {"capture_output": False, "check": True, "env": env}
    # Each cache-only mode must succeed, individually and combined.
    conda_subprocess(*args, "--offline", **kwargs)
    conda_subprocess(*args, "--use-index-cache", **kwargs)
    conda_subprocess(*args, "--offline", "--use-index-cache", **kwargs)

0 comments on commit d16e1b3

Please sign in to comment.