
Merge pull request #31 from minrk/constrains
handle run_constrained and constraints in run_exports
beckermr authored May 8, 2024
2 parents 10099c2 + 60556c3 commit 71b18b1
Showing 2 changed files with 146 additions and 21 deletions.
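
In short: `MambaSolver.solve` gains an optional `constraints` argument. Each entry is handed to the solver as a pin, so it bounds the versions the solver may pick without pulling the pinned package into the solution, and `_is_recipe_solvable_on_platform` now feeds the recipe's `run_constrained` section (merged with the `*_constrains` entries coming from run_exports) into those pins. A minimal usage sketch, with channel, platform, and specs borrowed from the new tests below (the import path is assumed from the file layout):

    from conda_forge_feedstock_check_solvable.mamba_solver import MambaSolver

    # Constraints bound the solve but are never installed on their own.
    solver = MambaSolver(("conda-forge",), "osx-64")
    solvable, err, solution = solver.solve(
        ["simplejson"],
        constraints=["python=3.10", "zeromq=4.2"],
    )
    # If python appears in the solution it must be 3.10.*; zeromq should not
    # appear at all, since nothing in the requested specs actually needs it.
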
38 changes: 34 additions & 4 deletions conda_forge_feedstock_check_solvable/mamba_solver.py
@@ -479,13 +479,17 @@ def __init__(self, channels, platform):
platform=platform,
has_priority=True,
)
for repo in self.repos:
# need set_installed for add_pin, not sure why
repo.set_installed()

def solve(
self,
specs,
get_run_exports=False,
ignore_run_exports_from=None,
ignore_run_exports=None,
constraints=None,
) -> Tuple[bool, List[str]]:
"""Solve given a set of specs.
@@ -501,6 +505,9 @@ def solve(
A list of packages from which to ignore the run exports.
ignore_run_exports : list, optional
A list of things that should be ignored in the run exports.
constraints : list, optional
A list of package specs to apply as constraints to the solve.
These packages are not included in the solution.
Returns
-------
@@ -521,8 +528,15 @@ def solve(
solver = api.Solver(self.pool, solver_options)

_specs = [_norm_spec(s) for s in specs]
_constraints = [_norm_spec(s) for s in constraints or []]

print_debug("MAMBA running solver for specs \n\n%s\n", pprint.pformat(_specs))
print_debug(
"MAMBA running solver for specs \n\n%s\nconstraints: %s\n",
pprint.pformat(_specs),
pprint.pformat(_constraints),
)
for constraint in _constraints:
solver.add_pin(constraint)

solver.add_jobs(_specs, api.SOLVER_INSTALL)
success = solver.solve()
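
The distinction this hunk relies on: `add_jobs(..., api.SOLVER_INSTALL)` asks libmamba to install the specs, while `add_pin(...)` only restricts which versions may be chosen if the package is needed at all (the `set_installed()` calls added in `__init__` above are needed for `add_pin` to take effect, per the comment there). A stripped-down sketch of the pattern, reusing only calls already visible in this file and assuming `pool` and `solver_options` are set up as in `MambaSolver`:

    # Sketch only; spec strings are illustrative and MambaSolver normalizes
    # them through _norm_spec before they reach the solver.
    solver = api.Solver(pool, solver_options)
    solver.add_pin("python=3.10")                        # constrain only; never installed by itself
    solver.add_jobs(["simplejson"], api.SOLVER_INSTALL)  # actually requested
    success = solver.solve()                             # True if a consistent solution exists
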
@@ -1023,6 +1037,8 @@ def _is_recipe_solvable_on_platform(
build_req = m.get_value("requirements/build", [])
host_req = m.get_value("requirements/host", [])
run_req = m.get_value("requirements/run", [])
run_constrained = m.get_value("requirements/run_constrained", [])

ign_runex = m.get_value("build/ignore_run_exports", [])
ign_runex_from = m.get_value("build/ignore_run_exports_from", [])

@@ -1038,6 +1054,8 @@
if _err is not None:
errors.append(_err)

run_constrained = list(set(run_constrained) | build_rx["strong_constrains"])

if m.is_cross:
host_req = list(set(host_req) | build_rx["strong"])
if not (m.noarch or m.noarch_python):
@@ -1050,6 +1068,9 @@
run_req = list(set(run_req) | build_rx["strong"])
if m.build_is_host:
run_req = list(set(run_req) | build_rx["weak"])
run_constrained = list(
set(run_constrained) | build_rx["weak_constrains"]
)
else:
host_req = list(set(host_req) | build_rx["strong"])

@@ -1069,12 +1090,21 @@
if m.noarch or m.noarch_python:
run_req = list(set(run_req) | host_rx["noarch"])
else:
run_req = list(set(run_req) | host_rx["weak"])
run_req = list(set(run_req) | host_rx["weak"] | host_rx["strong"])

run_constrained = list(
set(run_constrained)
| host_rx["weak_constrains"]
| host_rx["strong_constrains"]
)

run_constrained = apply_pins(
run_constrained, host_req or [], build_req or [], outnames, m
)
if run_req:
run_req = apply_pins(run_req, host_req or [], build_req or [], outnames, m)
run_req = _clean_reqs(run_req, outnames)
_solvable, _err, _ = solver.solve(run_req)
_solvable, _err, _ = solver.solve(run_req, constraints=run_constrained)
solvable = solvable and _solvable
if _err is not None:
errors.append(_err)
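
Net effect: the constraint list passed to the run-requirements solve is the union of the recipe's own `requirements/run_constrained` and the `strong_constrains`/`weak_constrains` entries exported by build and host dependencies, with `apply_pins` applied on top; the same merged list is reused for the test-requirements solve in the next hunk. For the conflict test added below, the merged list would look roughly like this (hypothetical values, taken from the test recipe and its comments):

    # Hypothetical merged constraints for the test recipe further down:
    #   "fakeconstrainedpkg 1.0"  <- the recipe's requirements/run_constrained
    #   "nanobind 1.9"            <- fenics-basix 0.8's run_exports strong_constrains
    run_constrained = ["fakeconstrainedpkg 1.0", "nanobind 1.9"]
    _solvable, _err, _ = solver.solve(run_req, constraints=run_constrained)
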
@@ -1086,7 +1116,7 @@
)
if tst_req:
tst_req = _clean_reqs(tst_req, outnames)
_solvable, _err, _ = solver.solve(tst_req)
_solvable, _err, _ = solver.solve(tst_req, constraints=run_constrained)
solvable = solvable and _solvable
if _err is not None:
errors.append(_err)
129 changes: 112 additions & 17 deletions tests/test_mamba_solvable.py
@@ -126,6 +126,32 @@ def test_mamba_solver_apply_pins(tmp_path):
assert any(r.startswith("jpeg >=") for r in run_req)


@flaky
def test_mamba_solver_constraints():
with suppress_conda_build_logging():
solver = _mamba_factory(("conda-forge",), "osx-64")
solvable, err, solution = solver.solve(
["simplejson"], constraints=["python=3.10", "zeromq=4.2"]
)
assert solvable, err
python = [pkg for pkg in solution if pkg.split()[0] == "python"][0]
name, version, build = python.split(None, 2)
assert version.startswith("3.10.")
assert not any(pkg.startswith("zeromq") for pkg in solution), pprint.pformat(
solution
)


@flaky
def test_mamba_solver_constraints_unsolvable():
with suppress_conda_build_logging():
solver = MambaSolver(("conda-forge",), "osx-64")
solvable, err, solution = solver.solve(
["simplejson"], constraints=["python=3.10", "python=3.11"]
)
assert not solvable, pprint.pformat(solution)


@flaky
def test_mamba_solver_nvcc():
with suppress_conda_build_logging():
@@ -347,27 +373,96 @@ def test_cupy_solvable(tmp_path):


@flaky
def test_run_exports_strong_constrains_solvable(tmp_path):
"""dolfinx_mpc depends on fenics-basix which has strong_constrained in run_exports"""
feedstock_dir = clone_and_checkout_repo(
tmp_path,
"https://github.com/conda-forge/dolfinx_mpc-feedstock",
ref="main",
)
subprocess.run(
"git checkout 26bb83149573c285cd596fbca2db89a4c69435c3",
cwd=feedstock_dir,
shell=True,
check=True,
def test_run_exports_constrains_conflict(feedstock_dir, tmp_path_factory):
recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml")
os.makedirs(os.path.dirname(recipe_file), exist_ok=True)

with FakeRepoData(tmp_path_factory.mktemp("channel")) as repodata:
for pkg in [
FakePackage("fakeconstrainedpkg", version="1.0"),
FakePackage("fakeconstrainedpkg", version="2.0"),
]:
repodata.add_package(pkg)

with open(recipe_file, "w") as fp:
fp.write(
dedent(
"""
package:
name: "cf-autotick-bot-test-package"
version: "0.9"
source:
path: .
build:
number: 8
requirements:
build: []
host:
# pick a package with run_exports: constrains
- fenics-basix 0.8.0 *_0
run:
- libzlib
- fakeconstrainedpkg
run_constrained:
- fakeconstrainedpkg 1.0
""",
),
)

# keep only one variant, avoid unnecessary solves
for cbc in pathlib.Path(feedstock_dir).glob(".ci_support/*.yaml"):
if cbc.name != "linux_python3.8.____cpython.yaml":
cbc.unlink()

solvable, errors, solve_by_variant = is_recipe_solvable(
feedstock_dir,
additional_channels=[repodata.channel_url],
timeout=None,
)
assert solvable, pprint.pformat(errors)


@flaky
def test_run_exports_constrains_notok(feedstock_dir, tmp_path_factory):
recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml")
os.makedirs(os.path.dirname(recipe_file), exist_ok=True)

with open(recipe_file, "w") as fp:
fp.write(
dedent(
"""
package:
name: "cf-autotick-bot-test-package"
version: "0.9"
source:
path: .
build:
number: 8
requirements:
build: []
host:
# pick a package with run_exports: constrains
- fenics-basix 0.8.0 *_0
run:
# fenics-basix 0.8 has run_exports.strong_constrains: nanobind 1.9
# this should conflict
- nanobind =1.8
""",
),
)

# keep only one variant, avoid unnecessary solves
# every variant exercises this issue and this feedstock has ~100 variants
for cbc in pathlib.Path(feedstock_dir).glob(".ci_support/*.yaml"):
if cbc.name != "linux_64_mpimpichpython3.10.____cpythonscalarreal.yaml":
if cbc.name != "linux_python3.8.____cpython.yaml":
cbc.unlink()
solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir)
pprint.pprint(solvable_by_variant)
assert solvable, pprint.pformat(errors)
assert not solvable, pprint.pformat(errors)
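
Taken together, the two tests above pin down both sides of the new behaviour: a `run_constrained` entry that can be satisfied (helped by a `FakeRepoData` channel providing both versions of the fake package) leaves the recipe solvable, while a run requirement that contradicts a constraint exported by a host dependency (`nanobind =1.8` against the exported `nanobind 1.9`) must make it unsolvable.
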


@flaky
@@ -520,7 +615,7 @@ def test_mamba_solver_hangs():

with suppress_conda_build_logging():
solver = _mamba_factory(("conda-forge", "defaults"), "linux-64")
solver.solve(
res = solver.solve(
[
"gdal >=2.1.0",
"ncurses >=6.2,<7.0a0",

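
For callers nothing appears to change at the top level: the constraint handling is internal to the per-variant solve, and feedstocks keep being checked through `is_recipe_solvable`, as the updated tests suggest (the directory and extra channel below are illustrative):

    solvable, errors, solvable_by_variant = is_recipe_solvable(
        feedstock_dir,                               # a checked-out feedstock
        additional_channels=[repodata.channel_url],  # optional extra channel, e.g. from FakeRepoData
        timeout=None,
    )
    assert solvable, errors
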