continue work on conda

DavHau 2020-11-17 18:29:44 +07:00
parent 347ff93a78
commit 55f0bfbc4e
19 changed files with 256 additions and 143 deletions

View file

@@ -7,45 +7,59 @@ from time import time
import toml
from mach_nix.generate import main
pwd = dirname(realpath(__file__))
os.environ['py_ver_str'] = '3.7.5'
os.environ['system'] = 'x86_64-linux'
os.environ['out_file'] = f'{pwd}/overrides.nix'
os.environ['disable_checks'] = 'true'
with open(pwd + "/../mach_nix/provider_defaults.toml") as f:
provider_settings = toml.load(f)
if os.path.isfile("./providers.toml"):
with open(pwd + "./providers.toml") as f:
provider_settings.update(toml.load(f))
provider_settings.update(dict(
numba = "wheel"
))
os.environ['providers'] = json.dumps(provider_settings)
nixpkgs_json = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/nixpkgs-json.nix -o {nixpkgs_json} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['nixpkgs_json'] = nixpkgs_json
pypi_deps_db = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/deps-db-and-fetcher.nix -A pypi_deps_db_src -o {pypi_deps_db} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['pypi_deps_db_src'] = pypi_deps_db
from mach_nix import generate
for key in ('PYPI_FETCHER_COMMIT', 'PYPI_FETCHER_SHA256'):
with open(f"{pypi_deps_db}/{key}") as f:
os.environ[key.lower()] = f.read()
def main():
pwd = dirname(realpath(__file__))
os.environ['py_ver_str'] = '3.7.5'
os.environ['system'] = 'x86_64-linux'
os.environ['out_file'] = f'{pwd}/overrides.nix'
os.environ['disable_checks'] = 'true'
with open(pwd + "/../mach_nix/provider_defaults.toml") as f:
provider_settings = toml.load(f)
if os.path.isfile("./providers.toml"):
with open(pwd + "./providers.toml") as f:
provider_settings.update(toml.load(f))
provider_settings.update(dict(
# _default = "conda"
))
os.environ['providers'] = json.dumps(provider_settings)
nixpkgs_json = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/nixpkgs-json.nix -o {nixpkgs_json} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['nixpkgs_json'] = nixpkgs_json
pypi_deps_db = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/deps-db-and-fetcher.nix -A pypi_deps_db_src -o {pypi_deps_db} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['pypi_deps_db_src'] = pypi_deps_db
providers = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/lib.nix -A parseProvidersToJson -o {providers} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['providers'] = providers
conda_channels_json = tempfile.mktemp()
cmd = f'nix-build {pwd}/../mach_nix/nix/conda-channels.nix -A condaChannelsJson -o {conda_channels_json} --show-trace'
sp.check_call(cmd, shell=True)
os.environ['conda_channels_json'] = conda_channels_json
for key in ('PYPI_FETCHER_COMMIT', 'PYPI_FETCHER_SHA256'):
with open(f"{pypi_deps_db}/{key}") as f:
os.environ[key.lower()] = f.read()
with open(pwd + "/reqs.txt") as f:
os.environ['requirements'] = f.read()
# generates and writes nix expression into ./debug/expr.nix
start = time()
generate.main()
dur = round(time() - start, 1)
print(f"resolving took: {dur}s")
with open(pwd + "/reqs.txt") as f:
os.environ['requirements'] = f.read()
# generates and writes nix expression into ./debug/expr.nix
start = time()
main()
dur = round(time() - start, 1)
print(f"resolving took: {dur}s")

View file

@@ -49,10 +49,10 @@ let
# (High level API) generates a python environment with minimal user effort
_mkPython = caller: args:
if builtins.isList args then
(import ./mach_nix/nix/mkPython.nix { inherit pkgs pypiDataRev pypiDataSha256; })
(import ./mach_nix/nix/mkPython.nix { inherit pkgs condaChannelsExtra pypiDataRev pypiDataSha256; })
python { extra_pkgs = args; }
else
(import ./mach_nix/nix/mkPython.nix { inherit pkgs pypiDataRev pypiDataSha256; })
(import ./mach_nix/nix/mkPython.nix { inherit pkgs condaChannelsExtra pypiDataRev pypiDataSha256; })
python (l.throwOnDeprecatedArgs caller args);
in

View file

@@ -5,7 +5,6 @@ import re
import sys
from abc import ABC, abstractmethod
from dataclasses import dataclass
from os.path import abspath, dirname
from typing import List, Tuple, Iterable
import distlib.markers
@@ -22,7 +21,7 @@ from ..cache import cached
class Candidate:
name: str
ver: Version
extras: tuple
selected_extras: tuple
provider_info: 'ProviderInfo'
build: str = None
@@ -30,8 +29,7 @@
@dataclass
class ProviderInfo:
provider: 'DependencyProviderBase'
# following args are only required in case of wheel
wheel_fname: str = None
wheel_fname: str = None # only required for wheel
url: str = None
hash: str = None
@@ -41,8 +39,9 @@ def normalize_name(key: str) -> str:
class ProviderSettings:
def __init__(self, json_str):
data = json.loads(json_str)
def __init__(self, providers_json):
with open(providers_json) as f:
data = json.load(f)
if isinstance(data, list) or isinstance(data, str):
self.default_providers = self._parse_provider_list(data)
self.pkg_providers = {}
@@ -178,7 +177,7 @@ class CombinedDependencyProvider(DependencyProviderBase):
def get_pkg_reqs(self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
for provider in self.allowed_providers_for_pkg(c.name).values():
if c in provider.all_candidates(c.name, c.extras, c.build):
if c in provider.all_candidates(c.name, c.selected_extras, c.build):
return provider.get_pkg_reqs(c)
def list_all_providers_for_pkg(self, pkg_name):
@@ -326,7 +325,7 @@ class WheelDependencyProvider(DependencyProviderBase):
if reqs_raw is None:
reqs_raw = []
# handle extras by evaluating markers
install_reqs = list(filter_reqs_by_eval_marker(parse_reqs(reqs_raw), self.context_wheel, c.extras))
install_reqs = list(filter_reqs_by_eval_marker(parse_reqs(reqs_raw), self.context_wheel, c.selected_extras))
return install_reqs, []
def deviated_version(self, pkg_name, pkg_version: Version, build):
@@ -476,11 +475,9 @@ class SdistDependencyProvider(DependencyProviderBase):
reqs_raw = pkg[t]
reqs = parse_reqs(reqs_raw)
requirements[t] = list(filter_reqs_by_eval_marker(reqs, self.context))
if not c.extras:
extras = []
# even if no extras are selected we need to collect reqs for extras,
# because some extras consist of only a marker which needs to be evaluated
requirements['install_requires'] += self.get_reqs_for_extras(c.name, c.ver, c.extras)
requirements['install_requires'] += self.get_reqs_for_extras(c.name, c.ver, c.selected_extras)
return requirements['install_requires'], requirements['setup_requires']
def all_candidates(self, pkg_name, extras=None, build=None) -> Iterable[Candidate]:
@@ -515,7 +512,10 @@ class CondaDependencyProvider(DependencyProviderBase):
if ver not in self.pkgs[name]:
self.pkgs[name][ver] = {}
if build in self.pkgs[name][ver]:
print(f"WARNING: colliding package {p['name']}")
if 'collisions' not in self.pkgs[name][ver][build]:
self.pkgs[name][ver][build]['collisions'] = []
self.pkgs[name][ver][build]['collisions'].append(p['subdir'])
continue
self.pkgs[name][ver][build] = p
self.pkgs[name][ver][build]['fname'] = fname
super().__init__(py_ver, platform, system, *args, **kwargs)
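Note: for orientation, the index built by this constructor has the nested shape sketched below (values illustrative; 'collisions' only appears when the same name/version/build exists in several subdirs, and only the first occurrence is kept):

pkgs = {
    "numpy": {                           # normalized package name
        "1.19.2": {                      # version string
            "py38h54aff64_0": {          # build string -> repodata entry
                "name": "numpy",
                "version": "1.19.2",
                "build": "py38h54aff64_0",
                "subdir": "linux-64",
                "fname": "numpy-1.19.2-py38h54aff64_0.tar.bz2",
                "depends": ["python >=3.8,<3.9.0a0"],
                "collisions": ["noarch"],   # subdirs of ignored duplicates
            },
        },
    },
}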
@@ -525,35 +525,41 @@
return f"conda/{self.channel}"
def get_pkg_reqs(self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
candidate = self.choose_candidate(c.name, c.ver)
name = normalize_name(c.name)
deviated_ver = self.deviated_version(name, c.ver, c.build)
candidate = self.pkgs[name][deviated_ver][c.build]
depends = list(filter(
lambda d: d.split()[0] not in self.ignored_pkgs and not d.startswith('_'),
candidate['depends']))
# print(f"candidate {c.name}:{c.ver} depends on {depends}")
candidate['depends']
# always add optional dependencies to ensure constraints are applied
+ (candidate['constrains'] if 'constrains' in candidate else [])
))
return list(parse_reqs(depends)), []
@cached()
def all_candidates(self, pkg_name, extras=None, build=None) -> Iterable[Version]:
def all_candidates(self, pkg_name, extras=None, build=None) -> Iterable[Candidate]:
pkg_name = normalize_name(pkg_name)
if pkg_name not in self.pkgs:
return []
candidates = []
for ver in self.pkgs[pkg_name].keys():
candidates += [
Candidate(
for p in self.compatible_builds(pkg_name, parse_ver(ver), build):
candidates.append(Candidate(
p['name'],
parse_ver(p['version']),
extras,
selected_extras=tuple(),
build=p['build'],
provider_info=ProviderInfo(
self,
url=f"https://anaconda.org/anaconda/{p['name']}/"
url=f"https://anaconda.org/{self.channel}/{p['name']}/"
f"{p['version']}/download/{p['subdir']}/{p['fname']}",
hash=p['sha256']
)
)
for p in self.compatible_builds(pkg_name, parse_ver(ver), build)
]
))
if 'collisions' in p:
print(
f"WARNING: Colliding conda package in {self.channel}. Ignoring {p['name']} from {p['collisions']} "
f"in favor of {p['name']} from '{p['subdir']}'")
return candidates
def deviated_version(self, pkg_name, normalized_version: Version, build):
@@ -587,9 +593,3 @@ class CondaDependencyProvider(DependencyProviderBase):
# python is compatible
compatible.append(build)
return compatible
def choose_candidate(self, pkg_name, pkg_version: Version):
pkg_name = normalize_name(pkg_name)
candidate = self.compatible_builds(pkg_name, pkg_version)[0]
# print(f"chosen candidate {pkg_name}{candidate['version']} for {pkg_version}")
return candidate
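Note: the URL assembled in all_candidates follows anaconda.org's download layout; a standalone sketch with illustrative values:

channel, name, version, subdir, fname = (
    "main", "numpy", "1.19.2", "linux-64",
    "numpy-1.19.2-py38h54aff64_0.tar.bz2")
url = (f"https://anaconda.org/{channel}/{name}/"
       f"{version}/download/{subdir}/{fname}")
# -> https://anaconda.org/main/numpy/1.19.2/download/linux-64/numpy-1.19.2-py38h54aff64_0.tar.bz2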

View file

@@ -11,10 +11,13 @@ def make_name(pkg: ResolvedPkg, nixpkgs: NixpkgsIndex):
pi = pkg.provider_info
extras = f"[{' '.join(pkg.extras_selected)}]" if pkg.extras_selected else ''
name = f"{pkg.name}{extras} - {pkg.ver} - {pi.provider.name}"
if pi.provider == 'wheel':
if pi.provider.name == 'wheel':
name += f" - {'-'.join(pi.wheel_fname.split('-')[-3:])[:-4]}"
if pi.provider == 'nixpkgs':
if pi.provider.name == 'nixpkgs':
name += f" (attrs: {' '.join(c.nix_key for c in nixpkgs.get_all_candidates(pkg.name))})"
if pi.provider.name.startswith('conda'):
if pkg.build:
name += f" - {pkg.build}"
return name
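Note: with this change a conda candidate renders like, for example, "numpy - 1.19.2 - conda/main - py38h54aff64_0" (illustrative).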

View file

@@ -101,4 +101,9 @@ rec {
tensorflow-gpu = tensorflow;
websockets.remove-patchPhase = {
_cond = {prov, ... }: elem prov [ "sdist" "nixpkgs" ];
patchPhase = "";
};
}

View file

@@ -16,6 +16,7 @@ from mach_nix.requirements import parse_reqs, filter_reqs_by_eval_marker, contex
from mach_nix.resolver.resolvelib_resolver import ResolvelibResolver
from mach_nix.versions import PyVer
def load_env(name, *args, **kwargs):
var = os.environ.get(name, *args, **kwargs)
if var is None:

View file

@@ -1,7 +1,7 @@
{
"url": "https://github.com/nixos/nixpkgs",
"rev": "cfed29bfcb28259376713005d176a6f82951014a",
"date": "2020-10-11T20:38:29-07:00",
"sha256": "034m892hxygminkj326y7l3bp4xhx0v154jcmla7wdfqd23dk5xm",
"rev": "2da96f3f9c44cf87e07387321f4ca406c1a8bb7e",
"date": "2020-10-31T10:14:50+00:00",
"sha256": "03d0iym6m5kbp11lz5jcaf2627cqvry0lzmrmf6gmqvvfcal9vdl",
"fetchSubmodules": false
}

View file

@@ -1,7 +1,7 @@
{
"url": "https://github.com/davhau/pypi-deps-db",
"rev": "b0971936e8f3b27a49345088d0739b154284fd7b",
"date": "2020-10-24T08:14:50+00:00",
"sha256": "1683nzdzka97p76rz498crdfik64cgh0a9ahbz716jsqkfb6xlz2",
"rev": "e6a98c8a6c2c9b759011fcb1b3f30734a510da39",
"date": "2020-10-31T08:21:13+00:00",
"sha256": "0cyiq0j42abibfr8zmmfmas13axbl80im4pgpqs9nd9j98g552mr",
"fetchSubmodules": false
}

View file

@@ -2,6 +2,7 @@
requirements, # content from a requirements.txt file
python, # python from nixpkgs as base for overlay
pkgs,
condaChannelsExtra ? {},
tests ? false, # disable tests wherever possible
overrides ? [],
providers ? {}, # re-order to change provider priority or remove providers
@@ -11,26 +12,37 @@
_providerDefaults ? with builtins; fromTOML (readFile ../provider_defaults.toml)
}:
let
l = import ./lib.nix { inherit (pkgs) lib; inherit pkgs; };
_providers = l.parseProviders (_providerDefaults // providers);
nixpkgs_json = import ./nixpkgs-json.nix {
inherit overrides pkgs python;
};
builder_python = pkgs.python37.withPackages(ps:
(pkgs.lib.attrValues (import ./python-deps.nix {python = pkgs.python37; fetchurl = pkgs.fetchurl; }))
);
src = ./../../.;
db_and_fetcher = import ./deps-db-and-fetcher.nix {
inherit pkgs;
pypi_deps_db_commit = pypiDataRev;
pypi_deps_db_sha256 = pypiDataSha256;
};
providers_json = builtins.toJSON ( _providerDefaults // providers);
providers_json_file = pkgs.writeText "providers" (builtins.toJSON _providers);
mach_nix_file = pkgs.runCommand "mach_nix_file"
{ buildInputs = [ src builder_python db_and_fetcher.pypi_deps_db_src];
inherit nixpkgs_json requirements;
inherit (db_and_fetcher) pypi_deps_db_src pypi_fetcher_commit pypi_fetcher_sha256;
conda_channels_json = (import ./conda-channels.nix {}).condaChannelsJson;
conda_channels_json = (import ./conda-channels.nix {
inherit condaChannelsExtra pkgs;
providers = _providers;
}).condaChannelsJson;
disable_checks = ! tests;
providers = providers_json;
providers = providers_json_file;
py_ver_str = python.version;
}
''
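Note: the providers file handed to the builder now contains the parsed, namespaced form; illustratively: {"_default": ["conda/main", "conda/r", "wheel", "sdist", "nixpkgs"], "gdal": ["nixpkgs"]}.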

View file

@@ -1,7 +1,10 @@
{
condaChannelsExtra ? {},
pkgs ? import (import ./nixpkgs-src.nix) {},
providers ? builtins.fromJSON (builtins.readFile (builtins.getEnv "providers")),
system ? "x86_64-linux",
channels ? [ "main" "r" "conda-forge" ],
# conda-channels index
repoName ? "conda-channels",
repoOwner ? "DavHau",
rev ? "e742cc6152473ddffb33e91181ff5d1b23222fc8",
@@ -10,27 +13,46 @@
with builtins;
with pkgs.lib;
let
systemMap = {
x86_64-linux = "linux-64";
x86_64-darwin = "osx-64";
aarch64-linux = "linux-aarch64";
};
channelIndex = fromJSON (readFile (fetchurl {
allProviders = flatten (attrValues providers);
usedChannels =
filter (p: p != null)
(map (p: if hasPrefix "conda/" p then removePrefix "conda/" p else null) allProviders);
channelRegistry = fromJSON (readFile (fetchurl {
name = "conda-channels-index";
url = "https://raw.githubusercontent.com/${repoOwner}/${repoName}/${rev}/sha256.json";
inherit sha256;
}));
condaChannels = listToAttrs (map (chan: nameValuePair chan (
map ( sys:
(builtins.fetchurl {
url = "https://raw.githubusercontent.com/${repoOwner}/${repoName}/${rev}/${chan}/${sys}.json";
sha256 = channelIndex."./${chan}/${sys}.json";
})
) [ systemMap."${system}" "noarch" ]
)) channels);
condaChannelsJson = pkgs.writeText "conda-channels.json" (toJSON condaChannels);
registryChannels = mapAttrs' (filepath: hash:
let
split = splitString "/" filepath;
chan = elemAt split 1;
sys = removeSuffix ".json" (tail split);
in
nameValuePair
chan
(map (sys: (builtins.fetchurl {
url = "https://raw.githubusercontent.com/${repoOwner}/${repoName}/${rev}/${chan}/${sys}.json";
sha256 = channelRegistry."./${chan}/${sys}.json";
})) [ systemMap."${system}" "noarch" ])
) channelRegistry;
_registryChannels = filterAttrs (chan: json: elem chan usedChannels) registryChannels;
_condaChannelsExtra = filterAttrs (chan: json: elem chan usedChannels) condaChannelsExtra;
allCondaChannels = (_registryChannels // _condaChannelsExtra);
condaChannelsJson = pkgs.writeText "conda-channels.json" (toJSON allCondaChannels);
in
{ inherit
condaChannels
condaChannelsJson;
}
trace "using conda channels: ${toString (intersperse "," (attrNames allCondaChannels))}"
{ inherit condaChannelsJson; }
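Note: a Python mirror of the channel-selection logic above, as a sketch (only channels referenced by some provider get their repodata fetched):

providers = {"_default": ["conda/main", "wheel"], "numpy": ["conda/conda-forge"]}
all_providers = [p for ps in providers.values() for p in ps]
used_channels = [p[len("conda/"):] for p in all_providers if p.startswith("conda/")]
print(used_channels)  # ['main', 'conda-forge']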

View file

@@ -1,12 +1,50 @@
{ lib, pkgs, ... }:
{
pkgs ? import (import ./nixpkgs-src.nix) { config = {}; overlays = []; },
...
}:
with builtins;
with lib;
with pkgs.lib;
let
nonCondaProviders = [
"wheel"
"sdist"
"nixpkgs"
];
in
rec {
mergeOverrides = foldl composeExtensions (self: super: { });
autoPatchelfHook = import ./auto_patchelf_hook.nix {inherit (pkgs) fetchurl makeSetupHook writeText;};
parseProviders = providers:
let
# transform strings to lists
_providers = mapAttrs (pkg: providers:
if isString providers then
splitString "," providers
else providers
) providers;
in
# convert "some-conda-channel" to "conda/some-conda-channel"
mapAttrs (pkg: providers:
flatten (map (p:
if elem p nonCondaProviders || hasPrefix "conda/" p then
p
else if p == "conda" then
[ "conda/main" "conda/r" ]
else
"conda/${p}"
) providers)
) _providers;
parseProvidersToJson =
let
providers = (fromJSON (getEnv "providers"));
in
trace (getEnv "providers")
pkgs.writeText "providers-json" (toJSON (parseProviders providers));
concat_reqs = reqs_list:
let
concat = s1: s2: s1 + "\n" + s2;
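Note: parseProviders mirrored in Python, as a sketch of the behavior inferred from the Nix above:

NON_CONDA = {"wheel", "sdist", "nixpkgs"}

def parse_providers(providers):
    parsed = {}
    for pkg, specs in providers.items():
        # strings become lists
        specs = specs.split(",") if isinstance(specs, str) else specs
        result = []
        for p in specs:
            if p in NON_CONDA or p.startswith("conda/"):
                result.append(p)                     # already namespaced
            elif p == "conda":
                result += ["conda/main", "conda/r"]  # default conda channels
            else:
                result.append(f"conda/{p}")          # bare channel name
        parsed[pkg] = result
    return parsed

print(parse_providers({"_default": "conda,wheel,sdist,nixpkgs"}))
# {'_default': ['conda/main', 'conda/r', 'wheel', 'sdist', 'nixpkgs']}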

View file

@@ -1,4 +1,4 @@
{ pkgs, pypiDataRev, pypiDataSha256, ... }:
{ condaChannelsExtra, pkgs, pypiDataRev, pypiDataSha256, ... }:
with builtins;
with pkgs.lib;
@@ -90,7 +90,7 @@ let
py = python_pkg.override { packageOverrides = l.mergeOverrides overridesPre; };
result = l.compileOverrides {
inherit pkgs providers pypiDataRev pypiDataSha256 tests _providerDefaults;
inherit condaChannelsExtra pkgs providers pypiDataRev pypiDataSha256 tests _providerDefaults;
overrides = overridesPre ++ overrides_pre_extra ++ extra_pkgs_py_overrides;
python = py;
requirements = l.concat_reqs ([requirements] ++ extra_pkgs_py_reqs ++ [extra_pkgs_r_reqs]);

View file

@@ -2,3 +2,4 @@
set -e
nix-shell -p nix-prefetch-git --run "nix-prefetch-git --url https://github.com/davhau/pypi-deps-db --rev refs/heads/master --no-deepClone" | python -m json.tool - PYPI_DEPS_DB.json
nix-shell -p nix-prefetch-git --run "nix-prefetch-git --url https://github.com/nixos/nixpkgs --rev refs/heads/nixpkgs-unstable --no-deepClone" | python -m json.tool - NIXPKGS.json
nix-shell -p nix-prefetch-git --run "nix-prefetch-git --url https://github.com/davhau/conda-channels --rev refs/heads/master --no-deepClone" | python -m json.tool - CONDA_CHANNELS.json

View file

@@ -1,5 +1,4 @@
#_default = "wheel,sdist,nixpkgs"
_default = "conda/conda-forge,wheel,sdist,nixpkgs"
_default = "conda,wheel,sdist,nixpkgs"
gdal = "nixpkgs"
pip = "nixpkgs,sdist"
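(Via parseProviders in lib.nix, the new _default expands to [ "conda/main" "conda/r" "wheel" "sdist" "nixpkgs" ].)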

View file

@@ -3,7 +3,6 @@ from typing import Iterable
import distlib.markers
import pkg_resources
from distlib.markers import DEFAULT_CONTEXT
from packaging.version import parse, _Version
from pkg_resources._vendor.packaging.specifiers import SpecifierSet
from mach_nix.cache import cached
@@ -34,21 +33,6 @@ class Requirement(pkg_resources.Requirement):
def __hash__(self):
return hash((super().__hash__(), self.build))
# @staticmethod
# def norm_specs(specs):
# # PEP 440: Compatible Release
# for spec in specs:
# if spec[0] == "~=":
# ver = spec[1]
# yield ('>=', ver)
# ver = parse(parse(ver).base_version)
# ver_as_dict = ver._version._asdict()
# ver_as_dict['release'] = ver_as_dict['release'][:-1] + ('*',)
# ver._version = _Version(**ver_as_dict)
# yield ('==', str(ver))
# else:
# yield spec
def filter_reqs_by_eval_marker(reqs: Iterable[Requirement], context: dict, selected_extras=None):
# filter requirements relevant for current environment
@@ -66,18 +50,7 @@ def filter_reqs_by_eval_marker(reqs: Iterable[Requirement], context: dict, selec
yield req
# @cached(lambda args: tuple(args[0]) if isinstance(args[0], list) else args[0])
# def parse_reqs(strs):
# if isinstance(strs, str):
# strs = [strs]
# strs = list(map(
# lambda s: s.replace(' ', '==') if not any(op in s for op in ('==', '!=', '<=', '>=', '<', '>', '~=')) else s,
# strs
# ))
# reqs = list(pkg_resources.parse_requirements(strs))
# for req in reqs:
# r = Requirement(str(req))
# yield r
all_ops = {'==', '!=', '<=', '>=', '<', '>', '~=', ';'}
@cached(lambda args: tuple(args[0]) if isinstance(args[0], list) else args[0])
@@ -92,17 +65,31 @@ def parse_reqs(strs):
line += next(lines)
except StopIteration:
return
yield Requirement(*parse_reqs_line(line))
# handle conda requirements
def parse_reqs_line(line):
build = None
if not any(op in line for op in ('==', '!=', '<=', '>=', '<', '>', '~=', ';')):
# conda spec with build like "tensorflow-base 2.0.0 gpu_py36h0ec5d1f_0"
splitted = line.split(' ')
if len(splitted) == 3:
name, ver, build = splitted
line = f"{name}=={ver}"
# transform conda specifiers without operator like "requests 2.24.*"
else:
line = line.replace(' ', '==')
splitted = line.strip().split(' ')
yield Requirement(line, build)
# conda spec with build like "tensorflow-base 2.0.0 gpu_py36h0ec5d1f_0"
# or "hdf5 >=1.10.5,<1.10.6.0a0 mpi_mpich_*"
if len(splitted) == 3 \
and (
splitted[-1][-2] == '_' or '*' in splitted[-1]
or
not any(op in splitted[1] for op in all_ops)
):
name, ver_spec, build = splitted
if not any(op in ver_spec for op in all_ops):
ver_spec = f"=={ver_spec}"
line = f"{name}{ver_spec}"
# parse conda specifiers without operator like "requests 2.24.*"
elif len(splitted) == 2:
name, ver_spec = splitted
if not any(op in ver_spec for op in all_ops):
ver_spec = f"=={ver_spec}"
line = f"{name}{ver_spec}"
return line, build
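Note: quick examples of the (line, build) tuples the rewritten parser returns, following the branches above:

parse_reqs_line("tensorflow-base 2.0.0 gpu_py36h0ec5d1f_0")
# -> ('tensorflow-base==2.0.0', 'gpu_py36h0ec5d1f_0')
parse_reqs_line("hdf5 >=1.10.5,<1.10.6.0a0 mpi_mpich_*")
# -> ('hdf5>=1.10.5,<1.10.6.0a0', 'mpi_mpich_*')
parse_reqs_line("requests 2.24.*")
# -> ('requests==2.24.*', None)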

View file

@@ -18,7 +18,8 @@ class Provider:
self.provider = deps_db
def get_extras_for(self, dependency):
return tuple(sorted(dependency.extras))
# return selected extras
return tuple(sorted(dependency.selected_extras))
def get_base_requirement(self, candidate):
return Requirement("{}=={}".format(candidate.name, candidate.ver))
@@ -34,7 +35,7 @@ class Provider:
def is_satisfied_by(self, requirement, candidate: Candidate):
res = None
if not set(requirement.extras).issubset(set(candidate.extras)):
if not set(requirement.extras).issubset(set(candidate.selected_extras)):
res = False
res = bool(len(list(filter_versions([candidate.ver], requirement.specs))))
#print(f"{res} {requirement} satisfied by {candidate}")
@@ -71,7 +72,7 @@ class ResolvelibResolver(Resolver):
prop_build_inputs=prop_build_inputs,
is_root=is_root,
provider_info=candidate.provider_info,
extras_selected=list(result.mapping[name].extras),
extras_selected=list(result.mapping[name].selected_extras),
build=candidate.build
))
remove_circles_and_print(nix_py_pkgs, self.nixpkgs)

View file

@@ -0,0 +1,29 @@
import pytest
from mach_nix.requirements import parse_reqs_line
@pytest.mark.parametrize("exp_build, exp_line, line", [
# specs without a build string
(None, 'requests==2.24.0', 'requests 2.24.0'),
(None, 'requests == 2.24.0', 'requests == 2.24.0'),
(None, 'requests==2.24.0', 'requests 2.24.0'),
(None, 'requests==2.24.0', 'requests 2.24.0 '),
(None, 'requests==2.24.0', ' requests 2.24.0 '),
(None, 'pdfminer.six == 20200726', 'pdfminer.six == 20200726'),
('openblas', 'blas==1.*', 'blas 1.* openblas'),
('openblas', 'blas==*', 'blas * openblas'),
('openblas', 'blas==1.1', 'blas 1.1 openblas'),
('build123*', 'requests>=2.24.0', 'requests >=2.24.0 build123*'),
('build123*', 'requests>=2.24.0', 'requests >=2.24.0 build123*'),
('build123*', 'requests==2.24.*', 'requests ==2.24.* build123*'),
('build123*', 'requests==2.24.*', 'requests 2.24.* build123*'),
('build123*', 'requests==2.24.0', 'requests 2.24.0 build123*'),
('build123*', 'requests==2.24.0', ' requests 2.24.0 build123*'),
('build123*', 'requests==2.24.0', 'requests 2.24.0 build123* '),
('build123*', 'requests==2.24.0', ' requests 2.24.0 build123* '),
])
def test_parse_requirements(exp_build, exp_line, line):
new_line, build = parse_reqs_line(line)
assert (build, new_line) == (exp_build, exp_line)

View file

@@ -269,7 +269,7 @@ in
done
done
echo "{}" > sha256.json
for f in $(find . -type f -not -path './.git/*' -not -name '.*'); do
for f in $(find . -type f -not -path './.git/*' -not -name '.*' -not -name 'sha256*'); do
jq ". + {\"$f\": \"$(cat $f | openssl dgst -binary -sha256 | openssl base64 | awk '{print $1}')\"}" sha256.json \
| sponge sha256.json
done

View file

@@ -9,4 +9,5 @@ mach-nix.mkPython {
python-dateutil
'';
providers.python-dateutil = "sdist";
providers.setuptools-scm = "wheel,sdist,nixpkgs";
}