Fixes for Python venv changes
Merge tag 'for-upstream-urgent' of https://gitlab.com/bonzini/qemu into staging

Fixes for Python venv changes

# -----BEGIN PGP SIGNATURE-----
#
# iQFIBAABCAAyFiEE8TM4V0tmI4mGbHaCv/vSX3jHroMFAmRn7D4UHHBib256aW5p
# QHJlZGhhdC5jb20ACgkQv/vSX3jHroOHbQgAiQW824iL2Iw+wjYckp0rwLxe53+z
# P4kCdQePrfKW3sPglbeDArPr4gzuo7bdj75dscZmco+nBU40qGqEpRHBqjQol5pE
# kcQsmqx+0Udbsc6kJe47fgSsBLD2KbT1QQCVBgScNuDviogQ0/PCLNWjk9V4OhgL
# 0ZlK8QFnuv0qNthS+oNjkNi6SYGYNOw+4LQ/WcLWnowwhNRGUvYoq9QdOCocfyxD
# t+1xQvF4Pxqnhbkni51JRoXv/Np8U/yDHMgonvw8BLxTMNAes4nV7ifzyW2pltnf
# YEHGUKYPtrPR9dKLr/Au9ktr7n3O5ikOEpPIPSi4BwFqzv6hdE4DDAMXDA==
# =Auyq
# -----END PGP SIGNATURE-----
# gpg: Signature made Fri 19 May 2023 02:38:06 PM PDT
# gpg: using RSA key F13338574B662389866C7682BFFBD25F78C7AE83
# gpg: issuer "pbonzini@redhat.com"
# gpg: Good signature from "Paolo Bonzini <bonzini@gnu.org>" [undefined]
# gpg: aka "Paolo Bonzini <pbonzini@redhat.com>" [undefined]
# gpg: WARNING: This key is not certified with a trusted signature!
# gpg: There is no indication that the signature belongs to the owner.
# Primary key fingerprint: 46F5 9FBD 57D6 12E7 BFD4 E2F7 7E15 100C CD36 69B1
# Subkey fingerprint: F133 3857 4B66 2389 866C 7682 BFFB D25F 78C7 AE83

* tag 'for-upstream-urgent' of https://gitlab.com/bonzini/qemu:
  scripts: make sure scripts are invoked via $(PYTHON)
  gitlab: custom-runners: preserve more artifacts for debugging
  mkvenv: pass first missing package to diagnose()
  configure: fix backwards-compatibility for meson sphinx_build option
  build: rebuild build.ninja using "meson setup --reconfigure"
  mkvenv: replace distlib.database with importlib.metadata/pkg_resources
  remove remaining traces of meson submodule

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
commit ffd9492f2a
@@ -5,10 +5,6 @@
before_script:
 - JOBS=$(expr $(nproc) + 1)
script:
 - if test -n "$LD_JOBS";
   then
     scripts/git-submodule.sh update meson ;
   fi
 - mkdir build
 - cd build
 - ../configure --enable-werror --disable-docs --enable-fdt=system

@@ -20,8 +20,10 @@ variables:
artifacts:
  name: "$CI_JOB_NAME-$CI_COMMIT_REF_SLUG"
  expire_in: 7 days
  when: always
  paths:
    - build/meson-logs/testlog.txt
    - build/build.ninja
    - build/meson-logs
  reports:
    junit: build/meson-logs/testlog.junit.xml
Makefile (6 changed lines)

@@ -115,15 +115,15 @@ Makefile.ninja: build.ninja
	  $(NINJA) -t query build.ninja | sed -n '1,/^ input:/d; /^ outputs:/q; s/$$/ \\/p'; \
	} > $@.tmp && mv $@.tmp $@
-include Makefile.ninja
endif

ifneq ($(MESON),)
# A separate rule is needed for Makefile dependencies to avoid -n
build.ninja: build.ninja.stamp
$(build-files):
build.ninja.stamp: meson.stamp $(build-files)
	$(NINJA) $(if $V,-v,) build.ninja && touch $@
endif
	$(MESON) setup --reconfigure $(SRC_PATH) && touch $@

ifneq ($(MESON),)
Makefile.mtest: build.ninja scripts/mtest2make.py
	$(MESON) introspect --targets --tests --benchmarks | $(PYTHON) scripts/mtest2make.py > $@
-include Makefile.mtest
configure (vendored, 10 changed lines)

@@ -1767,7 +1767,7 @@ if test -n "$gdb_bin"; then
    gdb_version=$($gdb_bin --version | head -n 1)
    if version_ge ${gdb_version##* } 9.1; then
        echo "HAVE_GDB_BIN=$gdb_bin" >> $config_host_mak
        gdb_arches=$("$source_path/scripts/probe-gdb-support.py" $gdb_bin)
        gdb_arches=$($python "$source_path/scripts/probe-gdb-support.py" $gdb_bin)
    else
        gdb_bin=""
    fi

@@ -1987,6 +1987,14 @@ if test "$skip_meson" = no; then
  if test "$?" -ne 0 ; then
      error_exit "meson setup failed"
  fi
else
  if test -f meson-private/cmd_line.txt; then
    # Adjust old command line options that were removed
    # sed -i is not portable
    perl -i -ne '
      /^sphinx_build/ && next;
      print;' meson-private/cmd_line.txt
  fi
fi

# Save the configure command line for later reuse.
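For reference, the perl one-liner in the hunk above just drops any line beginning with sphinx_build from meson-private/cmd_line.txt, so a stale sphinx_build option is not replayed on reconfigure. A minimal Python sketch of the same filtering, assuming a small line-oriented file (the helper name is illustrative, not part of the commit):

from pathlib import Path

def drop_sphinx_build_option(cmd_line: Path) -> None:
    # Mirror the perl -i -ne filter above: rewrite the file in place,
    # keeping every line except those that start with "sphinx_build".
    if not cmd_line.exists():
        return
    lines = cmd_line.read_text(encoding="utf-8").splitlines(keepends=True)
    kept = [line for line in lines if not line.startswith("sphinx_build")]
    cmd_line.write_text("".join(kept), encoding="utf-8")

# Example (hypothetical invocation, run from the build directory):
# drop_sphinx_build_option(Path("meson-private/cmd_line.txt"))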
@@ -76,7 +76,6 @@ from typing import (
    Union,
)
import venv
import warnings


# Try to load distlib, with a fallback to pip's vendored version.

@@ -84,7 +83,6 @@ import warnings
# outside the venv or before a potential call to ensurepip in checkpip().
HAVE_DISTLIB = True
try:
    import distlib.database
    import distlib.scripts
    import distlib.version
except ImportError:

@@ -92,7 +90,6 @@ except ImportError:
        # Reach into pip's cookie jar. pylint and flake8 don't understand
        # that these imports will be used via distlib.xxx.
        from pip._vendor import distlib
        import pip._vendor.distlib.database  # noqa, pylint: disable=unused-import
        import pip._vendor.distlib.scripts  # noqa, pylint: disable=unused-import
        import pip._vendor.distlib.version  # noqa, pylint: disable=unused-import
    except ImportError:
@@ -556,6 +553,57 @@ def pkgname_from_depspec(dep_spec: str) -> str:
    return match.group(0)


def _get_version_importlib(package: str) -> Optional[str]:
    # pylint: disable=import-outside-toplevel
    # pylint: disable=no-name-in-module
    # pylint: disable=import-error
    try:
        # First preference: Python 3.8+ stdlib
        from importlib.metadata import (  # type: ignore
            PackageNotFoundError,
            distribution,
        )
    except ImportError as exc:
        logger.debug("%s", str(exc))
        # Second preference: Commonly available PyPI backport
        from importlib_metadata import (  # type: ignore
            PackageNotFoundError,
            distribution,
        )

    try:
        return str(distribution(package).version)
    except PackageNotFoundError:
        return None


def _get_version_pkg_resources(package: str) -> Optional[str]:
    # pylint: disable=import-outside-toplevel
    # Bundled with setuptools; has a good chance of being available.
    import pkg_resources

    try:
        return str(pkg_resources.get_distribution(package).version)
    except pkg_resources.DistributionNotFound:
        return None


def _get_version(package: str) -> Optional[str]:
    try:
        return _get_version_importlib(package)
    except ImportError as exc:
        logger.debug("%s", str(exc))

    try:
        return _get_version_pkg_resources(package)
    except ImportError as exc:
        logger.debug("%s", str(exc))
        raise Ouch(
            "Neither importlib.metadata nor pkg_resources found. "
            "Use Python 3.8+, or install importlib-metadata or setuptools."
        ) from exc


def diagnose(
    dep_spec: str,
    online: bool,
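The new helpers above boil down to one lookup strategy: prefer importlib.metadata (stdlib since Python 3.8, with the importlib_metadata backport as second choice), fall back to pkg_resources, and report None when the package is absent. A standalone sketch of that pattern outside mkvenv (the function name is illustrative):

from typing import Optional

def installed_version(package: str) -> Optional[str]:
    # Return the installed version of `package`, or None if it is not installed.
    try:
        # First choice: stdlib (Python 3.8+); the importlib_metadata backport
        # could be substituted here the same way mkvenv does.
        from importlib.metadata import PackageNotFoundError, version
        try:
            return version(package)
        except PackageNotFoundError:
            return None
    except ImportError:
        pass

    # Fallback: pkg_resources, bundled with setuptools (assumed available here).
    import pkg_resources
    try:
        return pkg_resources.get_distribution(package).version
    except pkg_resources.DistributionNotFound:
        return None

# installed_version("meson") -> e.g. "1.1.1", or None when meson is absent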
@@ -581,26 +629,7 @@ def diagnose(
    bad = False

    pkg_name = pkgname_from_depspec(dep_spec)
    pkg_version = None

    has_importlib = False
    try:
        # Python 3.8+ stdlib
        # pylint: disable=import-outside-toplevel
        # pylint: disable=no-name-in-module
        # pylint: disable=import-error
        from importlib.metadata import (  # type: ignore
            PackageNotFoundError,
            version,
        )

        has_importlib = True
        try:
            pkg_version = version(pkg_name)
        except PackageNotFoundError:
            pass
    except ModuleNotFoundError:
        pass
    pkg_version = _get_version(pkg_name)

    lines = []

@@ -609,14 +638,9 @@
            f"Python package '{pkg_name}' version '{pkg_version}' was found,"
            " but isn't suitable."
        )
    elif has_importlib:
        lines.append(
            f"Python package '{pkg_name}' was not found nor installed."
        )
    else:
        lines.append(
            f"Python package '{pkg_name}' is either not found or"
            " not a suitable version."
            f"Python package '{pkg_name}' was not found nor installed."
        )

    if wheels_dir:
@@ -698,7 +722,8 @@ def _do_ensure(
    dep_specs: Sequence[str],
    online: bool = False,
    wheels_dir: Optional[Union[str, Path]] = None,
) -> None:
    prog: Optional[str] = None,
) -> Optional[Tuple[str, bool]]:
    """
    Use pip to ensure we have the package specified by @dep_specs.

@@ -711,30 +736,41 @@
    :param online: If True, fall back to PyPI.
    :param wheels_dir: If specified, search this path for packages.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings(
            "ignore", category=UserWarning, module="distlib"
        )
        dist_path = distlib.database.DistributionPath(include_egg=True)
        absent = []
        present = []
        for spec in dep_specs:
            matcher = distlib.version.LegacyMatcher(spec)
            dist = dist_path.get_distribution(matcher.name)
            if dist is None or not matcher.match(dist.version):
                absent.append(spec)
            else:
                logger.info("found %s", dist)
                present.append(matcher.name)
    absent = []
    present = []
    for spec in dep_specs:
        matcher = distlib.version.LegacyMatcher(spec)
        ver = _get_version(matcher.name)
        if ver is None or not matcher.match(
            distlib.version.LegacyVersion(ver)
        ):
            absent.append(spec)
        else:
            logger.info("found %s %s", matcher.name, ver)
            present.append(matcher.name)

    if present:
        generate_console_scripts(present)

    if absent:
        # Some packages are missing or aren't a suitable version,
        # install a suitable (possibly vendored) package.
        print(f"mkvenv: installing {', '.join(absent)}", file=sys.stderr)
        pip_install(args=absent, online=online, wheels_dir=wheels_dir)
        if online or wheels_dir:
            # Some packages are missing or aren't a suitable version,
            # install a suitable (possibly vendored) package.
            print(f"mkvenv: installing {', '.join(absent)}", file=sys.stderr)
            try:
                pip_install(args=absent, online=online, wheels_dir=wheels_dir)
                return None
            except subprocess.CalledProcessError:
                pass

        return diagnose(
            absent[0],
            online,
            wheels_dir,
            prog if absent[0] == dep_specs[0] else None,
        )

    return None


def ensure(
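_do_ensure() now pairs that version lookup with distlib's LegacyMatcher to decide whether an installed package satisfies a spec such as "meson>=0.63.0", and only then falls back to pip_install()/diagnose(). A rough standalone sketch of the satisfaction check, with a deliberately simplified spec grammar instead of distlib (helper names and the ">="-only grammar are assumptions for illustration):

import re
from typing import Optional, Tuple

def split_spec(dep_spec: str) -> Tuple[str, Optional[str]]:
    # Split "name>=X.Y" into (name, minimum version or None).
    # Only ">=" is recognized; real dependency specs are richer than this.
    match = re.match(r"^([A-Za-z0-9._-]+)\s*(?:>=\s*([\w.]+))?$", dep_spec)
    if not match:
        raise ValueError(f"unparseable spec: {dep_spec!r}")
    return match.group(1), match.group(2)

def satisfied(dep_spec: str, installed: Optional[str]) -> bool:
    # True if the installed version (e.g. from a lookup like
    # installed_version() above) satisfies the spec.
    _name, minimum = split_spec(dep_spec)
    if installed is None:
        return False
    if minimum is None:
        return True

    def as_tuple(ver: str) -> Tuple[int, ...]:
        # Naive numeric comparison; real code uses a proper version class.
        return tuple(int(part) for part in ver.split(".") if part.isdigit())

    return as_tuple(installed) >= as_tuple(minimum)

# satisfied("meson>=0.63.0", "1.1.1") -> True
# satisfied("meson>=0.63.0", None)    -> False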
@@ -764,14 +800,12 @@ def ensure(
    if not HAVE_DISTLIB:
        raise Ouch("a usable distlib could not be found, please install it")

    try:
        _do_ensure(dep_specs, online, wheels_dir)
    except subprocess.CalledProcessError as exc:
    result = _do_ensure(dep_specs, online, wheels_dir, prog)
    if result:
        # Well, that's not good.
        msg, bad = diagnose(dep_specs[0], online, wheels_dir, prog)
        if bad:
            raise Ouch(msg) from exc
        raise SystemExit(f"\n{msg}\n\n") from exc
        if result[1]:
            raise Ouch(result[0])
        raise SystemExit(f"\n{result[0]}\n\n")


def post_venv_setup() -> None:
@@ -843,10 +877,6 @@ def main() -> int:
    if os.environ.get("V"):
        logging.basicConfig(level=logging.INFO)

    # These are incredibly noisy even for V=1
    logging.getLogger("distlib.metadata").addFilter(lambda record: False)
    logging.getLogger("distlib.database").addFilter(lambda record: False)

    parser = argparse.ArgumentParser(
        prog="mkvenv",
        description="QEMU pyvenv bootstrapping utility",
@@ -115,9 +115,6 @@ ignore_missing_imports = True
[mypy-distlib]
ignore_missing_imports = True

[mypy-distlib.database]
ignore_missing_imports = True

[mypy-distlib.scripts]
ignore_missing_imports = True

@@ -127,9 +124,6 @@ ignore_missing_imports = True
[mypy-pip._vendor.distlib]
ignore_missing_imports = True

[mypy-pip._vendor.distlib.database]
ignore_missing_imports = True

[mypy-pip._vendor.distlib.scripts]
ignore_missing_imports = True
@@ -26,7 +26,7 @@ sub_file="${sub_tdir}/submodule.tar"
# independent of what the developer currently has initialized
# in their checkout, because the build environment is completely
# different to the host OS.
submodules="dtc meson ui/keycodemapdb"
submodules="dtc ui/keycodemapdb"
submodules="$submodules tests/fp/berkeley-softfloat-3 tests/fp/berkeley-testfloat-3"
sub_deinit=""
scripts/meson-buildoptions.py (0 changed lines, Executable file → Normal file)
scripts/modinfo-collect.py (0 changed lines, Executable file → Normal file)
scripts/modinfo-generate.py (0 changed lines, Executable file → Normal file)
scripts/probe-gdb-support.py (0 changed lines, Executable file → Normal file)