20 changes: 1 addition & 19 deletions relenv/build/__init__.py
@@ -14,7 +14,7 @@
from types import FrameType, ModuleType

from . import darwin, linux, windows
from .common import CHECK_VERSIONS_SUPPORT, builds
from .common import builds
from ..common import DEFAULT_PYTHON, build_arch
from ..pyversions import Version, python_versions

@@ -105,12 +105,6 @@ def setup_parser(
"has no chance of being succesful. "
),
)
build_subparser.add_argument(
"--check-versions",
default=False,
action="store_true",
help="Check for new version of python and it's depenencies, then exit.",
)
build_subparser.add_argument(
"--no-pretty",
default=False,
@@ -176,18 +170,6 @@ def main(args: argparse.Namespace) -> None:
build.recipies["python"]["download"].version = str(build_version)
build.recipies["python"]["download"].checksum = pyversions[build_version]

if args.check_versions:
if not CHECK_VERSIONS_SUPPORT:
print(
"Check versions not supported. Please install the "
"packaging and looseversion python packages."
)
sys.exit(2)
if not build.check_versions():
sys.exit(1)
else:
sys.exit(0)

build.set_arch(args.arch)
if build.build_arch != build.arch:
print(
209 changes: 59 additions & 150 deletions relenv/build/common.py
@@ -22,7 +22,6 @@
import tempfile
import time
import tarfile
from html.parser import HTMLParser
from types import ModuleType
from typing import (
Any,
@@ -38,7 +37,7 @@
cast,
)

from typing import TYPE_CHECKING, Protocol, TypedDict
from typing import TYPE_CHECKING, TypedDict

if TYPE_CHECKING:
from multiprocessing.synchronize import Event as SyncEvent
@@ -59,7 +58,6 @@
get_triplet,
runcmd,
work_dirs,
fetch_url,
Version,
WorkDirs,
)
@@ -68,14 +66,6 @@

PathLike = Union[str, os.PathLike[str]]


CHECK_VERSIONS_SUPPORT = True
try:
from packaging.version import InvalidVersion, parse
from looseversion import LooseVersion
except ImportError:
CHECK_VERSIONS_SUPPORT = False

log = logging.getLogger(__name__)


@@ -204,11 +194,14 @@ def print_ui(

def verify_checksum(file: PathLike, checksum: Optional[str]) -> bool:
"""
Verify the checksum of a files.
Verify the checksum of a file.

Supports both SHA-1 (40 hex chars) and SHA-256 (64 hex chars) checksums.
The hash algorithm is auto-detected based on checksum length.

:param file: The path to the file to check.
:type file: str
:param checksum: The checksum to verify against
:param checksum: The checksum to verify against (SHA-1 or SHA-256)
:type checksum: str

:raises RelenvException: If the checksum verification failed
@@ -219,11 +212,26 @@ def verify_checksum(file: PathLike, checksum: Optional[str]) -> bool:
if checksum is None:
log.error("Can't verify checksum because none was given")
return False

# Auto-detect hash type based on length
# SHA-1: 40 hex chars, SHA-256: 64 hex chars
if len(checksum) == 64:
hash_algo = hashlib.sha256()
hash_name = "sha256"
elif len(checksum) == 40:
hash_algo = hashlib.sha1()
hash_name = "sha1"
else:
raise RelenvException(
f"Invalid checksum length {len(checksum)}. Expected 40 (SHA-1) or 64 (SHA-256)"
)

with open(file, "rb") as fp:
file_checksum = hashlib.sha1(fp.read()).hexdigest()
hash_algo.update(fp.read())
file_checksum = hash_algo.hexdigest()
if checksum != file_checksum:
raise RelenvException(
f"sha1 checksum verification failed. expected={checksum} found={file_checksum}"
f"{hash_name} checksum verification failed. expected={checksum} found={file_checksum}"
)
return True
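
The updated helper keys the hash algorithm off checksum length alone, so callers never have to say which digest they supplied. A minimal usage sketch, assuming relenv with this change is installed; the artifact path and digests below are hypothetical and exist only for illustration:

```python
import hashlib
import pathlib

from relenv.build.common import verify_checksum

# Hypothetical downloaded artifact used only for illustration.
artifact = pathlib.Path("downloads/openssl-3.2.1.tar.gz")
data = artifact.read_bytes()

# A 64-character hex digest is treated as SHA-256 ...
sha256_digest = hashlib.sha256(data).hexdigest()
# ... while a 40-character digest falls back to SHA-1.
sha1_digest = hashlib.sha1(data).hexdigest()

verify_checksum(artifact, sha256_digest)  # True
verify_checksum(artifact, sha1_digest)    # True
# Any other digest length raises RelenvException before the file is hashed.
```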

@@ -487,8 +495,6 @@ def patch_file(path: PathLike, old: str, new: str) -> None:
:type path: str
"""
log.debug("Patching file: %s", path)
import re

with open(path, "r") as fp:
content = fp.read()
new_content = ""
@@ -499,123 +505,52 @@
fp.write(new_content)


def tarball_version(href: str) -> Optional[str]:
if href.endswith("tar.gz"):
try:
x = href.split("-", 1)[1][:-7]
if x != "latest":
return x
except IndexError:
return None
return None


def sqlite_version(href: str) -> Optional[str]:
if "releaselog" in href:
link = href.split("/")[1][:-5]
return "{:d}{:02d}{:02d}00".format(*[int(_) for _ in link.split("_")])
return None

def get_dependency_version(name: str, platform: str) -> Optional[Dict[str, str]]:
"""
Get dependency version and metadata from python-versions.json.

def github_version(href: str) -> Optional[str]:
if "tag/" in href:
return href.split("/v")[-1]
return None
Returns dict with keys: version, url, sha256, and any extra fields (e.g., sqliteversion)
Returns None if dependency not found.

:param name: Dependency name (openssl, sqlite, xz)
:param platform: Platform name (linux, darwin, win32)
:return: Dict with version, url, sha256, and extra fields, or None
"""
versions_file = MODULE_DIR / "python-versions.json"
if not versions_file.exists():
return None

def krb_version(href: str) -> Optional[str]:
if re.match(r"\d\.\d\d/", href):
return href[:-1]
return None
import json

data = json.loads(versions_file.read_text())
dependencies = data.get("dependencies", {})

def python_version(href: str) -> Optional[str]:
if re.match(r"(\d+\.)+\d/", href):
return href[:-1]
return None
if name not in dependencies:
return None

# Get the latest version for this dependency that supports the platform
dep_versions = dependencies[name]
for version, info in sorted(
dep_versions.items(),
key=lambda x: [int(n) for n in x[0].split(".")],
reverse=True,
):
if platform in info.get("platforms", []):
# Build result dict with version, url, sha256, and any extra fields
result = {
"version": version,
"url": info["url"],
"sha256": info.get("sha256", ""),
}
# Add any extra fields (like sqliteversion for SQLite)
for key, value in info.items():
if key not in ["url", "sha256", "platforms"]:
result[key] = value
return result

def uuid_version(href: str) -> Optional[str]:
if "download" in href and "latest" not in href:
return href[:-16].rsplit("/")[-1].replace("libuuid-", "")
return None


def parse_links(text: str) -> List[str]:
class HrefParser(HTMLParser):
def __init__(self) -> None:
super().__init__()
self.hrefs: List[str] = []

def handle_starttag(
self, tag: str, attrs: List[Tuple[str, Optional[str]]]
) -> None:
if tag == "a":
link = dict(attrs).get("href")
if link:
self.hrefs.append(link)

parser = HrefParser()
parser.feed(text)
return parser.hrefs


class Comparable(Protocol):
"""Protocol capturing the comparison operations we rely on."""

def __lt__(self, other: Any) -> bool:
"""Return True when self is ordered before *other*."""

def __gt__(self, other: Any) -> bool:
"""Return True when self is ordered after *other*."""


def check_files(
name: str,
location: str,
func: Optional[Callable[[str], Optional[str]]],
current: str,
) -> None:
fp = io.BytesIO()
fetch_url(location, fp)
fp.seek(0)
text = fp.read().decode()
loose = False
current_version: Comparable
try:
current_version = cast(Comparable, parse(current))
except InvalidVersion:
current_version = LooseVersion(current)
loose = True

versions: List[Comparable] = []
if func is None:
return
for link in parse_links(text):
version = func(link)
if version:
if loose:
versions.append(LooseVersion(version))
else:
try:
versions.append(cast(Comparable, parse(version)))
except InvalidVersion:
pass
versions.sort()
compare_versions(name, current_version, versions)


def compare_versions(
name: str, current: Comparable, versions: Sequence[Comparable]
) -> None:
for version in versions:
try:
if version > current:
print(f"Found new version of {name} {version} > {current}")
except TypeError:
print(f"Unable to compare versions {version}")


class Download:
"""
A utility that holds information about content to be downloaded.
@@ -644,8 +579,6 @@ def __init__(
destination: PathLike = "",
version: str = "",
checksum: Optional[str] = None,
checkfunc: Optional[Callable[[str], Optional[str]]] = None,
checkurl: Optional[str] = None,
) -> None:
self.name = name
self.url_tpl = url
Expand All @@ -656,8 +589,6 @@ def __init__(
self._destination = pathlib.Path(destination)
self.version = version
self.checksum = checksum
self.checkfunc = checkfunc
self.checkurl = checkurl

def copy(self) -> "Download":
return Download(
@@ -668,8 +599,6 @@ def copy(self) -> "Download":
self.destination,
self.version,
self.checksum,
self.checkfunc,
self.checkurl,
)

@property
@@ -838,16 +767,6 @@ def __call__(
sys.exit(1)
return valid

def check_version(self) -> bool:
if self.checkfunc is None:
return True
if self.checkurl:
url = self.checkurl
else:
url = self.url.rsplit("/", 1)[0]
check_files(self.name, url, self.checkfunc, self.version)
return True


class Recipe(TypedDict):
"""Typed description of a build recipe entry."""
@@ -1503,16 +1422,6 @@ def __call__(
if stream_handler is not None:
log.removeHandler(stream_handler)

def check_versions(self) -> bool:
success = True
for step in list(self.recipies):
download = self.recipies[step]["download"]
if not download:
continue
if not download.check_version():
success = False
return success


class Builds:
"""Collection of platform-specific builders."""