
Commit 2d02dca

Use product-specific repositories for SLFO/Gitea incidents
This is not configurable via metadata because metadata only seems to come into play when scheduling products. However, here we need to be aware of whether/which products to schedule already from the beginning (when syncing incidents from Gitea). Hence I made this configurable via two env variables for now:

* `OBS_REPO_TYPE`: specifies the repo type to look at, e.g. `standard` or `product`, or an empty string to consider all repo types
* `OBS_PRODUCTS`: specifies a comma-separated list of products to consider; if an empty value appears on the list, the code stream is considered as well

So with `OBS_REPO_TYPE=standard` and `OBS_PRODUCTS=` we get the current behavior of using the code stream repository. With `OBS_REPO_TYPE=product` and `OBS_PRODUCTS=SLES` we would use the SLES-specific repository.

Note that the product version (e.g. `15.99` as in PR 166) can be read from the build results of OBS (from the `scmsync` tag), which we read anyway. So there is no need to supply a mapping from e.g. `1.1.99` to `15.99`.

To determine the available architectures of product repositories the bot now reads the `_multibuild` info from OBS. This seems to be required because the build info returns results like this:

```
<result project="SUSE:SLFO:1.1.99:PullRequest:166:SLES" repository="product" arch="ppc64le" code="published" state="published">
```

However, there is no actual product repository present for this result. The absence of `ppc64le` and `local` in the `_multibuild` XML explains why these product repositories are missing, and therefore we need to check this file for the actually relevant archs. Otherwise the bot would later run into errors, e.g. when computing the repo hash of these non-existent repos.

Related ticket: https://progress.opensuse.org/issues/180812
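
For illustration, a minimal sketch of how the two environment variables described above are interpreted, mirroring the parsing added to `openqabot/__init__.py` below (the `print` is only for demonstration):

```python
import os

# same parsing as added to openqabot/__init__.py
OBS_REPO_TYPE = os.environ.get("OBS_REPO_TYPE", "product")
OBS_PRODUCTS = set(os.environ.get("OBS_PRODUCTS", "SLES").split(","))

# With OBS_REPO_TYPE=standard and OBS_PRODUCTS= (empty), OBS_PRODUCTS becomes {""};
# an empty product name is what the code stream maps to, so only the code stream
# repository is used (the previous behavior).
# With the defaults shown above, only the SLES-specific product repository is used.
print(OBS_REPO_TYPE, sorted(OBS_PRODUCTS))
```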
1 parent a7a769e commit 2d02dca

10 files changed: +195 −41 lines

openqabot/__init__.py

Lines changed: 2 additions & 0 deletions
```diff
@@ -12,6 +12,8 @@
 OBS_DOWNLOAD_URL = os.environ.get("OBS_DOWNLOAD_URL", "http://download.suse.de/ibs")
 OBS_MAINT_PRJ = "SUSE:Maintenance"
 OBS_GROUP = "qam-openqa"
+OBS_REPO_TYPE = os.environ.get("OBS_REPO_TYPE", "product")
+OBS_PRODUCTS = set(os.environ.get("OBS_PRODUCTS", "SLES").split(","))
 ALLOW_DEVELOPMENT_GROUPS = os.environ.get("QEM_BOT_ALLOW_DEVELOPMENT_GROUPS")
 DEVELOPMENT_PARENT_GROUP_ID = 9
 DOWNLOAD_BASE = os.environ.get(
```

openqabot/loader/gitea.py

Lines changed: 104 additions & 13 deletions
```diff
@@ -2,22 +2,21 @@
 # SPDX-License-Identifier: MIT
 import concurrent.futures as CT
 from logging import getLogger
-from typing import Any, List, Set, Dict
+from typing import Any, List, Set, Dict, Tuple
 import re
 import xml.etree.ElementTree as ET
 
 import json
 import urllib3
 import urllib3.exceptions
 
+from osc.core import MultibuildFlavorResolver
 import osc.conf
 import osc.core
 import osc.util.xml
 
 from ..utils import retry10 as requests
-from .. import GITEA, OBS_GROUP, OBS_URL
-
-PROJECT_REGEX = ".*:PullRequest:\\d+:(.*)"
+from .. import GITEA, OBS_GROUP, OBS_URL, OBS_REPO_TYPE, OBS_PRODUCTS
 
 log = getLogger("bot.loader.gitea")
 
@@ -51,6 +50,11 @@ def post_json(
         raise e
 
 
+def read_utf8(name: str) -> Any:
+    with open("responses/%s" % name, "r", encoding="utf8") as utf8:
+        return utf8.read()
+
+
 def read_json(name: str) -> Any:
     with open("responses/%s.json" % name, "r", encoding="utf8") as json_file:
         return json.loads(json_file.read())
@@ -75,6 +79,33 @@ def comments_url(repo_name: str, number: int):
     return "repos/%s/issues/%s/comments" % (repo_name, number)
 
 
+def get_product_name(obs_project: str):
+    product_match = re.search(".*:PullRequest:\\d+:(.*)", obs_project)
+    return product_match.group(1) if product_match else ""
+
+
+def get_product_name_and_version_from_scmsync(scmsync_url: str):
+    m = re.search(".*/products/(.*)#(.*)", scmsync_url)
+    return (m.group(1), m.group(2)) if m else ("", "")
+
+
+def compute_repo_url(
+    base: str, product_name: str, repo: List[Tuple[str, str, str]], arch: str, path: str = "repodata/repomd.xml"
+):
+    # use codestream repo if product name is empty
+    if product_name == "":
+        # assign something like `http://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166/standard/repodata/repomd.xml`
+        return f"{base}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/{OBS_REPO_TYPE}/{path}"
+    # use product repo
+    # assign something like `https://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166:/SLES/product/repo/SLES-15.99-x86_64/repodata/repomd.xml`
+    return f"{base}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/{OBS_REPO_TYPE}/repo/{product_name}-{repo[2]}-{arch}/{path}"
+
+
+def compute_repo_url_for_job_setting(base: str, repo: List[Tuple[str, str, str]], arch: str):
+    product_name = get_product_name(repo[1])
+    return compute_repo_url(base, product_name, repo, arch, "")
+
+
 def get_open_prs(token: Dict[str, str], repo: str, dry: bool) -> List[Any]:
     if dry:
         return read_json("pulls")
```
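
A rough usage sketch of the new helpers; the example project, scmsync URL, and repo tuple are taken from the comments above, and the product-repo URL printed at the end assumes the default `OBS_REPO_TYPE=product`:

```python
from openqabot.loader.gitea import (
    compute_repo_url,
    get_product_name,
    get_product_name_and_version_from_scmsync,
)

# the product name is the suffix after ":PullRequest:<number>:"; empty for the code stream
assert get_product_name("SUSE:SLFO:1.1.99:PullRequest:166:SLES") == "SLES"
assert get_product_name("SUSE:SLFO:1.1.99:PullRequest:166") == ""

# product name and version are taken from the scmsync URL of a build result
assert get_product_name_and_version_from_scmsync(
    "https://src.suse.de/products/SLES#15.99"
) == ("SLES", "15.99")

# with OBS_REPO_TYPE=product this yields
# .../SUSE:/SLFO:/1.1.99:/PullRequest:/166:/SLES/product/repo/SLES-15.99-x86_64/repodata/repomd.xml
repo = ("SUSE:SLFO", "1.1.99:PullRequest:166:SLES", "15.99")
print(compute_repo_url("http://download.suse.de/ibs", "SLES", repo, "x86_64"))
```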
```diff
@@ -152,14 +183,18 @@ def add_reviews(incident: Dict[str, Any], reviews: List[Any]) -> int:
 def add_build_result(
     incident: Dict[str, Any],
     res: Any,
+    projects: Set[str],
     successful_packages: Set[str],
     unpublished_repos: Set[str],
     failed_packages: Set[str],
 ):
     state = res.get("state")
     project = res.get("project")
-    project_match = re.search(PROJECT_REGEX, project)
-    scm_info_key = "scminfo_" + project_match.group(1) if project_match else "scminfo"
+    product_name = get_product_name(project)
+    arch = res.get("arch")
+    channel = ":".join([project, arch])
+    # read Git hash from scminfo element
+    scm_info_key = "scminfo_" + product_name if len(product_name) != 0 else "scminfo"
     for scminfo_element in res.findall("scminfo"):
         found_scminfo = scminfo_element.text
         existing_scminfo = incident.get(scm_info_key, None)
@@ -174,11 +209,20 @@
             found_scminfo,
             existing_scminfo,
         )
-    # require codestream builds to be successful and published …
-    if project_match:
-        return  # … but skip those checks for project-specific builds/repos as we do not use them anyway
+    # read product version from scmsync element, e.g. 15.99
+    for scmsync_element in res.findall("scmsync"):
+        (_, product_version) = get_product_name_and_version_from_scmsync(
+            scmsync_element.text
+        )
+        if len(product_version) > 0:
+            channel = "#".join([channel, product_version])
+            break
+    projects.add(channel)
+    # require only relevant projects to be built/published
+    if product_name not in OBS_PRODUCTS:
+        return
     if state != "published":
-        unpublished_repos.add("@".join([project, res.get("arch")]))
+        unpublished_repos.add(channel)
         return
     for status in res.findall("status"):
         code = status.get("code")
@@ -190,6 +234,43 @@
             failed_packages.add(status.get("package"))
 
 
+def get_multibuild_data(obs_project: str):
+    r = MultibuildFlavorResolver(OBS_URL, obs_project, "000productcompose")
+    return r.get_multibuild_data()
+
+
+def determine_relevant_archs_from_multibuild_info(obs_project: str, dry: bool):
+    # retrieve the _multibuild info like `osc cat SUSE:SLFO:1.1.99:PullRequest:124:SLES 000productcompose _multibuild`
+    product_name = get_product_name(obs_project)
+    if product_name == "":
+        return None
+    product_prefix = product_name.replace(":", "_").lower() + "_"
+    prefix_len = len(product_prefix)
+    if dry:
+        multibuild_data = read_utf8("_multibuild-124-" + obs_project + ".xml")
+    else:
+        try:
+            multibuild_data = get_multibuild_data(obs_project)
+        except Exception as e:
+            log.warning("Unable to determine relevant archs for %s: %s", obs_project, e)
+            return None
+
+    # determine from the flavors we got what architectures are actually expected to be present
+    # note: The build info will contain result elements for archs like `local` and `ppc64le` that have the published
+    #       flag set even though no repos for those products are actually present. Considering these would lead to
+    #       problems later on (e.g. when computing the repohash) so it makes sense to reduce the archs we are considering
+    #       to actually relevant ones.
+    flavors = MultibuildFlavorResolver.parse_multibuild_data(multibuild_data)
+    relevant_archs = set()
+    for flavor in flavors:
+        if flavor.startswith(product_prefix):
+            arch = flavor[prefix_len:]
+            if arch in ("x86_64", "aarch64", "ppc64le", "s390x"):
+                relevant_archs.add(arch)
+    log.debug("Relevant archs for %s: %s", obs_project, str(sorted(relevant_archs)))
+    return relevant_archs
+
+
 def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
     successful_packages = set()
     unpublished_repos = set()
```
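
For reference, a small sketch of the flavor filtering performed above, using the flavor names from the `_multibuild` fixture added in this commit; only the plain `sles_<arch>` flavors contribute to the relevant archs:

```python
# flavors as listed in the _multibuild fixture added in this commit
flavors = {
    "sles_aarch64", "sles_x86_64",
    "sles_offline_aarch64", "sles_offline_x86_64",
    "sles_sap_x86_64",
    "sles_ha_aarch64", "sles_ha_x86_64",
    "sles_online_aarch64", "sles_online_x86_64",
}
product_prefix = "SLES".replace(":", "_").lower() + "_"  # -> "sles_"
relevant_archs = {
    flavor[len(product_prefix):]
    for flavor in flavors
    if flavor.startswith(product_prefix)
    and flavor[len(product_prefix):] in ("x86_64", "aarch64", "ppc64le", "s390x")
}
# neither ppc64le nor local shows up, so build results for those archs are skipped later
assert relevant_archs == {"aarch64", "x86_64"}
```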
```diff
@@ -198,18 +279,26 @@ def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
     for url in obs_urls:
         project_match = re.search(".*/project/show/(.*)", url)
         if project_match:
+            obs_project = project_match.group(1)
+            relevant_archs = determine_relevant_archs_from_multibuild_info(
+                obs_project, dry
+            )
             build_info_url = osc.core.makeurl(
-                OBS_URL, ["build", project_match.group(1), "_result"]
+                OBS_URL, ["build", obs_project, "_result"]
             )
             if dry:
-                build_info = read_xml("build-results-124-" + project_match.group(1))
+                build_info = read_xml("build-results-124-" + obs_project)
             else:
                 build_info = osc.util.xml.xml_parse(osc.core.http_GET(build_info_url))
             for res in build_info.getroot().findall("result"):
-                projects.add(":".join([res.get("project"), res.get("arch")]))
+                if OBS_REPO_TYPE != "" and res.get("repository") != OBS_REPO_TYPE:
+                    continue
+                if relevant_archs is not None and res.get("arch") not in relevant_archs:
+                    continue
                 add_build_result(
                     incident,
                     res,
+                    projects,
                     successful_packages,
                     unpublished_repos,
                     failed_packages,
@@ -229,6 +318,8 @@ def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
     incident["channels"] = [*projects]
     incident["failed_or_unpublished_packages"] = [*failed_packages, *unpublished_repos]
     incident["successful_packages"] = [*successful_packages]
+    if "scminfo" not in incident and len(OBS_PRODUCTS) == 1:
+        incident["scminfo"] = incident.get("scminfo_" + next(iter(OBS_PRODUCTS)), "")
 
 
 def add_comments_and_referenced_build_results(
```

openqabot/loader/repohash.py

Lines changed: 12 additions & 10 deletions
```diff
@@ -4,15 +4,14 @@
 from logging import getLogger
 from typing import List, Tuple
 from xml.etree import ElementTree as ET
-import re
 
 from requests import ConnectionError, HTTPError
 from requests.exceptions import RetryError
 
-from .. import OBS_DOWNLOAD_URL
-from .gitea import PROJECT_REGEX
+from .. import OBS_DOWNLOAD_URL, OBS_PRODUCTS
 from ..errors import NoRepoFoundError
 from ..utils import retry5 as requests
+from . import gitea
 
 log = getLogger("bot.loader.repohash")
 
@@ -23,17 +22,21 @@ def get_max_revision(
     project: str,
 ) -> int:
     max_rev = 0
-
     url_base = f"{OBS_DOWNLOAD_URL}/{project.replace(':', ':/')}"
 
     for repo in repos:
         # handle URLs for SLFO specifically
         if project == "SLFO":
-            # assing something like `http://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166/standard/repodata/repomd.xml`
-            url = f"{OBS_DOWNLOAD_URL}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/standard/repodata/repomd.xml"
-            if re.search(PROJECT_REGEX, repo[1]):
-                log.info("skipping repohash of product-specifc repo '%s'" % url)
-                continue  # skip product repositories here (only consider code stream repositories)
+            product_name = gitea.get_product_name(repo[1])
+            if product_name not in OBS_PRODUCTS:
+                log.info(
+                    "skipping repo '%s' as product '%s' is not considered",
+                    repo[1],
+                    product_name,
+                )
+                continue
+            url = gitea.compute_repo_url(OBS_DOWNLOAD_URL, product_name, repo, arch)
+            log.debug("computing repohash for '%s' via: %s", repo[1], url)
         # openSUSE and SLE incidents have different handling of architecture
         elif repo[0].startswith("openSUSE"):
             url = f"{url_base}/SUSE_Updates_{repo[0]}_{repo[1]}/repodata/repomd.xml"
@@ -58,7 +61,6 @@ def get_max_revision(
         if cs is None:
             log.error("%s's revision is None", url)
            raise NoRepoFoundError
-
        max_rev = max(max_rev, int(str(cs.text)))
 
    return max_rev
```
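
A rough sketch of the new skip logic in the SLFO branch above, assuming `OBS_PRODUCTS` contains only `SLES`; the repo tuples are illustrative and follow the `(product, version, product_version)` shape built in `Incident._rev`:

```python
from openqabot.loader import gitea

OBS_PRODUCTS = {"SLES"}
repos = [
    ("SUSE:SLFO", "1.1.99:PullRequest:166", ""),            # code stream repo
    ("SUSE:SLFO", "1.1.99:PullRequest:166:SLES", "15.99"),  # SLES product repo
]
for repo in repos:
    product_name = gitea.get_product_name(repo[1])
    if product_name not in OBS_PRODUCTS:
        # the code stream is skipped unless an empty entry is present in OBS_PRODUCTS
        print("skipping", repo[1])
        continue
    print(gitea.compute_repo_url("http://download.suse.de/ibs", product_name, repo, "x86_64"))
```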

openqabot/types/__init__.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -7,6 +7,7 @@ class Repos(NamedTuple):
     product: str
     version: str
     arch: str
+    product_version: str = ""  # if non-empty, "version" is the codestream version
 
 
 class ProdVer(NamedTuple):
```

openqabot/types/incident.py

Lines changed: 4 additions & 5 deletions
```diff
@@ -58,7 +58,7 @@ def __init__(self, incident):
         ]
         # add channels for Gitea-based incidents
         self.channels += [
-            Repos(":".join(val[0:2]), ":".join(val[2:-1]), val[-1])
+            Repos(":".join(val[0:2]), ":".join(val[2:-1]), *(val[-1].split("#")))
             for val in (
                 r.split(":")
                 for r in (i for i in incident["channels"] if i.startswith("SUSE:SLFO"))
@@ -110,12 +110,11 @@ def _rev(channels: List[Repos], project: str) -> Dict[ArchVer, int]:
             if v:
                 version = v.group(0)
 
+            repo_info = (repo.product, repo.version, repo.product_version)
             if ArchVer(repo.arch, version) in tmpdict:
-                tmpdict[ArchVer(repo.arch, version)].append(
-                    (repo.product, repo.version)
-                )
+                tmpdict[ArchVer(repo.arch, version)].append(repo_info)
             else:
-                tmpdict[ArchVer(repo.arch, version)] = [(repo.product, repo.version)]
+                tmpdict[ArchVer(repo.arch, version)] = [repo_info]
 
         if tmpdict:
             for archver, lrepos in tmpdict.items():
```
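
A small illustration of the channel parsing above; the channel string mirrors the `project:arch` value built in `add_build_result`, optionally suffixed with `#<product version>` taken from scmsync:

```python
# channel as stored in incident["channels"] for a product build result
channel = "SUSE:SLFO:1.1.99:PullRequest:166:SLES:x86_64#15.99"
val = channel.split(":")
repo = (":".join(val[0:2]), ":".join(val[2:-1]), *(val[-1].split("#")))
assert repo == ("SUSE:SLFO", "1.1.99:PullRequest:166:SLES", "x86_64", "15.99")
# a code stream channel has no "#<version>" suffix, so Repos.product_version
# falls back to its default ""
```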
responses/_multibuild-124-SUSE:SLFO:1.1.99:PullRequest:124:SLES.xml

Lines changed: 15 additions & 0 deletions
```diff
@@ -0,0 +1,15 @@
+<multibuild>
+  <flavor>sles_aarch64</flavor>
+  <flavor>sles_x86_64</flavor>
+
+  <flavor>sles_offline_aarch64</flavor>
+  <flavor>sles_offline_x86_64</flavor>
+
+  <flavor>sles_sap_x86_64</flavor>
+
+  <flavor>sles_ha_aarch64</flavor>
+  <flavor>sles_ha_x86_64</flavor>
+
+  <flavor>sles_online_aarch64</flavor>
+  <flavor>sles_online_x86_64</flavor>
+</multibuild>
```

responses/build-results-124-SUSE:SLFO:1.1.99:PullRequest:124:SLES.xml

Lines changed: 4 additions & 4 deletions
```diff
@@ -21,28 +21,28 @@
     <status package="busybox-image" code="excluded" />
     <status package="gcc-15-image" code="succeeded" />
   </result>
-  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="aarch64" code="published" state="published">
+  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="aarch64" code="published" state="unpublished">
     <scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
     <scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
     <status package="base-image" code="excluded" />
     <status package="busybox-image" code="excluded" />
     <status package="gcc-15-image" code="succeeded" />
   </result>
-  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="local" code="published" state="published">
+  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="local" code="published" state="unpublished">
     <scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
     <scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
     <status package="base-image" code="excluded" />
     <status package="busybox-image" code="excluded" />
     <status package="gcc-15-image" code="succeeded" />
   </result>
-  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="ppc64le" code="published" state="published">
+  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="ppc64le" code="published" state="unpublished">
     <scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
     <scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
     <status package="base-image" code="excluded" />
     <status package="busybox-image" code="excluded" />
     <status package="gcc-15-image" code="succeeded" />
   </result>
-  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="x86_64" code="published" state="published">
+  <result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="x86_64" code="published" state="unpublished">
     <scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
     <scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
     <status package="base-image" code="excluded" />
```

tests/test_aggregate.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -89,6 +89,7 @@ class Repos(NamedTuple):
     product: str
     version: str
     arch: str
+    product_version: str = ""
 
 class MockIncident:
     def __init__(self, repo, embargoed):
```
