Skip to content

Commit ff4d6ff

Browse files
committed
Use product-specific repositories for SLFO/Gitea incidents
This is not configurable via metadata because metadata only seems to come into play when scheduling products. However, here we need to be aware of whether/which products to schedule already from the beginning (when syncing incidents from Gitea). Hence I made this configurable via two env variables for now: * `OBS_REPO_TYPE`: specifies the repo type to look at, e.g. `standard` or `product` or an empty string to consider all repo types * `OBS_PRODUCTS`: specifies a comma-separated list of products to consider and if an empty value appears on the list the code stream is considered as well So with `OBS_REPO_TYPE=standard` and `OBS_PRODUCTS=` we get the current behavior of using the code stream repository. With `OBS_REPO_TYPE=product` and `OBS_PRODUCTS=SLES` we would use the SLES-specific repository. Note that the product version (e.g. `15.99` as in PR 166) can be read from the build results of OBS (from the `scmsync` tag) which we read anyway. So there's no need to supply a mapping from e.g. `1.1.99` to `15.99`. To determine the available architectures of product repositories the bot now reads the `_multibuild` info from OBS. This seems to be required because the build info returns results like this: ``` <result project="SUSE:SLFO:1.1.99:PullRequest:166:SLES" repository="product" arch="ppc64le" code="published" state="published"> ``` However, there is no actual product repository for this repository present. The absence of `ppc64le` and `local` in the `_multibuild` XML explains why these product repositories are missing and therefore we need to check this file for the actually relevant archs. Otherwise the bot would later run into errors, e.g. when computing the repo hash of these non-existent repos. Related ticket: https://progress.opensuse.org/issues/180812
1 parent a7a769e commit ff4d6ff

11 files changed

+205
-44
lines changed

openqabot/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
OBS_DOWNLOAD_URL = os.environ.get("OBS_DOWNLOAD_URL", "http://download.suse.de/ibs")
1313
OBS_MAINT_PRJ = "SUSE:Maintenance"
1414
OBS_GROUP = "qam-openqa"
15+
OBS_REPO_TYPE = os.environ.get("OBS_REPO_TYPE", "product")
16+
OBS_PRODUCTS = set(os.environ.get("OBS_PRODUCTS", "SLES").split(","))
1517
ALLOW_DEVELOPMENT_GROUPS = os.environ.get("QEM_BOT_ALLOW_DEVELOPMENT_GROUPS")
1618
DEVELOPMENT_PARENT_GROUP_ID = 9
1719
DOWNLOAD_BASE = os.environ.get(

openqabot/loader/gitea.py

Lines changed: 110 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -2,22 +2,21 @@
22
# SPDX-License-Identifier: MIT
33
import concurrent.futures as CT
44
from logging import getLogger
5-
from typing import Any, List, Set, Dict
5+
from typing import Any, List, Set, Dict, Tuple
66
import re
77
import xml.etree.ElementTree as ET
88

99
import json
1010
import urllib3
1111
import urllib3.exceptions
1212

13+
from osc.core import MultibuildFlavorResolver
1314
import osc.conf
1415
import osc.core
1516
import osc.util.xml
1617

1718
from ..utils import retry10 as requests
18-
from .. import GITEA, OBS_GROUP, OBS_URL
19-
20-
PROJECT_REGEX = ".*:PullRequest:\\d+:(.*)"
19+
from .. import GITEA, OBS_GROUP, OBS_URL, OBS_REPO_TYPE, OBS_PRODUCTS
2120

2221
log = getLogger("bot.loader.gitea")
2322

@@ -51,6 +50,11 @@ def post_json(
5150
raise e
5251

5352

53+
def read_utf8(name: str) -> Any:
54+
with open("responses/%s" % name, "r", encoding="utf8") as utf8:
55+
return utf8.read()
56+
57+
5458
def read_json(name: str) -> Any:
5559
with open("responses/%s.json" % name, "r", encoding="utf8") as json_file:
5660
return json.loads(json_file.read())
@@ -75,6 +79,39 @@ def comments_url(repo_name: str, number: int):
7579
return "repos/%s/issues/%s/comments" % (repo_name, number)
7680

7781

82+
def get_product_name(obs_project: str):
83+
product_match = re.search(".*:PullRequest:\\d+:(.*)", obs_project)
84+
return product_match.group(1) if product_match else ""
85+
86+
87+
def get_product_name_and_version_from_scmsync(scmsync_url: str):
88+
m = re.search(".*/products/(.*)#(.*)", scmsync_url)
89+
return (m.group(1), m.group(2)) if m else ("", "")
90+
91+
92+
def compute_repo_url(
93+
base: str,
94+
product_name: str,
95+
repo: List[Tuple[str, str, str]],
96+
arch: str,
97+
path: str = "repodata/repomd.xml",
98+
):
99+
# use codestream repo if product name is empty
100+
if product_name == "":
101+
# assembling something like `http://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166/standard/repodata/repomd.xml`
102+
return f"{base}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/{OBS_REPO_TYPE}/{path}"
103+
# use product repo
104+
# assembling something like `https://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166:/SLES/product/repo/SLES-15.99-x86_64/repodata/repomd.xml`
105+
return f"{base}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/{OBS_REPO_TYPE}/repo/{product_name}-{repo[2]}-{arch}/{path}"
106+
107+
108+
def compute_repo_url_for_job_setting(
109+
base: str, repo: List[Tuple[str, str, str]], arch: str
110+
):
111+
product_name = get_product_name(repo[1])
112+
return compute_repo_url(base, product_name, repo, arch, "")
113+
114+
78115
def get_open_prs(token: Dict[str, str], repo: str, dry: bool) -> List[Any]:
79116
if dry:
80117
return read_json("pulls")
@@ -152,14 +189,18 @@ def add_reviews(incident: Dict[str, Any], reviews: List[Any]) -> int:
152189
def add_build_result(
153190
incident: Dict[str, Any],
154191
res: Any,
192+
projects: Set[str],
155193
successful_packages: Set[str],
156194
unpublished_repos: Set[str],
157195
failed_packages: Set[str],
158196
):
159197
state = res.get("state")
160198
project = res.get("project")
161-
project_match = re.search(PROJECT_REGEX, project)
162-
scm_info_key = "scminfo_" + project_match.group(1) if project_match else "scminfo"
199+
product_name = get_product_name(project)
200+
arch = res.get("arch")
201+
channel = ":".join([project, arch])
202+
# read Git hash from scminfo element
203+
scm_info_key = "scminfo_" + product_name if len(product_name) != 0 else "scminfo"
163204
for scminfo_element in res.findall("scminfo"):
164205
found_scminfo = scminfo_element.text
165206
existing_scminfo = incident.get(scm_info_key, None)
@@ -174,11 +215,20 @@ def add_build_result(
174215
found_scminfo,
175216
existing_scminfo,
176217
)
177-
# require codestream builds to be successful and published …
178-
if project_match:
179-
return # … but skip those checks for project-specific builds/repos as we do not use them anyway
218+
# read product version from scmsync element, e.g. 15.99
219+
for scmsync_element in res.findall("scmsync"):
220+
(_, product_version) = get_product_name_and_version_from_scmsync(
221+
scmsync_element.text
222+
)
223+
if len(product_version) > 0:
224+
channel = "#".join([channel, product_version])
225+
break
226+
projects.add(channel)
227+
# require only relevant projects to be built/published
228+
if product_name not in OBS_PRODUCTS:
229+
return
180230
if state != "published":
181-
unpublished_repos.add("@".join([project, res.get("arch")]))
231+
unpublished_repos.add(channel)
182232
return
183233
for status in res.findall("status"):
184234
code = status.get("code")
@@ -190,6 +240,43 @@ def add_build_result(
190240
failed_packages.add(status.get("package"))
191241

192242

243+
def get_multibuild_data(obs_project: str):
244+
r = MultibuildFlavorResolver(OBS_URL, obs_project, "000productcompose")
245+
return r.get_multibuild_data()
246+
247+
248+
def determine_relevant_archs_from_multibuild_info(obs_project: str, dry: bool):
249+
# retrieve the _multibuild info like `osc cat SUSE:SLFO:1.1.99:PullRequest:124:SLES 000productcompose _multibuild`
250+
product_name = get_product_name(obs_project)
251+
if product_name == "":
252+
return None
253+
product_prefix = product_name.replace(":", "_").lower() + "_"
254+
prefix_len = len(product_prefix)
255+
if dry:
256+
multibuild_data = read_utf8("_multibuild-124-" + obs_project + ".xml")
257+
else:
258+
try:
259+
multibuild_data = get_multibuild_data(obs_project)
260+
except Exception as e:
261+
log.warning("Unable to determine relevant archs for %s: %s", obs_project, e)
262+
return None
263+
264+
# determine from the flavors we got what architectures are actually expected to be present
265+
# note: The build info will contain result elements for archs like `local` and `ppc64le` that have the published
266+
# flag set even though no repos for those products are actually present. Considering these would lead to
267+
# problems later on (e.g. when computing the repohash) so it makes sense to reduce the archs we are considering
268+
# to actually relevant ones.
269+
flavors = MultibuildFlavorResolver.parse_multibuild_data(multibuild_data)
270+
relevant_archs = set()
271+
for flavor in flavors:
272+
if flavor.startswith(product_prefix):
273+
arch = flavor[prefix_len:]
274+
if arch in ("x86_64", "aarch64", "ppc64le", "s390x"):
275+
relevant_archs.add(arch)
276+
log.debug("Relevant archs for %s: %s", obs_project, str(sorted(relevant_archs)))
277+
return relevant_archs
278+
279+
193280
def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
194281
successful_packages = set()
195282
unpublished_repos = set()
@@ -198,18 +285,26 @@ def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
198285
for url in obs_urls:
199286
project_match = re.search(".*/project/show/(.*)", url)
200287
if project_match:
288+
obs_project = project_match.group(1)
289+
relevant_archs = determine_relevant_archs_from_multibuild_info(
290+
obs_project, dry
291+
)
201292
build_info_url = osc.core.makeurl(
202-
OBS_URL, ["build", project_match.group(1), "_result"]
293+
OBS_URL, ["build", obs_project, "_result"]
203294
)
204295
if dry:
205-
build_info = read_xml("build-results-124-" + project_match.group(1))
296+
build_info = read_xml("build-results-124-" + obs_project)
206297
else:
207298
build_info = osc.util.xml.xml_parse(osc.core.http_GET(build_info_url))
208299
for res in build_info.getroot().findall("result"):
209-
projects.add(":".join([res.get("project"), res.get("arch")]))
300+
if OBS_REPO_TYPE != "" and res.get("repository") != OBS_REPO_TYPE:
301+
continue
302+
if relevant_archs is not None and res.get("arch") not in relevant_archs:
303+
continue
210304
add_build_result(
211305
incident,
212306
res,
307+
projects,
213308
successful_packages,
214309
unpublished_repos,
215310
failed_packages,
@@ -229,6 +324,8 @@ def add_build_results(incident: Dict[str, Any], obs_urls: List[str], dry: bool):
229324
incident["channels"] = [*projects]
230325
incident["failed_or_unpublished_packages"] = [*failed_packages, *unpublished_repos]
231326
incident["successful_packages"] = [*successful_packages]
327+
if "scminfo" not in incident and len(OBS_PRODUCTS) == 1:
328+
incident["scminfo"] = incident.get("scminfo_" + next(iter(OBS_PRODUCTS)), "")
232329

233330

234331
def add_comments_and_referenced_build_results(

openqabot/loader/repohash.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,14 @@
44
from logging import getLogger
55
from typing import List, Tuple
66
from xml.etree import ElementTree as ET
7-
import re
87

98
from requests import ConnectionError, HTTPError
109
from requests.exceptions import RetryError
1110

12-
from .. import OBS_DOWNLOAD_URL
13-
from .gitea import PROJECT_REGEX
11+
from .. import OBS_DOWNLOAD_URL, OBS_PRODUCTS
1412
from ..errors import NoRepoFoundError
1513
from ..utils import retry5 as requests
14+
from . import gitea
1615

1716
log = getLogger("bot.loader.repohash")
1817

@@ -23,17 +22,21 @@ def get_max_revision(
2322
project: str,
2423
) -> int:
2524
max_rev = 0
26-
2725
url_base = f"{OBS_DOWNLOAD_URL}/{project.replace(':', ':/')}"
2826

2927
for repo in repos:
3028
# handle URLs for SLFO specifically
3129
if project == "SLFO":
32-
# assing something like `http://download.suse.de/ibs/SUSE:/SLFO:/1.1.99:/PullRequest:/166/standard/repodata/repomd.xml`
33-
url = f"{OBS_DOWNLOAD_URL}/{repo[0].replace(':', ':/')}:/{repo[1].replace(':', ':/')}/standard/repodata/repomd.xml"
34-
if re.search(PROJECT_REGEX, repo[1]):
35-
log.info("skipping repohash of product-specifc repo '%s'" % url)
36-
continue # skip product repositories here (only consider code stream repositories)
30+
product_name = gitea.get_product_name(repo[1])
31+
if product_name not in OBS_PRODUCTS:
32+
log.info(
33+
"skipping repo '%s' as product '%s' is not considered",
34+
repo[1],
35+
product_name,
36+
)
37+
continue
38+
url = gitea.compute_repo_url(OBS_DOWNLOAD_URL, product_name, repo, arch)
39+
log.debug("computing repohash for '%s' via: %s", repo[1], url)
3740
# openSUSE and SLE incidents have different handling of architecture
3841
elif repo[0].startswith("openSUSE"):
3942
url = f"{url_base}/SUSE_Updates_{repo[0]}_{repo[1]}/repodata/repomd.xml"
@@ -58,7 +61,6 @@ def get_max_revision(
5861
if cs is None:
5962
log.error("%s's revision is None", url)
6063
raise NoRepoFoundError
61-
6264
max_rev = max(max_rev, int(str(cs.text)))
6365

6466
return max_rev

openqabot/types/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ class Repos(NamedTuple):
77
product: str
88
version: str
99
arch: str
10+
product_version: str = "" # if non-empty, "version" is the codestream version
1011

1112

1213
class ProdVer(NamedTuple):

openqabot/types/incident.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ def __init__(self, incident):
5858
]
5959
# add channels for Gitea-based incidents
6060
self.channels += [
61-
Repos(":".join(val[0:2]), ":".join(val[2:-1]), val[-1])
61+
Repos(":".join(val[0:2]), ":".join(val[2:-1]), *(val[-1].split("#")))
6262
for val in (
6363
r.split(":")
6464
for r in (i for i in incident["channels"] if i.startswith("SUSE:SLFO"))
@@ -110,12 +110,11 @@ def _rev(channels: List[Repos], project: str) -> Dict[ArchVer, int]:
110110
if v:
111111
version = v.group(0)
112112

113+
repo_info = (repo.product, repo.version, repo.product_version)
113114
if ArchVer(repo.arch, version) in tmpdict:
114-
tmpdict[ArchVer(repo.arch, version)].append(
115-
(repo.product, repo.version)
116-
)
115+
tmpdict[ArchVer(repo.arch, version)].append(repo_info)
117116
else:
118-
tmpdict[ArchVer(repo.arch, version)] = [(repo.product, repo.version)]
117+
tmpdict[ArchVer(repo.arch, version)] = [repo_info]
119118

120119
if tmpdict:
121120
for archver, lrepos in tmpdict.items():

openqabot/types/incidents.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -81,9 +81,9 @@ def _is_scheduled_job(
8181

8282
return False
8383

84-
def _make_repo_url(self, inc: Incident, chan: Repos):
84+
def _make_repo_url(self, inc: Incident, chan: Repos, arch: str):
8585
return (
86-
f"{DOWNLOAD_SLFO}{chan.version}:/PullRequest:/{inc.id}/standard/"
86+
gitea.compute_repo_url_for_job_setting(DOWNLOAD_BASE, chan, arch)
8787
if chan.product == "SLFO"
8888
else f"{DOWNLOAD_MAINTENANCE}{inc.id}/SUSE_Updates_{'_'.join(self._repo_osuse(chan))}"
8989
)
@@ -157,6 +157,7 @@ def _handle_incident( # pylint: disable=too-many-return-statements
157157
issue_dict = {}
158158

159159
log.debug("Incident channels: %s", inc.channels)
160+
# FIXME: populate channels_set from inc.channels for SLFO to get the product version
160161
for issue, channel in data["issues"].items():
161162
log.debug(
162163
"Meta-data channel: %s, %s, %s", channel.product, channel.version, arch
@@ -217,7 +218,7 @@ def _handle_incident( # pylint: disable=too-many-return-statements
217218
full_post["openqa"][key] = str(value.id)
218219

219220
full_post["openqa"]["INCIDENT_REPO"] = ",".join(
220-
sorted(self._make_repo_url(inc, chan) for chan in channels_set)
221+
sorted(self._make_repo_url(inc, chan, arch) for chan in channels_set)
221222
) # sorted for testability
222223

223224
full_post["qem"]["withAggregate"] = True
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
<multibuild>
2+
<flavor>sles_aarch64</flavor>
3+
<flavor>sles_x86_64</flavor>
4+
5+
<flavor>sles_offline_aarch64</flavor>
6+
<flavor>sles_offline_x86_64</flavor>
7+
8+
<flavor>sles_sap_x86_64</flavor>
9+
10+
<flavor>sles_ha_aarch64</flavor>
11+
<flavor>sles_ha_x86_64</flavor>
12+
13+
<flavor>sles_online_aarch64</flavor>
14+
<flavor>sles_online_x86_64</flavor>
15+
</multibuild>

responses/build-results-124-SUSE:SLFO:1.1.99:PullRequest:124:SLES.xml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,28 +21,28 @@
2121
<status package="busybox-image" code="excluded" />
2222
<status package="gcc-15-image" code="succeeded" />
2323
</result>
24-
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="aarch64" code="published" state="published">
24+
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="aarch64" code="published" state="unpublished">
2525
<scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
2626
<scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
2727
<status package="base-image" code="excluded" />
2828
<status package="busybox-image" code="excluded" />
2929
<status package="gcc-15-image" code="succeeded" />
3030
</result>
31-
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="local" code="published" state="published">
31+
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="local" code="published" state="unpublished">
3232
<scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
3333
<scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
3434
<status package="base-image" code="excluded" />
3535
<status package="busybox-image" code="excluded" />
3636
<status package="gcc-15-image" code="succeeded" />
3737
</result>
38-
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="ppc64le" code="published" state="published">
38+
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="ppc64le" code="published" state="unpublished">
3939
<scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
4040
<scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
4141
<status package="base-image" code="excluded" />
4242
<status package="busybox-image" code="excluded" />
4343
<status package="gcc-15-image" code="succeeded" />
4444
</result>
45-
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="x86_64" code="published" state="published">
45+
<result project="SUSE:SLFO:1.1.99:PullRequest:124:SLES" repository="product" arch="x86_64" code="published" state="unpublished">
4646
<scmsync>https://src.suse.de/products/SLES#15.99</scmsync>
4747
<scminfo>18bfa2a23fb7985d5d0cc356474a96a19d91d2d8652442badf7f13bc07cd1f3d</scminfo>
4848
<status package="base-image" code="excluded" />

tests/test_aggregate.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,7 @@ class Repos(NamedTuple):
8989
product: str
9090
version: str
9191
arch: str
92+
product_version: str = ""
9293

9394
class MockIncident:
9495
def __init__(self, repo, embargoed):

0 commit comments

Comments
 (0)