diff --git a/CONTENTS.md b/CONTENTS.md index 4e6c1b45a..0b91de1b1 100644 --- a/CONTENTS.md +++ b/CONTENTS.md @@ -117,15 +117,6 @@ Mirrors repositories from the build service to a local directory. * Package: openSUSE-release-tools * Usage: Used by other tools like `pkglistgen` or `repocheck` -#### biarchtool - -Manages biarch packages - -* Source: [biarchtool.py](biarchtool.py) -* Documentation: -- -* Package: openSUSE-release-tools -* Usage: ? - #### build-fail-reminder Sends e-mails about packages failing to build for a long time. diff --git a/biarchtool.py b/biarchtool.py deleted file mode 100755 index ba25b94b0..000000000 --- a/biarchtool.py +++ /dev/null @@ -1,374 +0,0 @@ -#!/usr/bin/python3 - -from lxml import etree as ET -import sys -import cmdln -import logging -from urllib.error import HTTPError - -import ToolBase - -logger = logging.getLogger() - -FACTORY = "openSUSE:Factory" - - -class BiArchTool(ToolBase.ToolBase): - - def __init__(self, project): - ToolBase.ToolBase.__init__(self) - self.project = project - self.biarch_packages = None - self._has_baselibs = dict() - self.packages = [] - self.arch = 'i586' - self.rdeps = None - self.package_metas = dict() - self.whitelist = { - 'i586': set([ - 'bzr', - 'git', - # _link to baselibs package - 'libjpeg62-turbo', - 'mercurial', - 'subversion', - 'ovmf']) - } - self.blacklist = { - 'i586': set([ - 'belle-sip', - 'release-notes-openSUSE', - 'openSUSE-EULAs', # translate-toolkit - 'skelcd-openSUSE', - 'plasma5-workspace', - 'patterns-base', - 'patterns-fonts', - 'patterns-rpm-macros', - 'patterns-yast', - '000release-packages']) - } - - def get_filelist(self, project, package, expand=False): - query = {} - if expand: - query['expand'] = 1 - root = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, package], query))) - return [node.get('name') for node in root.findall('entry')] - - def has_baselibs(self, package): - if package in self._has_baselibs: - return self._has_baselibs[package] - - 
is_multibuild = False - srcpkgname = package - if ':' in package: - is_multibuild = True - srcpkgname = package.split(':')[0] - - ret = False - files = self.get_filelist(self.project, srcpkgname) - if 'baselibs.conf' in files: - logger.debug('%s has baselibs', package) - if is_multibuild: - logger.warning('%s is multibuild and has baselibs. canot handle that!', package) - else: - ret = True - elif '_link' in files: - files = self.get_filelist(self.project, srcpkgname, expand=True) - if 'baselibs.conf' in files: - logger.warning('%s is linked to a baselibs package', package) - elif is_multibuild: - logger.warning('%s is multibuild', package) - self._has_baselibs[package] = ret - return ret - - def is_biarch_recursive(self, package): - logger.debug(package) - if package in self.blacklist[self.arch]: - logger.debug('%s is blacklisted', package) - return False - if package in self.biarch_packages: - logger.debug('%s is known biarch package', package) - return True - if package in self.whitelist[self.arch]: - logger.debug('%s is whitelisted', package) - return True - r = self.has_baselibs(package) - if r: - return r - if package in self.rdeps: - for p in self.rdeps[package]: - r = self.is_biarch_recursive(p) - if r: - break - return r - - def _init_biarch_packages(self): - if self.biarch_packages is None: - if ':Rings' in self.project: - self.biarch_packages = set() - else: - self.biarch_packages = set(self.meta_get_packagelist("%s:Rings:0-Bootstrap" % self.project)) - self.biarch_packages |= set(self.meta_get_packagelist("%s:Rings:1-MinimalX" % self.project)) - - self._init_rdeps() - self.fill_package_meta() - - def fill_package_meta(self): - url = self.makeurl(['search', 'package'], "match=[@project='%s']" % self.project) - root = ET.fromstring(self.cached_GET(url)) - for p in root.findall('package'): - name = p.attrib['name'] - self.package_metas[name] = p - - def _init_rdeps(self): - if self.rdeps is not None: - return - self.rdeps = dict() - url = 
self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo'], {'view': 'revpkgnames'}) - x = ET.fromstring(self.cached_GET(url)) - for pnode in x.findall('package'): - name = pnode.get('name') - for depnode in pnode.findall('pkgdep'): - depname = depnode.text - if depname == name: - logger.warning('%s requires itself for build', name) - continue - self.rdeps.setdefault(name, set()).add(depname) - - def select_packages(self, packages): - if packages == '__all__': - self.packages = self.meta_get_packagelist(self.project) - elif packages == '__latest__': - self.packages = self.latest_packages(self.project) - else: - self.packages = packages - - def remove_explicit_enable(self): - - self._init_biarch_packages() - - resulturl = self.makeurl(['build', self.project, '_result']) - result = ET.fromstring(self.cached_GET(resulturl)) - - packages = set() - - for n in result.findall("./result[@arch='{}']/status".format(self.arch)): - if n.get('code') not in ('disabled', 'excluded'): - packages.add(n.get('package')) - - for pkg in sorted(packages): - changed = False - - logger.debug("processing %s", pkg) - if pkg not in self.package_metas: - logger.error("%s not found", pkg) - continue - pkgmeta = self.package_metas[pkg] - - for build in pkgmeta.findall("./build"): - for n in build.findall("./enable[@arch='{}']".format(self.arch)): - logger.debug("disable %s", pkg) - build.remove(n) - changed = True - - if changed: - try: - pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta']) - self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta)) - if self.caching: - self._invalidate__cached_GET(pkgmetaurl) - except HTTPError as e: - logger.error('failed to update %s: %s', pkg, e) - - def add_explicit_disable(self, wipebinaries=False): - - self._init_biarch_packages() - - for pkg in self.packages: - - changed = False - - logger.debug("processing %s", pkg) - if pkg not in self.package_metas: - logger.error("%s not found", pkg) - continue - pkgmeta = 
self.package_metas[pkg] - - build = pkgmeta.findall("./build") - if not build: - logger.debug('disable %s for %s', pkg, self.arch) - bn = pkgmeta.find('build') - if bn is None: - bn = ET.SubElement(pkgmeta, 'build') - ET.SubElement(bn, 'disable', {'arch': self.arch}) - changed = True - - if changed: - try: - pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta']) - self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta)) - if self.caching: - self._invalidate__cached_GET(pkgmetaurl) - if wipebinaries: - self.http_POST(self.makeurl(['build', self.project], { - 'cmd': 'wipe', - 'arch': self.arch, - 'package': pkg})) - except HTTPError as e: - logger.error('failed to update %s: %s', pkg, e) - - def enable_baselibs_packages(self, force=False, wipebinaries=False): - self._init_biarch_packages() - todo = dict() - for pkg in self.packages: - logger.debug("processing %s", pkg) - if pkg not in self.package_metas: - logger.error("%s not found", pkg) - continue - pkgmeta = self.package_metas[pkg] - - is_enabled = None - is_disabled = None - must_disable = None - changed = None - - for n in pkgmeta.findall("./build/enable[@arch='{}']".format(self.arch)): - is_enabled = True - for n in pkgmeta.findall("./build/disable[@arch='{}']".format(self.arch)): - is_disabled = True - - if force: - must_disable = False - - if must_disable is None: - if self.is_biarch_recursive(pkg): - must_disable = False - else: - must_disable = True - - if not must_disable: - if is_disabled: - logger.info('enabling %s for %s', pkg, self.arch) - for build in pkgmeta.findall("./build"): - for n in build.findall("./disable[@arch='{}']".format(self.arch)): - build.remove(n) - changed = True - if not changed: - logger.error('build tag not found in %s/%s!?', pkg, self.arch) - else: - logger.debug('%s already enabled for %s', pkg, self.arch) - elif must_disable: - if not is_disabled: - logger.info('disabling %s for %s', pkg, self.arch) - bn = pkgmeta.find('build') - if bn is None: - bn = 
ET.SubElement(pkgmeta, 'build') - ET.SubElement(bn, 'disable', {'arch': self.arch}) - changed = True - else: - logger.debug('%s already disabled for %s', pkg, self.arch) - - if is_enabled: - logger.info('removing explicit enable %s for %s', pkg, self.arch) - for build in pkgmeta.findall("./build"): - for n in build.findall("./enable[@arch='{}']".format(self.arch)): - build.remove(n) - changed = True - if not changed: - logger.error('build tag not found in %s/%s!?', pkg, self.arch) - - if changed: - todo[pkg] = pkgmeta - - if todo: - logger.info("applying changes") - for pkg in sorted(todo.keys()): - pkgmeta = todo[pkg] - try: - pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta']) - self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta)) - if self.caching: - self._invalidate__cached_GET(pkgmetaurl) - - if wipebinaries and pkgmeta.find("./build/disable[@arch='{}']".format(self.arch)) is not None: - logger.debug("wiping %s", pkg) - self.http_POST(self.makeurl(['build', self.project], { - 'cmd': 'wipe', - 'arch': self.arch, - 'package': pkg})) - except HTTPError as e: - logger.error('failed to update %s: %s', pkg, e) - - -class CommandLineInterface(ToolBase.CommandLineInterface): - - def __init__(self, *args, **kwargs): - ToolBase.CommandLineInterface.__init__(self, args, kwargs) - - def get_optparser(self): - parser = ToolBase.CommandLineInterface.get_optparser(self) - parser.add_option('-p', '--project', dest='project', metavar='PROJECT', - help='project to process (default: %s)' % FACTORY, - default=FACTORY) - return parser - - def setup_tool(self): - tool = BiArchTool(self.options.project) - return tool - - def _select_packages(self, all, packages): - if packages: - self.tool.select_packages(packages) - elif all: - self.tool.select_packages('__all__') - else: - self.tool.select_packages('__latest__') - - @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes") - @cmdln.option('-a', '--all', action='store_true', 
help='process all packages') - @cmdln.option('-f', '--force', action='store_true', help='enable in any case') - @cmdln.option('--wipe', action='store_true', help='also wipe binaries') - def do_enable_baselibs_packages(self, subcmd, opts, *packages): - """${cmd_name}: enable build for packages in Ring 0 or 1 or with - baselibs.conf - - ${cmd_usage} - ${cmd_option_list} - """ - def work(): - self._select_packages(opts.all, packages) - self.tool.enable_baselibs_packages(force=opts.force, wipebinaries=opts.wipe) - - self.runner(work, opts.interval) - - @cmdln.option('-a', '--all', action='store_true', help='process all packages') - def do_remove_explicit_enable(self, subcmd, opts, *packages): - """${cmd_name}: remove all explicit enable tags from packages - - ${cmd_usage} - ${cmd_option_list} - """ - - self.tool.remove_explicit_enable() - - @cmdln.option('-a', '--all', action='store_true', help='process all packages') - @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes") - @cmdln.option('--wipe', action='store_true', help='also wipe binaries') - def do_add_explicit_disable(self, subcmd, opts, *packages): - """${cmd_name}: add explicit disable to all packages - - ${cmd_usage} - ${cmd_option_list} - """ - - def work(): - self._select_packages(opts.all, packages) - self.tool.add_explicit_disable(wipebinaries=opts.wipe) - - self.runner(work, opts.interval) - - -if __name__ == "__main__": - app = CommandLineInterface() - sys.exit(app.main()) diff --git a/cleanup_32bit.py b/cleanup_32bit.py new file mode 100755 index 000000000..9b256c95d --- /dev/null +++ b/cleanup_32bit.py @@ -0,0 +1,103 @@ +#!/usr/bin/python3 +from lxml import etree as ET +import sys +import ToolBase +from osclib.core import fileinfo_ext_all +from osclib.conf import Config +from osclib.stagingapi import StagingAPI +from osclib.cleanup_rings import CleanupRings + +class Cleanup32bit(ToolBase.ToolBase): + def run(self, prj: str, arch: str, verbose: 
bool=False): + Config(self.apiurl, prj) + cr = CleanupRings(StagingAPI(self.apiurl, prj)) + cr.force_required = { + "wine": "wine", "wine-nine-standalone": "wine", + "wine:staging": "wine", + "gstreamer": "boo#1210244", + "gstreamer-plugins-base": "boo#1210244", + "gstreamer-plugins-bad": "boo#1210244", + "gstreamer-plugins-good": "boo#1210244", + "gstreamer-plugins-ugly": "boo#1210244", + "gstreamer-plugins-libav": "boo#1210244", + "mangohud": "boo#1210199", + "gamemode": "boo#1210199", + "alsa-plugins": "boo#1210304", + "alsa-oss": "boo#1210137", + "apitrace": "boo#1210305", + "Mesa-demo": "boo#1210145", + "vulkan-tools": "boo#1210145", + "xf86-video-intel": "boo#1210145", + "grub2": "Creates grub2-i386-efi for x86_64", + "python:python-base": "File deps: some texlive stuff needs python2 and snobol4", + "snobol4": "File deps: some texlive stuff needs python2 and snobol4", + "gnome-keyring": "32bit PAM stack", + "pam_kwallet": "32bit PAM stack", + "libnvidia-egl-wayland": "boo#1214917", + } + + cr.fill_pkginfo(prj, "standard", arch) + + # _builddepinfo only has builddeps which might trigger a rebuild, + # but Preinstall, Support and service packages don't. Look at the + # actual builddep of a randomly chosen (tm) package to get the former, + # check_depinfo handles obs-service-*. 
+        for bdep in cr.package_get_bdeps(prj, "glibc", "standard", arch):
+            if bdep not in cr.force_required:
+                cr.force_required[bdep] = "bdep of glibc"
+
+        # Make sure those pkgs are also installable
+        for wppra in [("openSUSE:Factory:NonFree", "steam", "standard", "x86_64")]:
+            (wprj, wpkg, wrepo, warch) = wppra
+            for fileinfo in fileinfo_ext_all(self.apiurl, wprj, wrepo, warch, wpkg):
+                for providedby in fileinfo.findall('requires_ext/providedby[@name]'):
+                    name = providedby.get('name')
+                    # Those are not built as i586
+                    if name.startswith("libgcc") or name.startswith("libstdc++"):
+                        continue
+
+                    if name.endswith("-32bit"):
+                        name = name[:-len("-32bit")]
+                    cr.force_required[cr.bin2src[name]] = "Runtime dep of " + wpkg
+
+        pkgdeps = cr.check_depinfo(prj, "i586", True)
+
+        print("Not needed:")
+        print("\n".join([src for src in sorted(cr.sources) if src not in pkgdeps]))
+
+        print("List of onlybuilds:")
+        print("%ifarch %ix86")
+        if verbose:
+            print("\n".join([f"# {pkgdeps[src]}\nBuildFlags: onlybuild:{src}" for src in sorted(pkgdeps)]))
+        else:
+            print("\n".join([f"BuildFlags: onlybuild:{src}" for src in sorted(pkgdeps)]))
+
+        print("%endif")
+
+class CommandLineInterface(ToolBase.CommandLineInterface):
+    def get_optparser(self):
+        parser = ToolBase.CommandLineInterface.get_optparser(self)
+        parser.add_option("-p", "--project", dest="project",
+                          help="project to process (default: openSUSE:Factory)",
+                          default="openSUSE:Factory")
+        parser.add_option("-a", "--arch", dest="arch",
+                          help="arch to process (default: i586)",
+                          default="i586")
+        return parser
+
+    def setup_tool(self):
+        return Cleanup32bit()
+
+    def do_run(self, subcmd, opts, *packages):
+        """${cmd_name}: Go through all packages in the given project that build
+        for the given arch and check whether they are necessary for the project
+        to fulfill build and runtime deps for certain packages.
+ + ${cmd_usage} + ${cmd_option_list} + """ + self.tool.run(self.options.project, self.options.arch, verbose=self.options.verbose) + +if __name__ == "__main__": + app = CommandLineInterface() + sys.exit(app.main()) diff --git a/dist/package/openSUSE-release-tools.spec b/dist/package/openSUSE-release-tools.spec index 99a9310c8..c51526aa0 100644 --- a/dist/package/openSUSE-release-tools.spec +++ b/dist/package/openSUSE-release-tools.spec @@ -362,7 +362,6 @@ exit 0 %files %doc README.md -%{_bindir}/osrt-biarchtool %{_bindir}/osrt-bs_mirrorfull %{_bindir}/osrt-bugowner %{_bindir}/osrt-build-fail-reminder diff --git a/osclib/cleanup_rings.py b/osclib/cleanup_rings.py index 5d6e9820a..e984148bb 100644 --- a/osclib/cleanup_rings.py +++ b/osclib/cleanup_rings.py @@ -14,24 +14,17 @@ def __init__(self, api): self.api = api self.links = {} self.commands = [] - self.whitelist = [ + self.force_required = { # Keep this in ring 1, even though ring 0 builds the main flavor # and ring 1 has that disabled. 
- 'automake:testsuite', - 'meson:test', - # buildtime services aren't visible in _builddepinfo - 'obs-service-recompress', - 'obs-service-set_version', - 'obs-service-tar_scm', - # Used by ARM only, but part of oS:F ring 1 in general - 'u-boot', - 'raspberrypi-firmware-dt', - 'raspberrypi-firmware-config', - # Added manually to notice failures early - 'vagrant', - # https://github.com/openSUSE/open-build-service/issues/14129 - 'snobol4', - ] + 'automake:testsuite': 'Keep in Ring 1', + 'meson:test': 'Keep in Ring 1', + 'u-boot': 'ARM Ring1', + 'raspberrypi-firmware-dt': 'ARM Ring1', + 'raspberrypi-firmware-config': 'ARM Ring1', + 'vagrant': 'Added manually to notice failures early', + 'snobol4': 'https://github.com/openSUSE/open-build-service/issues/14129' + } def perform(self): for index, ring in enumerate(self.api.rings): @@ -113,24 +106,30 @@ def repo_state_acceptable(self, project): return False return True + def package_get_bdeps(self, prj, pkg, repo, arch): + "For a given package, return which source packages it has as build deps." 
+        ret = set()
+        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, pkg, '_buildinfo'])
+        root = ET.parse(http_GET(url)).getroot()
+        # Keep the package itself
+        ret.add(pkg.split(':')[0])
+        for bdep in root.findall('bdep'):
+            if 'name' not in bdep.attrib:
+                continue
+            b = bdep.attrib['name']
+            if b not in self.bin2src:
+                print("{} not found in bin2src".format(b))
+                continue
+            ret.add(self.bin2src[b])
+
+        return ret
+
     def check_image_bdeps(self, project, arch):
         url = makeurl(self.api.apiurl, ['build', project, '_result'])
         root = ET.parse(http_GET(url)).getroot()
         for image in root.xpath(f"result[@repository = 'images' and @arch = '{arch}']/status[@code != 'excluded' and @code != 'disabled']"):
-            dvd = image.get('package')
-            url = makeurl(self.api.apiurl, ['build', project, 'images', arch, dvd, '_buildinfo'])
-            root = ET.parse(http_GET(url)).getroot()
-            # Don't delete the image itself
-            self.pkgdeps[dvd.split(':')[0]] = 'MYdvd{}'.format(self.api.rings.index(project))
-            for bdep in root.findall('bdep'):
-                if 'name' not in bdep.attrib:
-                    continue
-                b = bdep.attrib['name']
-                if b not in self.bin2src:
-                    print("{} not found in bin2src".format(b))
-                    continue
-                b = self.bin2src[b]
-                self.pkgdeps[b] = 'MYdvd{}'.format(self.api.rings.index(project))
+            for bdep in self.package_get_bdeps(project, image.get('package'), 'images', arch):
+                self.pkgdeps[bdep] = 'MYdvd{}'.format(self.api.rings.index(project))
 
     def check_buildconfig(self, project):
         url = makeurl(self.api.apiurl, ['build', project, 'standard', '_buildconfig'])
@@ -171,11 +170,7 @@ def check_depinfo_ring(self, prj, nextprj):
         for arch in reversed(self.api.cstaging_archs):
             print(f"Arch {arch}")
-            # Dict of needed source pkg -> reason why it's needed
-            self.pkgdeps = {}
-            # Note: bin2src is not cleared, that way ring1 pkgs can depend
-            # on binaries from ring0.
-            self.fill_pkginfo(prj, 'standard', arch)
+            # TODO: This won't work. Also keep all_needed_sources across archs
             # 1. No images built, just for bootstrapping the rpm buildenv.
             # 2. 
Treat multibuild flavors as independent packages @@ -188,71 +183,7 @@ def check_depinfo_ring(self, prj, nextprj): else: self.check_image_bdeps(prj, arch) - # Keep all preinstallimages - for pkg in self.sources: - if pkg.startswith("preinstallimage"): - self.pkgdeps[pkg] = "preinstallimage" - - # Treat all binaries in the whitelist as needed - for pkg in self.whitelist: - if pkg in self.sources: - self.pkgdeps[pkg] = "whitelist" - - to_visit = set(self.pkgdeps) - # print("Directly needed: ", to_visit) - - url = makeurl(self.api.apiurl, ['build', prj, 'standard', arch, '_builddepinfo'], {"view": "pkgnames"}) - root = ET.parse(http_GET(url)).getroot() - - while len(to_visit) > 0: - new_deps = {} - for pkg in to_visit: - if not is_ring0: - # Outside of ring0, if one multibuild flavor is needed, add all of them - mainpkg = pkg.split(":")[0] - for src in self.sources: - if src.startswith(f"{mainpkg}:"): - new_deps[src] = pkg - - # Same for link groups - for ldst, lsrc in self.links.items(): - if lsrc == mainpkg: - new_deps[ldst] = pkg - elif ldst == mainpkg: - new_deps[lsrc] = pkg - - # Add all packages which this package depends on - for dep in root.xpath(f"package[@name='{pkg}']/pkgdep"): - new_deps[dep.text] = pkg - - # Filter out already visited deps - to_visit = set(new_deps).difference(set(self.pkgdeps)) - for pkg, reason in new_deps.items(): - self.pkgdeps[pkg] = reason - - all_needed_sources |= set(self.pkgdeps) - - # _builddepinfo only takes care of build deps. runtime deps are handled by - # fileinfo_ext_all, but that's really expensive. Thus the "obvious" algorithm - # of walking from needed packages to their deps would be too slow. Instead, - # walk from possibly unneeded packages (much fewer than needed) and check whether - # they satisfy runtime deps of needed packages. - # Do this after each batch of buildtime deps were resolved to minimize lookups. 
- if len(to_visit) != 0: - continue - - # Technically this should be self.pkgdeps, but on i586 pretty much nothing - # is needed (no built images) so we continue where x86_64 left off - maybe_unneeded = self.sources.difference(all_needed_sources) - for pkg in sorted(maybe_unneeded): - requiredby = self.package_get_requiredby(prj, pkg, 'standard', arch) - requiredby = requiredby.intersection(all_needed_sources) - # Required by needed packages? - if len(requiredby): - print(f"# {pkg} needed by {requiredby}") - # Include it and also resolve its build deps - self.pkgdeps[pkg] = requiredby - to_visit.add(pkg) + all_needed_sources = self.check_depinfo(prj, arch, not is_ring0) self.commands.append(f"# For {prj}:") for source in sorted(self.sources): @@ -263,3 +194,84 @@ def check_depinfo_ring(self, prj, nextprj): self.commands.append('osc rdelete -m cleanup {} {}'.format(prj, source)) if nextprj: self.commands.append('osc linkpac {} {} {}'.format(self.api.project, source, nextprj)) + + def check_depinfo(self, prj, arch, multibuild_independent): + all_needed_sources = set() + + # Dict of needed source pkg -> reason why it's needed + self.pkgdeps = {} + # Note: bin2src is not cleared, that way ring1 pkgs can depend + # on binaries from ring0. 
+ self.fill_pkginfo(prj, 'standard', arch) + + for pkg in self.sources: + if pkg.startswith("preinstallimage"): + # Keep all preinstallimages + self.pkgdeps[pkg] = "preinstallimage" + elif pkg.startswith("obs-service-"): + # buildtime services aren't visible in _builddepinfo + self.pkgdeps[pkg] = "OBS service" + + # Include all force required packages + for pkg in self.force_required: + if pkg in self.sources: + self.pkgdeps[pkg] = "Forced: " + self.force_required[pkg] + + to_visit = set(self.pkgdeps) + # print("Directly needed: ", to_visit) + + url = makeurl(self.api.apiurl, ['build', prj, 'standard', arch, '_builddepinfo'], {"view": "pkgnames"}) + root = ET.parse(http_GET(url)).getroot() + + while len(to_visit) > 0: + new_deps = {} + for pkg in to_visit: + if not multibuild_independent: + # Outside of ring0, if one multibuild flavor is needed, add all of them + mainpkg = pkg.split(":")[0] + for src in self.sources: + if src.startswith(f"{mainpkg}:"): + new_deps[src] = mainpkg + + # Same for link groups + for ldst, lsrc in self.links.items(): + if lsrc == mainpkg: + new_deps[ldst] = mainpkg + elif ldst == mainpkg: + new_deps[lsrc] = mainpkg + + # Add all packages which this package depends on + for dep in root.xpath(f"package[@name='{pkg}']/pkgdep"): + new_deps[dep.text] = f"Builddep of {pkg}" + + # Filter out already visited deps + to_visit = set(new_deps).difference(set(self.pkgdeps)) + for pkg, reason in new_deps.items(): + self.pkgdeps[pkg] = reason + + all_needed_sources |= set(self.pkgdeps) + + # _builddepinfo only takes care of build deps. runtime deps are handled by + # fileinfo_ext_all, but that's really expensive. Thus the "obvious" algorithm + # of walking from needed packages to their deps would be too slow. Instead, + # walk from possibly unneeded packages (much fewer than needed) and check whether + # they satisfy runtime deps of needed packages. + # Do this after each batch of buildtime deps were resolved to minimize lookups. 
+ if len(to_visit) != 0: + continue + + # Technically this should be self.pkgdeps, but on i586 pretty much nothing + # is needed (no built images) so we continue where x86_64 left off + maybe_unneeded = self.sources.difference(all_needed_sources) + + for pkg in sorted(maybe_unneeded): + requiredby = self.package_get_requiredby(prj, pkg, 'standard', arch) + requiredby = requiredby.intersection(all_needed_sources) + # Required by needed packages? + if len(requiredby): + # Include it and also resolve its build deps + if pkg not in self.pkgdeps: + self.pkgdeps[pkg] = f"Runtime dep of {', '.join(requiredby)}" + to_visit.add(pkg) + + return self.pkgdeps