asda?‰PNG  IHDR ? f ??C1 sRGB ??é gAMA ±? üa pHYs ? ??o¨d GIDATx^íüL”÷e÷Y?a?("Bh?_ò???¢§?q5k?*:t0A-o??¥]VkJ¢M??f?±8\k2íll£1]q?ù???T builddep.py000064400000022202151030231510006673 0ustar00# builddep.py # Install all the deps needed to build this package. # # Copyright (C) 2013-2015 Red Hat, Inc. # Copyright (C) 2015 Igor Gnatenko # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals from dnfpluginscore import _, logger import argparse import dnf import dnf.cli import dnf.exceptions import dnf.rpm.transaction import dnf.yum.rpmtrans import libdnf.repo import os import rpm import shutil import tempfile @dnf.plugin.register_command class BuildDepCommand(dnf.cli.Command): aliases = ('builddep', 'build-dep') msg = "Install build dependencies for package or spec file" summary = _(msg) usage = _("[PACKAGE|PACKAGE.spec]") def __init__(self, cli): super(BuildDepCommand, self).__init__(cli) self._rpm_ts = dnf.rpm.transaction.initReadOnlyTransaction() self.tempdirs = [] def __del__(self): for temp_dir in self.tempdirs: shutil.rmtree(temp_dir) def _download_remote_file(self, pkgspec): """ In case pkgspec is a remote URL, download it to a temporary location and use the temporary file instead. """ location = dnf.pycomp.urlparse.urlparse(pkgspec) if location[0] in ('file', ''): # just strip the file:// prefix return location.path downloader = libdnf.repo.Downloader() temp_dir = tempfile.mkdtemp(prefix="dnf_builddep_") temp_file = os.path.join(temp_dir, os.path.basename(pkgspec)) self.tempdirs.append(temp_dir) temp_fo = open(temp_file, "wb+") try: downloader.downloadURL(self.base.conf._config, pkgspec, temp_fo.fileno()) except RuntimeError as ex: raise finally: temp_fo.close() return temp_file @staticmethod def set_argparser(parser): def macro_def(arg): arglist = arg.split(None, 1) if arg else [] if len(arglist) < 2: msg = _("'%s' is not of the format 'MACRO EXPR'") % arg raise argparse.ArgumentTypeError(msg) return arglist parser.add_argument('packages', nargs='+', metavar='package', help=_('packages with builddeps to install')) parser.add_argument('-D', '--define', action='append', default=[], metavar="'MACRO EXPR'", type=macro_def, help=_('define a macro for spec file parsing')) parser.add_argument('--skip-unavailable', action='store_true', default=False, help=_('skip build 
dependencies not available in repositories')) ptype = parser.add_mutually_exclusive_group() ptype.add_argument('--spec', action='store_true', help=_('treat commandline arguments as spec files')) ptype.add_argument('--srpm', action='store_true', help=_('treat commandline arguments as source rpm')) def pre_configure(self): if not self.opts.rpmverbosity: self.opts.rpmverbosity = 'error' def configure(self): demands = self.cli.demands demands.available_repos = True demands.resolving = True demands.root_user = True demands.sack_activation = True # enable source repos only if needed if not (self.opts.spec or self.opts.srpm): for pkgspec in self.opts.packages: if not (pkgspec.endswith('.src.rpm') or pkgspec.endswith('.nosrc.rpm') or pkgspec.endswith('.spec')): self.base.repos.enable_source_repos() break def run(self): rpmlog = dnf.yum.rpmtrans.RPMTransaction(self.base) # Push user-supplied macro definitions for spec parsing for macro in self.opts.define: rpm.addMacro(macro[0], macro[1]) pkg_errors = False for pkgspec in self.opts.packages: pkgspec = self._download_remote_file(pkgspec) try: if self.opts.srpm: self._src_deps(pkgspec) elif self.opts.spec: self._spec_deps(pkgspec) elif pkgspec.endswith('.src.rpm') or pkgspec.endswith('nosrc.rpm'): self._src_deps(pkgspec) elif pkgspec.endswith('.spec'): self._spec_deps(pkgspec) else: self._remote_deps(pkgspec) except dnf.exceptions.Error as e: for line in rpmlog.messages(): logger.error(_("RPM: {}").format(line)) logger.error(e) pkg_errors = True # Pop user macros so they don't affect future rpm calls for macro in self.opts.define: rpm.delMacro(macro[0]) if pkg_errors: raise dnf.exceptions.Error(_("Some packages could not be found.")) @staticmethod def _rpm_dep2reldep_str(rpm_dep): return rpm_dep.DNEVR()[2:] def _install(self, reldep_str): # Try to find something by provides sltr = dnf.selector.Selector(self.base.sack) sltr.set(provides=reldep_str) found = sltr.matches() if not found and reldep_str.startswith("/"): # Nothing 
matches by provides and since it's file, try by files sltr = dnf.selector.Selector(self.base.sack) sltr.set(file=reldep_str) found = sltr.matches() if not found and not reldep_str.startswith("("): # No provides, no files # Richdeps can have no matches but it could be correct (solver must decide later) msg = _("No matching package to install: '%s'") logger.warning(msg, reldep_str) return self.opts.skip_unavailable is True if found: already_inst = self.base._sltr_matches_installed(sltr) if already_inst: for package in already_inst: dnf.base._msg_installed(package) self.base._goal.install(select=sltr, optional=False) return True def _src_deps(self, src_fn): fd = os.open(src_fn, os.O_RDONLY) try: h = self._rpm_ts.hdrFromFdno(fd) except rpm.error as e: if str(e) == 'error reading package header': e = _("Failed to open: '%s', not a valid source rpm file.") % src_fn os.close(fd) raise dnf.exceptions.Error(e) os.close(fd) ds = h.dsFromHeader('requirename') done = True for dep in ds: reldep_str = self._rpm_dep2reldep_str(dep) if reldep_str.startswith('rpmlib('): continue done &= self._install(reldep_str) if not done: err = _("Not all dependencies satisfied") raise dnf.exceptions.Error(err) if self.opts.define: logger.warning(_("Warning: -D or --define arguments have no meaning " "for source rpm packages.")) def _spec_deps(self, spec_fn): try: spec = rpm.spec(spec_fn) except ValueError as ex: msg = _("Failed to open: '%s', not a valid spec file: %s") % ( spec_fn, ex) raise dnf.exceptions.Error(msg) done = True for dep in rpm.ds(spec.sourceHeader, 'requires'): reldep_str = self._rpm_dep2reldep_str(dep) done &= self._install(reldep_str) if not done: err = _("Not all dependencies satisfied") raise dnf.exceptions.Error(err) def _remote_deps(self, package): available = dnf.subject.Subject(package).get_best_query( self.base.sack).filter(arch__neq="src") sourcenames = list({pkg.source_name for pkg in available}) pkgs = self.base.sack.query().available().filter( name=(sourcenames + 
[package]), arch="src").latest().run() if not pkgs: raise dnf.exceptions.Error(_('no package matched: %s') % package) done = True for pkg in pkgs: for req in pkg.requires: done &= self._install(str(req)) if not done: err = _("Not all dependencies satisfied") raise dnf.exceptions.Error(err) generate_completion_cache.py000064400000007554151030231510012266 0ustar00# coding=utf-8 # generate_completion_cache.py - generate cache for dnf bash completion # Copyright © 2013 Elad Alfassa # Copyright (C) 2014-2015 Igor Gnatenko # Copyright (C) 2015 Red Hat, Inc. # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import ucd from dnfpluginscore import logger import dnf import os.path import sqlite3 class BashCompletionCache(dnf.Plugin): name = 'generate_completion_cache' def __init__(self, base, cli): super(BashCompletionCache, self).__init__(base, cli) self.base = base self.cache_file = "/var/cache/dnf/packages.db" @staticmethod def _out(msg): logger.debug('Completion plugin: %s', msg) def sack(self): ''' Generate cache of available packages ''' # We generate this cache only if the repos were just freshed or if the # cache file doesn't exist fresh = False for repo in self.base.repos.iter_enabled(): if repo.metadata is not None and repo.metadata.fresh: # One fresh repo is enough to cause a regen of the cache fresh = True break if not os.path.exists(self.cache_file) or fresh: try: with sqlite3.connect(self.cache_file) as conn: self._out('Generating completion cache...') cur = conn.cursor() cur.execute( "create table if not exists available (pkg TEXT)") cur.execute( "create unique index if not exists " "pkg_available ON available(pkg)") cur.execute("delete from available") avail_pkgs = self.base.sack.query().available() avail_pkgs_insert = [[str(x)] for x in avail_pkgs if x.arch != "src"] cur.executemany("insert or ignore into available values (?)", avail_pkgs_insert) conn.commit() except sqlite3.OperationalError as e: self._out("Can't write completion cache: %s" % ucd(e)) def transaction(self): ''' Generate cache of installed packages ''' if not self.transaction: return try: with sqlite3.connect(self.cache_file) as conn: self._out('Generating completion cache...') cur = conn.cursor() cur.execute("create table if not exists installed (pkg TEXT)") cur.execute( "create unique index if not exists " "pkg_installed ON installed(pkg)") cur.execute("delete from installed") inst_pkgs = dnf.sack._rpmdb_sack(self.base).query().installed() inst_pkgs_insert = [[str(x)] for x in inst_pkgs if x.arch 
!= "src"] cur.executemany("insert or ignore into installed values (?)", inst_pkgs_insert) conn.commit() except sqlite3.OperationalError as e: self._out("Can't write completion cache: %s" % ucd(e)) copr.py000064400000073132151030231510006056 0ustar00# supplies the 'copr' command. # # Copyright (C) 2014-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function import glob import itertools import json import os import re import shutil import stat import sys import base64 import json from dnfpluginscore import _, logger import dnf from dnf.pycomp import PY3 from dnf.i18n import ucd import rpm # Attempt importing the linux_distribution function from distro # If that fails, attempt to import the deprecated implementation # from the platform module. 
try: from distro import name, version, codename, os_release_attr # Re-implement distro.linux_distribution() to avoid a deprecation warning def linux_distribution(): return (name(), version(), codename()) except ImportError: def os_release_attr(_): return "" try: from platform import linux_distribution except ImportError: # Simple fallback for distributions that lack an implementation def linux_distribution(): with open('/etc/os-release') as os_release_file: os_release_data = {} for line in os_release_file: try: os_release_key, os_release_value = line.rstrip().split('=') os_release_data[os_release_key] = os_release_value.strip('"') except ValueError: # Skip empty lines and everything that is not a simple # variable assignment pass return (os_release_data['NAME'], os_release_data['VERSION_ID'], None) PLUGIN_CONF = 'copr' YES = set([_('yes'), _('y')]) NO = set([_('no'), _('n'), '']) if PY3: from configparser import ConfigParser, NoOptionError, NoSectionError from urllib.request import urlopen, HTTPError, URLError else: from ConfigParser import ConfigParser, NoOptionError, NoSectionError from urllib2 import urlopen, HTTPError, URLError @dnf.plugin.register_command class CoprCommand(dnf.cli.Command): """ Copr plugin for DNF """ chroot_config = None default_hostname = "copr.fedorainfracloud.org" default_hub = "fedora" default_protocol = "https" default_port = 443 default_url = default_protocol + "://" + default_hostname aliases = ("copr",) summary = _("Interact with Copr repositories.") first_warning = True usage = _(""" enable name/project [chroot] disable name/project remove name/project list --installed/enabled/disabled list --available-by-user=NAME search project Examples: copr enable rhscl/perl516 epel-6-x86_64 copr enable ignatenkobrain/ocltoys copr disable rhscl/perl516 copr remove rhscl/perl516 copr list --enabled copr list --available-by-user=ignatenkobrain copr search tests """) @staticmethod def set_argparser(parser): parser.add_argument('subcommand', nargs=1, 
choices=['help', 'enable', 'disable', 'remove', 'list', 'search']) list_option = parser.add_mutually_exclusive_group() list_option.add_argument('--installed', action='store_true', help=_('List all installed Copr repositories (default)')) list_option.add_argument('--enabled', action='store_true', help=_('List enabled Copr repositories')) list_option.add_argument('--disabled', action='store_true', help=_('List disabled Copr repositories')) list_option.add_argument('--available-by-user', metavar='NAME', help=_('List available Copr repositories by user NAME')) parser.add_argument('--hub', help=_('Specify an instance of Copr to work with')) parser.add_argument('arg', nargs='*') def configure(self): if self.cli.command.opts.command != "copr": return copr_hub = None copr_plugin_config = ConfigParser() config_files = [] config_path = self.base.conf.pluginconfpath[0] default_config_file = os.path.join(config_path, PLUGIN_CONF + ".conf") if os.path.isfile(default_config_file): config_files.append(default_config_file) copr_plugin_config.read(default_config_file) if copr_plugin_config.has_option('main', 'distribution') and\ copr_plugin_config.has_option('main', 'releasever'): distribution = copr_plugin_config.get('main', 'distribution') releasever = copr_plugin_config.get('main', 'releasever') self.chroot_config = [distribution, releasever] else: self.chroot_config = [False, False] for filename in os.listdir(os.path.join(config_path, PLUGIN_CONF + ".d")): if filename.endswith('.conf'): config_file = os.path.join(config_path, PLUGIN_CONF + ".d", filename) config_files.append(config_file) project = [] if len(self.opts.arg): project = self.opts.arg[0].split("/") if len(project) == 3 and self.opts.hub: logger.critical( _('Error: ') + _('specify Copr hub either with `--hub` or using ' '`copr_hub/copr_username/copr_projectname` format') ) raise dnf.cli.CliError(_('multiple hubs specified')) # Copr hub was not specified, using default hub `fedora` elif not self.opts.hub and 
len(project) != 3: self.copr_hostname = self.default_hostname self.copr_url = self.default_url # Copr hub specified with hub/user/project format elif len(project) == 3: copr_hub = project[0] else: copr_hub = self.opts.hub # Try to find hub in a config file if config_files and copr_hub: self.copr_url = None copr_plugin_config.read(sorted(config_files, reverse=True)) hostname = self._read_config_item(copr_plugin_config, copr_hub, 'hostname', None) if hostname: protocol = self._read_config_item(copr_plugin_config, copr_hub, 'protocol', self.default_protocol) port = self._read_config_item(copr_plugin_config, copr_hub, 'port', self.default_port) self.copr_hostname = hostname self.copr_url = protocol + "://" + hostname if int(port) != self.default_port: self.copr_url += ":" + port self.copr_hostname += ":" + port if not self.copr_url: if '://' not in copr_hub: self.copr_hostname = copr_hub self.copr_url = self.default_protocol + "://" + copr_hub else: self.copr_hostname = copr_hub.split('://', 1)[1] self.copr_url = copr_hub def _read_config_item(self, config, hub, section, default): try: return config.get(hub, section) except (NoOptionError, NoSectionError): return default def _user_warning_before_prompt(self, text): sys.stderr.write("{0}\n".format(text.strip())) def run(self): subcommand = self.opts.subcommand[0] if subcommand == "help": self.cli.optparser.print_help(self) return 0 if subcommand == "list": if self.opts.available_by_user: self._list_user_projects(self.opts.available_by_user) return else: self._list_installed_repositories(self.base.conf.reposdir[0], self.opts.enabled, self.opts.disabled) return try: project_name = self.opts.arg[0] except (ValueError, IndexError): logger.critical( _('Error: ') + _('exactly two additional parameters to ' 'copr command are required')) self.cli.optparser.print_help(self) raise dnf.cli.CliError( _('exactly two additional parameters to ' 'copr command are required')) try: chroot = self.opts.arg[1] if len(self.opts.arg) > 2: 
raise dnf.exceptions.Error(_('Too many arguments.')) self.chroot_parts = chroot.split("-") if len(self.chroot_parts) < 3: raise dnf.exceptions.Error(_('Bad format of optional chroot. The format is ' 'distribution-version-architecture.')) except IndexError: chroot = self._guess_chroot() self.chroot_parts = chroot.split("-") # commands without defined copr_username/copr_projectname if subcommand == "search": self._search(project_name) return project = project_name.split("/") if len(project) not in [2, 3]: logger.critical( _('Error: ') + _('use format `copr_username/copr_projectname` ' 'to reference copr project')) raise dnf.cli.CliError(_('bad copr project format')) elif len(project) == 2: copr_username = project[0] copr_projectname = project[1] else: copr_username = project[1] copr_projectname = project[2] project_name = copr_username + "/" + copr_projectname repo_filename = "{0}/_copr:{1}:{2}:{3}.repo".format( self.base.conf.get_reposdir, self.copr_hostname, self._sanitize_username(copr_username), copr_projectname) if subcommand == "enable": self._need_root() info = _(""" Enabling a Copr repository. Please note that this repository is not part of the main distribution, and quality may vary. The Fedora Project does not exercise any power over the contents of this repository beyond the rules outlined in the Copr FAQ at , and packages are not held to any quality or security level. Please do not file bug reports about these packages in Fedora Bugzilla. In case of problems, contact the owner of this repository. 
""") project = '/'.join([self.copr_hostname, copr_username, copr_projectname]) msg = "Do you really want to enable {0}?".format(project) self._ask_user(info, msg) self._download_repo(project_name, repo_filename) logger.info(_("Repository successfully enabled.")) self._runtime_deps_warning(copr_username, copr_projectname) elif subcommand == "disable": self._need_root() self._disable_repo(copr_username, copr_projectname) logger.info(_("Repository successfully disabled.")) elif subcommand == "remove": self._need_root() self._remove_repo(copr_username, copr_projectname) logger.info(_("Repository successfully removed.")) else: raise dnf.exceptions.Error( _('Unknown subcommand {}.').format(subcommand)) def _list_repo_file(self, repo_id, repo, enabled_only, disabled_only): file_name = repo.repofile.split('/')[-1] match_new = re.match("_copr:" + self.copr_hostname, file_name) match_old = self.copr_url == self.default_url and re.match("_copr_", file_name) match_any = re.match("_copr:|^_copr_", file_name) if self.opts.hub: if not match_new and not match_old: return elif not match_any: return if re.match('copr:.*:.*:.*:ml', repo_id): # We skip multilib repositories return if re.match('coprdep:.*', repo_id): # Runtime dependencies are not listed. 
return enabled = repo.enabled if (enabled and disabled_only) or (not enabled and enabled_only): return old_repo = False # repo ID has copr::: format, while # can contain more colons if re.match("copr:", repo_id): _, copr_hostname, copr_owner, copr_dir = repo_id.split(':', 3) msg = copr_hostname + '/' + copr_owner + "/" + copr_dir # repo ID has - format, try to get hub from file name elif re.match("_copr:", file_name): copr_name = repo_id.split('-', 1) copr_hostname = file_name.rsplit(':', 2)[0].split(':', 1)[1] msg = copr_hostname + '/' + copr_name[0] + '/' + copr_name[1] # no information about hub, assume the default one else: copr_name = repo_id.split('-', 1) msg = self.default_hostname + '/' + copr_name[0] + '/' + copr_name[1] old_repo = True if not enabled: msg += " (disabled)" if old_repo: msg += " *" print(msg) return old_repo def _list_installed_repositories(self, directory, enabled_only, disabled_only): old_repo = False for repo_id, repo in self.base.repos.items(): if self._list_repo_file(repo_id, repo, enabled_only, disabled_only): old_repo = True if old_repo: print(_("* These coprs have repo file with an old format that contains " "no information about Copr hub - the default one was assumed. 
" "Re-enable the project to fix this.")) def _list_user_projects(self, user_name): # https://copr.fedorainfracloud.org/api_3/project/list?ownername=ignatenkobrain api_path = "/api_3/project/list?ownername={0}".format(user_name) url = self.copr_url + api_path res = self.base.urlopen(url, mode='w+') try: json_parse = json.loads(res.read()) except ValueError: raise dnf.exceptions.Error( _("Can't parse repositories for username '{}'.") .format(user_name)) self._check_json_output(json_parse) section_text = _("List of {} coprs").format(user_name) self._print_match_section(section_text) for item in json_parse["items"]: msg = "{0}/{1} : ".format(user_name, item["name"]) desc = item["description"] or _("No description given") msg = self.base.output.fmtKeyValFill(ucd(msg), desc) print(msg) def _search(self, query): # https://copr.fedorainfracloud.org/api_3/project/search?query=tests api_path = "/api_3/project/search?query={}".format(query) url = self.copr_url + api_path res = self.base.urlopen(url, mode='w+') try: json_parse = json.loads(res.read()) except ValueError: raise dnf.exceptions.Error(_("Can't parse search for '{}'." 
).format(query)) self._check_json_output(json_parse) section_text = _("Matched: {}").format(query) self._print_match_section(section_text) for item in json_parse["items"]: msg = "{0} : ".format(item["full_name"]) desc = item["description"] or _("No description given.") msg = self.base.output.fmtKeyValFill(ucd(msg), desc) print(msg) def _print_match_section(self, text): formatted = self.base.output.fmtSection(text) print(formatted) def _ask_user_no_raise(self, info, msg): if not self.first_warning: sys.stderr.write("\n") self.first_warning = False sys.stderr.write("{0}\n".format(info.strip())) if self.base._promptWanted(): if self.base.conf.assumeno or not self.base.output.userconfirm( msg='\n{} [y/N]: '.format(msg), defaultyes_msg='\n{} [Y/n]: '.format(msg)): return False return True def _ask_user(self, info, msg): if not self._ask_user_no_raise(info, msg): raise dnf.exceptions.Error(_('Safe and good answer. Exiting.')) @classmethod def _need_root(cls): # FIXME this should do dnf itself (BZ#1062889) if os.geteuid() != 0: raise dnf.exceptions.Error( _('This command has to be run under the root user.')) def _guess_chroot(self): """ Guess which chroot is equivalent to this machine """ # FIXME Copr should generate non-specific arch repo dist = self.chroot_config if dist is None or (dist[0] is False) or (dist[1] is False): dist = linux_distribution() # Get distribution architecture distarch = self.base.conf.substitutions['basearch'] if any([name in dist for name in ["Fedora", "Fedora Linux"]]): if "Rawhide" in dist: chroot = ("fedora-rawhide-" + distarch) # workaround for enabling repos in Rawhide when VERSION in os-release # contains a name other than Rawhide elif "rawhide" in os_release_attr("redhat_support_product_version"): chroot = ("fedora-rawhide-" + distarch) else: chroot = ("fedora-{0}-{1}".format(dist[1], distarch)) elif "Mageia" in dist: # Get distribution architecture (Mageia does not use $basearch) distarch = rpm.expandMacro("%{distro_arch}") # Set the 
chroot if "Cauldron" in dist: chroot = ("mageia-cauldron-{}".format(distarch)) else: chroot = ("mageia-{0}-{1}".format(dist[1], distarch)) elif "openSUSE" in dist: # Get distribution architecture (openSUSE does not use $basearch) distarch = rpm.expandMacro("%{_target_cpu}") # Set the chroot if "Tumbleweed" in dist: chroot = ("opensuse-tumbleweed-{}".format(distarch)) else: chroot = ("opensuse-leap-{0}-{1}".format(dist[1], distarch)) else: chroot = ("epel-%s-x86_64" % dist[1].split(".", 1)[0]) return chroot def _download_repo(self, project_name, repo_filename): short_chroot = '-'.join(self.chroot_parts[:-1]) arch = self.chroot_parts[-1] api_path = "/coprs/{0}/repo/{1}/dnf.repo?arch={2}".format(project_name, short_chroot, arch) try: response = urlopen(self.copr_url + api_path) if os.path.exists(repo_filename): os.remove(repo_filename) except HTTPError as e: if e.code != 404: error_msg = _("Request to {0} failed: {1} - {2}").format(self.copr_url + api_path, e.code, str(e)) raise dnf.exceptions.Error(error_msg) error_msg = _("It wasn't possible to enable this project.\n") error_data = e.headers.get("Copr-Error-Data") if error_data: error_data_decoded = base64.b64decode(error_data).decode('utf-8') error_data_decoded = json.loads(error_data_decoded) error_msg += _("Repository '{0}' does not exist in project '{1}'.").format( '-'.join(self.chroot_parts), project_name) if error_data_decoded.get("available chroots"): error_msg += _("\nAvailable repositories: ") + ', '.join( "'{}'".format(x) for x in error_data_decoded["available chroots"]) error_msg += _("\n\nIf you want to enable a non-default repository, use the following command:\n" " 'dnf copr enable {0} '\n" "But note that the installed repo file will likely need a manual " "modification.").format(project_name) raise dnf.exceptions.Error(error_msg) else: error_msg += _("Project {0} does not exist.").format(project_name) raise dnf.exceptions.Error(error_msg) except URLError as e: error_msg = _("Failed to connect to {0}: 
{1}").format(self.copr_url + api_path, e.reason.strerror) raise dnf.exceptions.Error(error_msg) # Try to read the first line, and detect the repo_filename from that (override the repo_filename value). first_line = response.readline() line = first_line.decode("utf-8") if re.match(r"\[copr:", line): repo_filename = os.path.join(self.base.conf.get_reposdir, "_" + line[1:-2] + ".repo") # if using default hub, remove possible old repofile if self.copr_url == self.default_url: # copr:hub:user:project.repo => _copr_user_project.repo old_repo_filename = repo_filename.replace("_copr:", "_copr", 1)\ .replace(self.copr_hostname, "").replace(":", "_", 1).replace(":", "-")\ .replace("group_", "@") if os.path.exists(old_repo_filename): os.remove(old_repo_filename) with open(repo_filename, 'wb') as f: f.write(first_line) for line in response.readlines(): f.write(line) os.chmod(repo_filename, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH) def _runtime_deps_warning(self, copr_username, copr_projectname): """ In addition to the main copr repo (that has repo ID prefixed with `copr:`), the repofile might contain additional repositories that serve as runtime dependencies. This method informs the user about the additional repos and provides an option to disable them. """ self.base.reset(repos=True) self.base.read_all_repos() repo = self._get_copr_repo(self._sanitize_username(copr_username), copr_projectname) runtime_deps = [] for repo_id in repo.cfg.sections(): if repo_id.startswith("copr:"): continue runtime_deps.append(repo_id) if not runtime_deps: return info = _( "Maintainer of the enabled Copr repository decided to make\n" "it dependent on other repositories. Such repositories are\n" "usually necessary for successful installation of RPMs from\n" "the main Copr repository (they provide runtime dependencies).\n\n" "Be aware that the note about quality and bug-reporting\n" "above applies here too, Fedora Project doesn't control the\n" "content. 
Please review the list:\n\n" "{0}\n\n" "These repositories have been enabled automatically." ) counter = itertools.count(1) info = info.format("\n\n".join([ "{num:2}. [{repoid}]\n baseurl={baseurl}".format( num=next(counter), repoid=repoid, baseurl=repo.cfg.getValue(repoid, "baseurl")) for repoid in runtime_deps ])) if not self._ask_user_no_raise(info, _("Do you want to keep them enabled?")): for dep in runtime_deps: self.base.conf.write_raw_configfile(repo.repofile, dep, self.base.conf.substitutions, {"enabled": "0"}) def _get_copr_repo(self, copr_username, copr_projectname): repo_id = "copr:{0}:{1}:{2}".format(self.copr_hostname.rsplit(':', 1)[0], self._sanitize_username(copr_username), copr_projectname) if repo_id not in self.base.repos: # check if there is a repo with old ID format repo_id = repo_id = "{0}-{1}".format(self._sanitize_username(copr_username), copr_projectname) if repo_id in self.base.repos and "_copr" in self.base.repos[repo_id].repofile: file_name = self.base.repos[repo_id].repofile.split('/')[-1] try: copr_hostname = file_name.rsplit(':', 2)[0].split(':', 1)[1] if copr_hostname != self.copr_hostname: return None except IndexError: # old filename format without hostname pass else: return None return self.base.repos[repo_id] def _remove_repo(self, copr_username, copr_projectname): # FIXME is it Copr repo ? 
repo = self._get_copr_repo(copr_username, copr_projectname) if not repo: raise dnf.exceptions.Error( _("Failed to remove copr repo {0}/{1}/{2}" .format(self.copr_hostname, copr_username, copr_projectname))) try: os.remove(repo.repofile) except OSError as e: raise dnf.exceptions.Error(str(e)) def _disable_repo(self, copr_username, copr_projectname): repo = self._get_copr_repo(copr_username, copr_projectname) if repo is None: raise dnf.exceptions.Error( _("Failed to disable copr repo {}/{}" .format(copr_username, copr_projectname))) # disable all repos provided by the repo file for repo_id in repo.cfg.sections(): self.base.conf.write_raw_configfile(repo.repofile, repo_id, self.base.conf.substitutions, {"enabled": "0"}) @classmethod def _get_data(cls, f): """ Wrapper around response from server check data and print nice error in case of some error (and return None) otherwise return json object. """ try: output = json.loads(f.read()) except ValueError: dnf.cli.CliError(_("Unknown response from server.")) return return output @classmethod def _check_json_output(cls, json_obj): if "error" in json_obj: raise dnf.exceptions.Error("{}".format(json_obj["error"])) @classmethod def _sanitize_username(cls, copr_username): if copr_username[0] == "@": return "group_{}".format(copr_username[1:]) else: return copr_username @dnf.plugin.register_command class PlaygroundCommand(CoprCommand): """ Playground plugin for DNF """ aliases = ("playground",) summary = _("Interact with Playground repository.") usage = " [enable|disable|upgrade]" def _cmd_enable(self, chroot): self._need_root() self._ask_user( _("Enabling a Playground repository."), _("Do you want to continue?"), ) api_url = "{0}/api/playground/list/".format( self.copr_url) f = self.base.urlopen(api_url, mode="w+") output = self._get_data(f) f.close() if output["output"] != "ok": raise dnf.cli.CliError(_("Unknown response from server.")) for repo in output["repos"]: project_name = "{0}/{1}".format(repo["username"], 
    def _cmd_disable(self):
        """Remove every playground .repo file from the reposdir (requires root)."""
        self._need_root()
        # playground repo files follow the "_playground_<project>.repo" pattern
        for repo_filename in glob.glob("{}/_playground_*.repo".format(self.base.conf.get_reposdir)):
            self._remove_repo(repo_filename)

    @staticmethod
    def set_argparser(parser):
        # single positional subcommand selecting the operation to perform
        parser.add_argument('subcommand', nargs=1,
                            choices=['enable', 'disable', 'upgrade'])

    def run(self):
        # Deliberate kill-switch: Playground is disabled, so the command
        # always fails here and the dispatch below is intentionally
        # unreachable (kept for when the feature returns).
        raise dnf.exceptions.Error("Playground is temporarily unsupported")
        subcommand = self.opts.subcommand[0]
        chroot = self._guess_chroot()
        if subcommand == "enable":
            self._cmd_enable(chroot)
            logger.info(_("Playground repositories successfully enabled."))
        elif subcommand == "disable":
            self._cmd_disable()
            logger.info(_("Playground repositories successfully disabled."))
        elif subcommand == "upgrade":
            self._cmd_disable()
            self._cmd_enable(chroot)
            logger.info(_("Playground repositories successfully updated."))
        else:
            raise dnf.exceptions.Error(
                _('Unknown subcommand {}.').format(subcommand))
RE_GROUP_ID_VALID = '-a-z0-9_.:'
RE_GROUP_ID = re.compile(r'^[{}]+$'.format(RE_GROUP_ID_VALID))
RE_LANG = re.compile(r'^[-a-zA-Z0-9_.@]+$')

# options passed to libcomps when serializing comps XML
COMPS_XML_OPTIONS = {
    'default_explicit': True,
    'uservisible_explicit': True,
    'empty_groups': True}


def group_id_type(value):
    '''group id validator (argparse "type" callable)

    Accepts only strings built from [-a-z0-9_.:]; raises
    argparse.ArgumentTypeError otherwise.
    '''
    if not RE_GROUP_ID.match(value):
        raise argparse.ArgumentTypeError(_('Invalid group id'))
    return value


def translation_type(value):
    '''translated texts validator (argparse "type" callable)

    Splits "lang:text" on the FIRST colon only, so the translated text may
    itself contain colons.  Bug fix: the previous maxsplit=2 produced three
    fields for e.g. "en:a: b" and wrongly rejected such values.

    Returns a (lang, text) tuple.
    '''
    data = value.split(':', 1)
    if len(data) != 2:
        raise argparse.ArgumentTypeError(
            _("Invalid translated data, should be in form 'lang:text'"))
    lang, text = data
    if not RE_LANG.match(lang):
        raise argparse.ArgumentTypeError(_('Invalid/empty language for translated data'))
    return lang, text


def text_to_id(text):
    '''generate group id based on its name

    Lower-cases the name and strips every character that is not valid in a
    group id; raises dnf.cli.CliError when nothing usable is left.
    '''
    group_id = text.lower()
    group_id = re.sub('[^{}]'.format(RE_GROUP_ID_VALID), '', group_id)
    if not group_id:
        raise dnf.cli.CliError(
            _("Can't generate group id from '{}'. Please specify group id using --id.").format(
                text))
    return group_id
    def configure(self):
        """Set demands and normalize/validate options before run()."""
        demands = self.cli.demands
        # repo metadata is only needed when package specs must be resolved
        if self.opts.packages:
            demands.sack_activation = True
            demands.available_repos = True
            demands.load_system_repo = False

        # handle --merge option (shortcut to --load and --save the same file)
        if self.opts.merge:
            self.opts.load.insert(0, self.opts.merge)
            self.opts.save.append(self.opts.merge)

        # check that group is specified when editing is attempted
        if (self.opts.description or self.opts.display_order
                or self.opts.translated_name or self.opts.translated_description
                or self.opts.user_visible is not None or self.opts.packages):
            if not self.opts.id and not self.opts.name:
                raise dnf.cli.CliError(
                    _("Can't edit group without specifying it (use --id or --name)"))

    def load_input_files(self):
        """
        Load every --load comps XML file and merge it into self.comps.

        Raises dnf.exceptions.Error when a file cannot be read or parsed.
        """
        for file_name in self.opts.load:
            file_comps = libcomps.Comps()
            try:
                if file_name.endswith('.gz'):
                    # libcomps does not support gzipped files - decompress to temporary
                    # location
                    with gzip.open(file_name) as gz_file:
                        temp_file = tempfile.NamedTemporaryFile(delete=False)
                        try:
                            shutil.copyfileobj(gz_file, temp_file)
                            # close temp_file to ensure the content is flushed to disk
                            temp_file.close()
                            file_comps.fromxml_f(temp_file.name)
                        finally:
                            os.unlink(temp_file.name)
                else:
                    file_comps.fromxml_f(file_name)
            except (IOError, OSError, libcomps.ParserError) as err:
                # gzip module raises OSError on reading from malformed gz file
                # get_last_errors() output often contains duplicit lines, remove them
                seen = set()
                for error in file_comps.get_last_errors():
                    if error in seen:
                        continue
                    logger.error(error.strip())
                    seen.add(error)
                raise dnf.exceptions.Error(
                    _("Can't load file \"{}\": {}").format(file_name, err))
            else:
                self.comps += file_comps

    def save_output_files(self):
        """Write self.comps to every --save target; raise on the last error."""
        for file_name in self.opts.save:
            try:
                # xml_f returns a list of errors / log entries
                errors = self.comps.xml_f(file_name, xml_options=COMPS_XML_OPTIONS)
            except libcomps.XMLGenError as err:
                errors = [err]
            if errors:
                # xml_f() method could return more than one error. In this case
                # raise the latest of them and log the others.
                for err in errors[:-1]:
                    logger.error(err.strip())
                raise dnf.exceptions.Error(_("Can't save file \"{}\": {}").format(
                    file_name, errors[-1].strip()))
    def find_group(self, group_id, name):
        '''
        Try to find group according to command line parameters - first by id
        then by name.  Returns the matching libcomps group or None.
        '''
        group = None
        if group_id:
            for grp in self.comps.groups:
                if grp.id == group_id:
                    group = grp
                    break
        # fall back to a name match only when the id lookup found nothing
        if group is None and name:
            for grp in self.comps.groups:
                if grp.name == name:
                    group = grp
                    break
        return group

    def edit_group(self, group):
        '''
        Set attributes and package lists for selected group
        '''
        def langlist_to_strdict(lst):
            # convert [(lang, text), ...] pairs into a libcomps StrDict
            str_dict = libcomps.StrDict()
            for lang, text in lst:
                str_dict[lang] = text
            return str_dict

        # set group attributes
        if self.opts.name:
            group.name = self.opts.name
        if self.opts.description:
            group.desc = self.opts.description
        if self.opts.display_order:
            group.display_order = self.opts.display_order
        if self.opts.user_visible is not None:
            group.uservisible = self.opts.user_visible
        if self.opts.translated_name:
            group.name_by_lang = langlist_to_strdict(self.opts.translated_name)
        if self.opts.translated_description:
            group.desc_by_lang = langlist_to_strdict(self.opts.translated_description)

        # edit packages list
        if self.opts.packages:
            # find packages according to specifications from command line
            packages = set()
            for pkg_spec in self.opts.packages:
                subj = dnf.subject.Subject(pkg_spec)
                q = subj.get_best_query(self.base.sack, with_nevra=True,
                                        with_provides=False, with_filenames=False).latest()
                if not q:
                    logger.warning(_("No match for argument: {}").format(pkg_spec))
                    continue
                packages.update(q)
            if self.opts.dependencies:
                # add packages that provide requirements
                requirements = set()
                for pkg in packages:
                    requirements.update(pkg.requires)
                packages.update(self.base.sack.query().filterm(provides=requirements))
            pkg_names = {pkg.name for pkg in packages}
            if self.opts.remove:
                # PACKAGE_TYPE_UNKNOWN matches a package in any section
                for pkg_name in pkg_names:
                    for pkg in group.packages_match(name=pkg_name,
                                                    type=libcomps.PACKAGE_TYPE_UNKNOWN):
                        group.packages.remove(pkg)
            else:
                if self.opts.mandatory:
                    pkg_type = libcomps.PACKAGE_TYPE_MANDATORY
                elif self.opts.optional:
                    pkg_type = libcomps.PACKAGE_TYPE_OPTIONAL
                else:
                    pkg_type = libcomps.PACKAGE_TYPE_DEFAULT
                # sorted() keeps the resulting XML deterministic
                for pkg_name in sorted(pkg_names):
                    if not group.packages_match(name=pkg_name, type=pkg_type):
                        group.packages.append(libcomps.Package(name=pkg_name, type=pkg_type))

    def run(self):
        """Entry point: load inputs, optionally create/edit a group, save/print."""
        self.load_input_files()

        if self.opts.id or self.opts.name:
            # we are adding / editing a group
            group = self.find_group(group_id=self.opts.id, name=self.opts.name)
            if group is None:
                # create a new group
                if self.opts.remove:
                    raise dnf.exceptions.Error(_("Can't remove packages from non-existent group"))
                group = libcomps.Group()
                if self.opts.id:
                    group.id = self.opts.id
                    group.name = self.opts.id
                elif self.opts.name:
                    # derive an id from the name, refusing silent collisions
                    group_id = text_to_id(self.opts.name)
                    if self.find_group(group_id=group_id, name=None):
                        raise dnf.cli.CliError(
                            _("Group id '{}' generated from '{}' is duplicit. "
                              "Please specify group id using --id.").format(
                                  group_id, self.opts.name))
                    group.id = group_id
                self.comps.groups.append(group)
            self.edit_group(group)

        self.save_output_files()
        # print when asked for explicitly, or when there is no --save target
        if self.opts.print or (not self.opts.save):
            print(self.comps.xml_str(xml_options=COMPS_XML_OPTIONS))
def _pkgdir(intermediate, target):
    """Absolute, symlink-resolved download directory for *target*.

    The path is built relative to the current working directory as
    cwd / intermediate / target.
    """
    combined = os.path.join(dnf.i18n.ucd(os.getcwd()), intermediate, target)
    return os.path.realpath(combined)


class RPMPayloadLocation(dnf.repo.RPMPayload):
    """RPMPayload that downloads into a caller-chosen directory."""

    def __init__(self, pkg, progress, pkg_location):
        super(RPMPayloadLocation, self).__init__(pkg, progress)
        # keep only the directory part; the file name comes from the payload
        self.package_dir = os.path.dirname(pkg_location)

    def _target_params(self):
        # reuse the stock target parameters, only redirect the destination
        params = super(RPMPayloadLocation, self)._target_params()
        dnf.util.ensure_dir(self.package_dir)
        params['dest'] = self.package_dir
        return params
    @staticmethod
    def set_argparser(parser):
        parser.add_argument('-a', '--arch', dest='arches', default=[],
                            action=OptionParser._SplitCallback, metavar='[arch]',
                            help=_('download only packages for this ARCH'))
        parser.add_argument('--delete', default=False, action='store_true',
                            help=_('delete local packages no longer present in repository'))
        parser.add_argument('--download-metadata', default=False, action='store_true',
                            help=_('download all the metadata.'))
        parser.add_argument('-g', '--gpgcheck', default=False, action='store_true',
                            help=_('Remove packages that fail GPG signature checking '
                                   'after downloading'))
        parser.add_argument('-m', '--downloadcomps', default=False, action='store_true',
                            help=_('also download and uncompress comps.xml'))
        parser.add_argument('--metadata-path',
                            help=_('where to store downloaded repository metadata. '
                                   'Defaults to the value of --download-path.'))
        parser.add_argument('-n', '--newest-only', default=False, action='store_true',
                            help=_('download only newest packages per-repo'))
        parser.add_argument('--norepopath', default=False, action='store_true',
                            help=_("Don't add the reponame to the download path."))
        parser.add_argument('-p', '--download-path', default='./',
                            help=_('where to store downloaded repositories'))
        parser.add_argument('--remote-time', default=False, action='store_true',
                            help=_('try to set local timestamps of local files by '
                                   'the one on the server'))
        parser.add_argument('--source', default=False, action='store_true',
                            help=_('download only source packages'))
        parser.add_argument('-u', '--urls', default=False, action='store_true',
                            help=_("Just list urls of what would be downloaded, "
                                   "don't download"))

    def configure(self):
        """Enable the selected repos and force fresh (non-cached) metadata."""
        demands = self.cli.demands
        demands.available_repos = True
        demands.sack_activation = True

        repos = self.base.repos

        if self.opts.repo:
            # --repoid given: start from everything disabled
            repos.all().disable()
            for repoid in self.opts.repo:
                try:
                    repo = repos[repoid]
                except KeyError:
                    raise dnf.cli.CliError("Unknown repo: '%s'." % repoid)
                repo.enable()

        if self.opts.source:
            repos.enable_source_repos()

        # --norepopath would make multiple repos overwrite each other
        if len(list(repos.iter_enabled())) > 1 and self.opts.norepopath:
            raise dnf.cli.CliError(
                _("Can't use --norepopath with multiple repositories"))

        for repo in repos.iter_enabled():
            # expire so cached metadata never masks upstream changes;
            # deltarpms make no sense when mirroring full packages
            repo._repo.expire()
            repo.deltarpm = False

    def run(self):
        """Download metadata/comps/packages per options; GPG-check and prune."""
        self.base.conf.keepcache = True
        gpgcheck_ok = True
        for repo in self.base.repos.iter_enabled():
            if self.opts.remote_time:
                repo._repo.setPreserveRemoteTime(True)
            if self.opts.download_metadata:
                if self.opts.urls:
                    # list mode: print metadata URLs instead of downloading
                    for md_type, md_location in repo._repo.getMetadataLocations():
                        url = repo.remote_location(md_location)
                        if url:
                            print(url)
                        else:
                            msg = _("Failed to get mirror for metadata: %s") % md_type
                            logger.warning(msg)
                else:
                    self.download_metadata(repo)
            if self.opts.downloadcomps:
                if self.opts.urls:
                    mdl = dict(repo._repo.getMetadataLocations())
                    group_locations = [mdl[md_type]
                                       for md_type in ('group', 'group_gz', 'group_gz_zck')
                                       if md_type in mdl]
                    if group_locations:
                        # print the first group file variant that has a mirror
                        for group_location in group_locations:
                            url = repo.remote_location(group_location)
                            if url:
                                print(url)
                                break
                        else:
                            msg = _("Failed to get mirror for the group file.")
                            logger.warning(msg)
                else:
                    self.getcomps(repo)
            pkglist = self.get_pkglist(repo)
            if self.opts.urls:
                self.print_urls(pkglist)
            else:
                self.download_packages(pkglist)
                if self.opts.gpgcheck:
                    for pkg in pkglist:
                        local_path = self.pkg_download_path(pkg)
                        # base.package_signature_check uses pkg.localPkg() to determine
                        # the location of the package rpm file on the disk.
                        # Set it to the correct download path.
                        pkg.localPkg = types.MethodType(
                            lambda s, local_path=local_path: local_path, pkg)
                        result, error = self.base.package_signature_check(pkg)
                        if result != 0:
                            # failed check: drop the file and fail at the end
                            logger.warning(_("Removing {}: {}").format(
                                os.path.basename(local_path), error))
                            os.unlink(local_path)
                            gpgcheck_ok = False
                if self.opts.delete:
                    self.delete_old_local_packages(repo, pkglist)
        if not gpgcheck_ok:
            raise dnf.exceptions.Error(_("GPG signature check failed."))

    def repo_target(self, repo):
        """Package download directory for *repo*."""
        return _pkgdir(self.opts.destdir or self.opts.download_path,
                       repo.id if not self.opts.norepopath else '')

    def metadata_target(self, repo):
        """Metadata download directory for *repo* (falls back to repo_target)."""
        if self.opts.metadata_path:
            return _pkgdir(self.opts.metadata_path, repo.id)
        else:
            return self.repo_target(repo)
    def pkg_download_path(self, pkg):
        """Return the local file path for *pkg*, refusing path traversal.

        Raises dnf.exceptions.Error when pkg.location would escape the
        repo's download directory (e.g. via "..").
        """
        repo_target = self.repo_target(pkg.repo)
        pkg_download_path = os.path.realpath(
            os.path.join(repo_target, pkg.location))
        # join() ensures repo_target ends with a path separator (otherwise the
        # check would pass if pkg_download_path was a "sibling" path component
        # of repo_target that has the same prefix).
        if not pkg_download_path.startswith(os.path.join(repo_target, '')):
            raise dnf.exceptions.Error(
                _("Download target '{}' is outside of download path '{}'.").format(
                    pkg_download_path, repo_target))
        return pkg_download_path

    def delete_old_local_packages(self, repo, pkglist):
        # delete any *.rpm file under target path, that was not downloaded from repository
        downloaded_files = set(self.pkg_download_path(pkg) for pkg in pkglist)
        for dirpath, dirnames, filenames in os.walk(self.repo_target(repo)):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                if filename.endswith('.rpm') and os.path.isfile(path):
                    if path not in downloaded_files:
                        # Delete disappeared or relocated file
                        try:
                            os.unlink(path)
                            logger.info(_("[DELETED] %s"), path)
                        except OSError:
                            logger.error(_("failed to delete file %s"), path)

    def getcomps(self, repo):
        """Download and decompress the repo's comps.xml into the metadata dir."""
        comps_fn = repo._repo.getCompsFn()
        if comps_fn:
            dest_path = self.metadata_target(repo)
            dnf.util.ensure_dir(dest_path)
            dest = os.path.join(dest_path, 'comps.xml')
            dnf.yum.misc.decompress(comps_fn, dest=dest)
            logger.info(_("comps.xml for repository %s saved"), repo.id)

    def download_metadata(self, repo):
        """Download the repo's full metadata into the metadata directory."""
        repo_target = self.metadata_target(repo)
        repo._repo.downloadMetadata(repo_target)
        return True
    def _get_latest(self, query):
        """
        return union of these queries:
        - the latest NEVRAs from non-modular packages
        - all packages from stream version with the latest package NEVRA
          (this should not be needed but the latest package NEVRAs might be
          part of an older module version)
        - all packages from the latest stream version
        """
        if not dnf.base.WITH_MODULES:
            # no module support: plain latest() is sufficient
            return query.latest()

        query.apply()
        module_packages = self.base._moduleContainer.getModulePackages()
        all_artifacts = set()
        module_dict = {}  # {NameStream: {Version: [modules]}}
        artifact_version = {}  # {artifact: {NameStream: [Version]}}
        for module_package in module_packages:
            artifacts = module_package.getArtifacts()
            all_artifacts.update(artifacts)
            module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
                module_package.getVersionNum(), []).append(module_package)
            for artifact in artifacts:
                artifact_version.setdefault(artifact, {}).setdefault(
                    module_package.getNameStream(), []).append(module_package.getVersionNum())

        # the latest NEVRAs from non-modular packages
        latest_query = query.filter(
            pkg__neq=query.filter(nevra_strict=all_artifacts)).latest()

        # artifacts from the newest version and those versions that contain an artifact
        # with the highest NEVRA
        latest_stream_artifacts = set()
        for namestream, version_dict in module_dict.items():
            # versions that will be synchronized
            versions = set()
            # add the newest stream version
            versions.add(sorted(version_dict.keys(), reverse=True)[0])
            # collect all artifacts in all stream versions
            stream_artifacts = set()
            for modules in version_dict.values():
                for module in modules:
                    stream_artifacts.update(module.getArtifacts())
            # find versions to which the packages with the highest NEVRAs belong
            for latest_pkg in query.filter(nevra_strict=stream_artifacts).latest():
                # here we depend on modules.yaml always containing full NEVRA (including epoch)
                nevra = "{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch}".format(latest_pkg)
                # download only highest version containing the latest artifact
                versions.add(max(artifact_version[nevra][namestream]))
            # add all artifacts from selected versions for synchronization
            for version in versions:
                for module in version_dict[version]:
                    latest_stream_artifacts.update(module.getArtifacts())
        latest_query = latest_query.union(query.filter(nevra_strict=latest_stream_artifacts))
        return latest_query

    def get_pkglist(self, repo):
        """Build the query of packages to download from *repo* per options."""
        query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available().filterm(
            reponame=repo.id)
        if self.opts.newest_only:
            query = self._get_latest(query)
        if self.opts.source:
            query.filterm(arch='src')
        elif self.opts.arches:
            query.filterm(arch=self.opts.arches)
        return query

    def download_packages(self, pkglist):
        """Download every package in *pkglist* into its computed target dir."""
        base = self.base
        progress = base.output.progress
        if progress is None:
            progress = dnf.callback.NullDownloadProgress()
        drpm = dnf.drpm.DeltaInfo(base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).installed(),
                                  progress, 0)
        payloads = [RPMPayloadLocation(pkg, progress, self.pkg_download_path(pkg))
                    for pkg in pkglist]
        base._download_remote_payloads(payloads, drpm, progress, None, False)

    def print_urls(self, pkglist):
        """Print the remote URL of every package (used by --urls)."""
        for pkg in pkglist:
            url = pkg.remote_location()
            if url:
                print(url)
            else:
                msg = _("Failed to get mirror for package: %s") % pkg.name
                logger.warning(msg)
# Graphviz preamble emitted before the per-package nodes/edges.
DOT_HEADER = """
size="20.69,25.52";
ratio="fill";
rankdir="TB";
orientation=port;
node[style="filled"];
"""


class RepoGraph(dnf.Plugin):
    # Plugin shim: registers the command with the CLI when one is present.

    name = "repograph"

    def __init__(self, base, cli):
        super(RepoGraph, self).__init__(base, cli)
        if cli is None:
            return
        cli.register_command(RepoGraphCommand)


class RepoGraphCommand(dnf.cli.Command):
    aliases = ("repograph", "repo-graph",)
    summary = _("Output a full package dependency graph in dot format")

    def configure(self):
        """Restrict enabled repos to --repoid selections, if any."""
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        if self.opts.repo:
            for repo in self.base.repos.all():
                if repo.id not in self.opts.repo:
                    repo.disable()
                else:
                    repo.enable()

    def run(self):
        self.do_dot(DOT_HEADER)

    def do_dot(self, header):
        """Print the dependency graph in Graphviz dot syntax to stdout."""
        maxdeps = 0
        deps = self._get_deps(self.base.sack)

        print("digraph packages {")
        print("{}".format(header))
        for pkg in deps.keys():
            if len(deps[pkg]) > maxdeps:
                maxdeps = len(deps[pkg])

            # color calculations lifted from rpmgraph
            h = 0.5 + (0.6 / 23 * len(deps[pkg]))
            s = h + 0.1
            b = 1.0

            print('"{}" [color="{:.12g} {:.12g} {}"];'.format(pkg, h, s, b))
            print('"{}" -> {{'.format(pkg))
            for req in deps[pkg]:
                print('"{}"'.format(req))
            print('}} [color="{:.12g} {:.12g} {}"];\n'.format(h, s, b))
        print("}")

    @staticmethod
    def _get_deps(sack):
        """Map each available package name to the names of its providers.

        prov caches requirement-string -> provider-name lookups; skip lists
        requirements nothing provides so they are resolved only once.
        """
        requires = {}
        prov = {}
        skip = []
        available = sack.query().available()
        for pkg in available:
            xx = {}  # ordered set of provider names for this package
            for req in pkg.requires:
                reqname = str(req)
                if reqname in skip:
                    continue
                # XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721
                if reqname.startswith("solvable:"):
                    continue
                if reqname in prov:
                    provider = prov[reqname]
                else:
                    provider = available.filter(provides=reqname)
                    if not provider:
                        logger.debug(_("Nothing provides: '%s'"), reqname)
                        skip.append(reqname)
                        continue
                    else:
                        # first provider wins and is cached
                        provider = provider[0].name
                    prov[reqname] = provider
                if provider == pkg.name:
                    xx[provider] = None
                if provider in xx or provider in skip:
                    continue
                else:
                    xx[provider] = None
            requires[pkg.name] = xx.keys()
        return requires
class RepoManage(dnf.Plugin):
    # Plugin shim: registers the command with the CLI when one is present.

    name = "repomanage"

    def __init__(self, base, cli):
        super(RepoManage, self).__init__(base, cli)
        if cli is None:
            return
        cli.register_command(RepoManageCommand)


class RepoManageCommand(dnf.cli.Command):
    aliases = ("repomanage",)
    summary = _("Manage a directory of rpm packages")

    def pre_configure(self):
        # quiet non-essential output unless the user asked for verbosity
        if not self.opts.verbose and not self.opts.quiet:
            self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO)

    def configure(self):
        if not self.opts.verbose and not self.opts.quiet:
            self.cli.redirect_repo_progress()
        demands = self.cli.demands
        demands.sack_activation = True

    def run(self):
        """List newest/oldest package files (and modular artifacts) in a directory."""
        # --old / --new / --oldonly are mutually exclusive; --new is the default
        if self.opts.new and self.opts.old:
            raise dnf.exceptions.Error(_("Pass either --old or --new, not both!"))
        if self.opts.new and self.opts.oldonly:
            raise dnf.exceptions.Error(_("Pass either --oldonly or --new, not both!"))
        if self.opts.old and self.opts.oldonly:
            raise dnf.exceptions.Error(_("Pass either --old or --oldonly, not both!"))
        if not self.opts.old and not self.opts.oldonly:
            self.opts.new = True

        verfile = {}   # {nevra tuple: [file paths]}
        pkgdict = {}   # {(name, arch): [packages, sorted ascending]}
        module_dict = {}  # {NameStream: {Version: [modules]}}
        all_modular_artifacts = set()

        keepnum = int(self.opts.keep)  # the number of items to keep

        try:
            # preferred path: treat the directory as a repo with metadata
            REPOMANAGE_REPOID = "repomanage_repo"
            repo_conf = self.base.repos.add_new_repo(REPOMANAGE_REPOID, self.base.conf, baseurl=[self.opts.path])
            # Always expire the repo, otherwise repomanage could use cached metadata and give identical results
            # for multiple runs even if the actual repo changed in the meantime
            repo_conf._repo.expire()
            self.base._add_repo_to_sack(repo_conf)
            if dnf.base.WITH_MODULES:
                self.base._setup_modular_excludes()

                # Prepare modules
                module_packages = self.base._moduleContainer.getModulePackages()

                for module_package in module_packages:
                    # Even though we load only REPOMANAGE_REPOID other modules can be loaded from system
                    # failsafe data automatically, we don't want them affecting repomanage results so ONLY
                    # use modules from REPOMANAGE_REPOID.
                    if module_package.getRepoID() == REPOMANAGE_REPOID:
                        all_modular_artifacts.update(module_package.getArtifacts())
                        module_dict.setdefault(module_package.getNameStream(), {}).setdefault(
                            module_package.getVersionNum(), []).append(module_package)

        except dnf.exceptions.RepoError:
            # fallback: no repodata - scan the directory for bare .rpm files
            rpm_list = []
            rpm_list = self._get_file_list(self.opts.path, ".rpm")
            if len(rpm_list) == 0:
                raise dnf.exceptions.Error(_("No files to process"))

            self.base.reset(sack=True, repos=True)
            self.base.fill_sack(load_system_repo=False, load_available_repos=False)
            try:
                self.base.add_remote_rpms(rpm_list, progress=self.base.output.progress)
            except IOError:
                logger.warning(_("Could not open {}").format(', '.join(rpm_list)))

        # Prepare regular packages
        query = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES).available()
        packages = [x for x in query.filter(pkg__neq=query.filter(nevra_strict=all_modular_artifacts)).available()]
        packages.sort()
        for pkg in packages:
            na = (pkg.name, pkg.arch)
            if na in pkgdict:
                if pkg not in pkgdict[na]:
                    pkgdict[na].append(pkg)
            else:
                pkgdict[na] = [pkg]

            nevra = self._package_to_nevra(pkg)
            if nevra in verfile:
                verfile[nevra].append(self._package_to_path(pkg))
            else:
                verfile[nevra] = [self._package_to_path(pkg)]

        outputpackages = []
        # modular packages
        keepnum_latest_stream_artifacts = set()

        if self.opts.new:
            # regular packages: keep the last (newest) keepnum EVRs per (name, arch)
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]
                newevrs = evrlist[-keepnum:]
                for package in newevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)
            # modular packages
            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())
                new_sorted_stream_versions = sorted_stream_versions[-keepnum:]
                for i in new_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))

        if self.opts.old:
            # regular packages: everything except the newest keepnum EVRs
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]
                oldevrs = evrlist[:-keepnum]
                for package in oldevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)
            # modular packages
            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())
                old_sorted_stream_versions = sorted_stream_versions[:-keepnum]
                for i in old_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_latest_stream_artifacts.update(set(stream.getArtifacts()))

        if self.opts.oldonly:
            # regular packages: old EVRs only
            for (n, a) in pkgdict.keys():
                evrlist = pkgdict[(n, a)]
                oldevrs = evrlist[:-keepnum]
                for package in oldevrs:
                    nevra = self._package_to_nevra(package)
                    for fpkg in verfile[nevra]:
                        outputpackages.append(fpkg)
            # modular packages: artifacts of old stream versions, unless the same
            # artifact also belongs to one of the kept (newer) stream versions
            keepnum_newer_stream_artifacts = set()

            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())
                new_sorted_stream_versions = sorted_stream_versions[-keepnum:]
                for i in new_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        keepnum_newer_stream_artifacts.update(set(stream.getArtifacts()))

            for streams_by_version in module_dict.values():
                sorted_stream_versions = sorted(streams_by_version.keys())
                old_sorted_stream_versions = sorted_stream_versions[:-keepnum]
                for i in old_sorted_stream_versions:
                    for stream in streams_by_version[i]:
                        for artifact in stream.getArtifacts():
                            if artifact not in keepnum_newer_stream_artifacts:
                                keepnum_latest_stream_artifacts.add(artifact)

        modular_packages = [self._package_to_path(x) for x in query.filter(pkg__eq=query.filter(nevra_strict=keepnum_latest_stream_artifacts)).available()]

        outputpackages = outputpackages + modular_packages
        outputpackages.sort()
        if self.opts.space:
            print(" ".join(outputpackages))
        else:
            for pkg in outputpackages:
                print(pkg)
parser.add_argument("-o", "--old", action="store_true", help=_("Print the older packages")) parser.add_argument("-O", "--oldonly", action="store_true", help=_("Print the older packages. Exclude the newest packages.")) parser.add_argument("-n", "--new", action="store_true", help=_("Print the newest packages")) parser.add_argument("-s", "--space", action="store_true", help=_("Space separated output, not newline")) parser.add_argument("-k", "--keep", action="store", metavar="KEEP", help=_("Newest N packages to keep - defaults to 1"), default=1, type=int) parser.add_argument("path", action="store", help=_("Path to directory")) @staticmethod def _get_file_list(path, ext): """Return all files in path matching ext return list object """ filelist = [] for root, dirs, files in os.walk(path): for f in files: if os.path.splitext(f)[1].lower() == str(ext): filelist.append(os.path.join(root, f)) return filelist def _package_to_path(self, pkg): if len(self.base.repos): return os.path.join(self.opts.path, pkg.location) else: return pkg.location @staticmethod def _package_to_nevra(pkg): return (pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch) repoclosure.py000064400000015233151030231510007453 0ustar00# repoclosure.py # DNF plugin adding a command to display a list of unresolved dependencies # for repositories. # # Copyright (C) 2015 Igor Gnatenko # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. 
class RepoClosure(dnf.Plugin):
    """Plugin shell that exposes the ``repoclosure`` command to the CLI."""

    name = "repoclosure"

    def __init__(self, base, cli):
        super(RepoClosure, self).__init__(base, cli)
        # cli is None when the plugin is loaded outside the command line
        # interface (API consumers); there is nothing to register then.
        if cli is not None:
            cli.register_command(RepoClosureCommand)
checked # --best: available only contains the latest packages per arch across all repos # --check: only check packages in the specified repo(s) # --newest: only consider the latest versions of a package from each repo # --pkg: only check the specified packages # # Relationship of --best and --newest: # # Pkg Set | Neither | --best | --newest | --best and --newest | # available | all | latest in all repos | latest per repo | latest in all repos | # to_check | all | all | latest per repo | latest per repo | if self.opts.newest: available = self.base.sack.query().filter(empty=True) to_check = self.base.sack.query().filter(empty=True) for repo in self.base.repos.iter_enabled(): available = \ available.union(self.base.sack.query().filter(reponame=repo.id).latest()) to_check = \ to_check.union(self.base.sack.query().filter(reponame=repo.id).latest()) else: available = self.base.sack.query().available() to_check = self.base.sack.query().available() if self.opts.pkglist: pkglist_q = self.base.sack.query().filter(empty=True) errors = [] for pkg in self.opts.pkglist: subj = dnf.subject.Subject(pkg) pkg_q = to_check.intersection( subj.get_best_query(self.base.sack, with_nevra=True, with_provides=False, with_filenames=False)) if pkg_q: pkglist_q = pkglist_q.union(pkg_q) else: errors.append(pkg) if errors: raise dnf.exceptions.Error( _('no package matched: %s') % ', '.join(errors)) to_check = pkglist_q if self.opts.check: to_check.filterm(reponame=self.opts.check) if arch is not None: to_check.filterm(arch=arch) if self.base.conf.best: available.filterm(latest_per_arch=True) available.apply() to_check.apply() for pkg in to_check: unresolved[pkg] = set() for req in pkg.requires: reqname = str(req) # XXX: https://bugzilla.redhat.com/show_bug.cgi?id=1186721 if reqname.startswith("solvable:") or \ reqname.startswith("rpmlib("): continue deps.add(req) unresolved[pkg].add(req) unresolved_deps = set(x for x in deps if not available.filter(provides=x)) unresolved_transition = {k: set(x 
for x in v if x in unresolved_deps) for k, v in unresolved.items()} return {k: v for k, v in unresolved_transition.items() if v} @staticmethod def set_argparser(parser): parser.add_argument("--arch", default=[], action="append", dest='arches', help=_("check packages of the given archs, can be " "specified multiple times")) parser.add_argument("--check", default=[], action="append", help=_("Specify repositories to check")) parser.add_argument("-n", "--newest", action="store_true", help=_("Check only the newest packages in the " "repos")) parser.add_argument("--pkg", default=[], action="append", help=_("Check closure for this package only"), dest="pkglist") download.py000064400000030052151030231510006714 0ustar00# download.py, supplies the 'download' command. # # Copyright (C) 2013-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals from dnfpluginscore import _, logger from dnf.cli.option_parser import OptionParser import dnf import dnf.cli import dnf.exceptions import dnf.i18n import dnf.subject import dnf.util import hawkey import itertools import os import shutil @dnf.plugin.register_command class DownloadCommand(dnf.cli.Command): aliases = ['download'] summary = _('Download package to current directory') def __init__(self, cli): super(DownloadCommand, self).__init__(cli) self.opts = None self.parser = None @staticmethod def set_argparser(parser): parser.add_argument('packages', nargs='+', help=_('packages to download')) parser.add_argument("--source", action='store_true', help=_('download the src.rpm instead')) parser.add_argument("--debuginfo", action='store_true', help=_('download the -debuginfo package instead')) parser.add_argument("--debugsource", action='store_true', help=_('download the -debugsource package instead')) parser.add_argument("--arch", '--archlist', dest='arches', default=[], action=OptionParser._SplitCallback, metavar='[arch]', help=_("limit the query to packages of given architectures.")) parser.add_argument('--resolve', action='store_true', help=_('resolve and download needed dependencies')) parser.add_argument('--alldeps', action='store_true', help=_('when running with --resolve, download all dependencies ' '(do not exclude already installed ones)')) parser.add_argument('--url', '--urls', action='store_true', dest='url', help=_('print list of urls where the rpms ' 'can be downloaded instead of downloading')) parser.add_argument('--urlprotocols', action='append', choices=['http', 'https', 'rsync', 'ftp'], default=[], help=_('when running with --url, ' 'limit to specific protocols')) def configure(self): # setup sack and populate it with enabled repos demands = self.cli.demands demands.sack_activation = True demands.available_repos = True if self.opts.resolve and self.opts.alldeps: 
    def run(self):
        """Entry point: download the requested packages, or print their URLs.

        The --source/--debuginfo/--debugsource flags swap the plain rpms for
        the corresponding special packages; any combination of the three may
        be given and the results are concatenated.  With --url nothing is
        downloaded, the remote locations are printed instead.
        """
        if (not self.opts.source
                and not self.opts.debuginfo
                and not self.opts.debugsource):
            pkgs = self._get_pkg_objs_rpms(self.opts.packages)
        else:
            pkgs = []
            if self.opts.source:
                pkgs.extend(self._get_pkg_objs_source(self.opts.packages))
            if self.opts.debuginfo:
                pkgs.extend(self._get_pkg_objs_debuginfo(self.opts.packages))
            if self.opts.debugsource:
                pkgs.extend(self._get_pkg_objs_debugsource(self.opts.packages))

        # If user asked for just urls then print them and we're done
        if self.opts.url:
            for pkg in pkgs:
                # command line repo packages do not have .remote_location
                if pkg.repoid != hawkey.CMDLINE_REPO_NAME:
                    url = pkg.remote_location(schemes=self.opts.urlprotocols)
                    if url:
                        print(url)
                    else:
                        # No usable mirror: fatal under strict mode, a
                        # warning otherwise.
                        msg = _("Failed to get mirror for package: %s") % pkg.name
                        if self.base.conf.strict:
                            raise dnf.exceptions.Error(msg)
                        logger.warning(msg)
            return
        else:
            self._do_downloads(pkgs)  # download rpms
pkg.localPkg() dst = os.path.join(self.base.conf.destdir, os.path.basename(src)) if os.path.exists(dst) and os.path.samefile(src, dst): continue shutil.copy(src, self.base.conf.destdir) locations = sorted([pkg.localPkg() for pkg in to_download + cmdline]) return locations def _get_pkg_objs_rpms(self, pkg_specs): """ Return a list of dnf.Package objects that represent the rpms to download. """ if self.opts.resolve: pkgs = self._get_packages_with_deps(pkg_specs) else: pkgs = self._get_packages(pkg_specs) return pkgs def _get_pkg_objs_source(self, pkg_specs): """ Return a list of dnf.Package objects that represent the source rpms to download. """ pkgs = self._get_pkg_objs_rpms(pkg_specs) source_pkgs = self._get_source_packages(pkgs) pkgs = set(self._get_packages(source_pkgs, source=True)) return pkgs def _get_pkg_objs_debuginfo(self, pkg_specs): """ Return a list of dnf.Package objects that represent the debuginfo rpms to download. """ dbg_pkgs = set() q = self.base.sack.query().available() for pkg in self._get_packages(pkg_specs): for dbg_name in [pkg.debug_name, pkg.source_debug_name]: dbg_available = q.filter( name=dbg_name, epoch=int(pkg.epoch), version=pkg.version, release=pkg.release, arch=pkg.arch ) if not dbg_available: continue for p in dbg_available: dbg_pkgs.add(p) break return dbg_pkgs def _get_pkg_objs_debugsource(self, pkg_specs): """ Return a list of dnf.Package objects that represent the debugsource rpms to download. 
""" dbg_pkgs = set() q = self.base.sack.query().available() for pkg in self._get_packages(pkg_specs): dbg_available = q.filter( name=pkg.debugsource_name, epoch=int(pkg.epoch), version=pkg.version, release=pkg.release, arch=pkg.arch ) for p in dbg_available: dbg_pkgs.add(p) return dbg_pkgs def _get_packages(self, pkg_specs, source=False): """Get packages matching pkg_specs.""" func = self._get_query_source if source else self._get_query queries = [] for pkg_spec in pkg_specs: try: queries.append(func(pkg_spec)) except dnf.exceptions.PackageNotFoundError as e: logger.error(dnf.i18n.ucd(e)) if self.base.conf.strict: logger.error(_("Exiting due to strict setting.")) raise dnf.exceptions.Error(e) pkgs = list(itertools.chain(*queries)) return pkgs def _get_packages_with_deps(self, pkg_specs, source=False): """Get packages matching pkg_specs and the deps.""" pkgs = self._get_packages(pkg_specs) pkg_set = set(pkgs) for pkg in pkgs: goal = hawkey.Goal(self.base.sack) goal.install(pkg) rc = goal.run() if rc: pkg_set.update(goal.list_installs()) pkg_set.update(goal.list_upgrades()) else: msg = [_('Error in resolve of packages:')] logger.error("\n ".join(msg + [str(pkg) for pkg in pkgs])) logger.error(dnf.util._format_resolve_problems(goal.problem_rules())) raise dnf.exceptions.Error() return pkg_set @staticmethod def _get_source_packages(pkgs): """Get list of source rpm names for a list of packages.""" source_pkgs = set() for pkg in pkgs: if pkg.sourcerpm: source_pkgs.add(pkg.sourcerpm) logger.debug(' --> Package : %s Source : %s', str(pkg), pkg.sourcerpm) elif pkg.arch == 'src': source_pkgs.add("%s-%s.src.rpm" % (pkg.name, pkg.evr)) else: logger.info(_("No source rpm defined for %s"), str(pkg)) return list(source_pkgs) def _get_query(self, pkg_spec): """Return a query to match a pkg_spec.""" schemes = dnf.pycomp.urlparse.urlparse(pkg_spec)[0] is_url = schemes and schemes in ('http', 'ftp', 'file', 'https') if is_url or (pkg_spec.endswith('.rpm') and 
os.path.isfile(pkg_spec)): pkgs = self.base.add_remote_rpms([pkg_spec], progress=self.base.output.progress) return self.base.sack.query().filterm(pkg=pkgs) subj = dnf.subject.Subject(pkg_spec) q = subj.get_best_query(self.base.sack, with_src=self.opts.source) q = q.available() q = q.filterm(latest_per_arch_by_priority=True) if self.opts.arches: q = q.filter(arch=self.opts.arches) if len(q.run()) == 0: msg = _("No package %s available.") % (pkg_spec) raise dnf.exceptions.PackageNotFoundError(msg) return q def _get_query_source(self, pkg_spec): """Return a query to match a source rpm file name.""" pkg_spec = pkg_spec[:-4] # skip the .rpm subj = dnf.subject.Subject(pkg_spec) for nevra_obj in subj.get_nevra_possibilities(): tmp_query = nevra_obj.to_query(self.base.sack).available() if tmp_query: return tmp_query.latest() msg = _("No package %s available.") % (pkg_spec) raise dnf.exceptions.PackageNotFoundError(msg) universal_hooks.py000075500000013437151030231510010333 0ustar00#!/usr/bin/python3.6 # Copyright (c) 2020, cPanel, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import abc import glob import logging import os from os import path import re import subprocess import sys import tempfile from dnf import Plugin # this logger is configured by the dnf CLI, but error() is not shown by default (but is with -v) # LOG = logging.getLogger("dnf") LOG = logging.getLogger(__name__) LOG.setLevel(logging.ERROR) LOG.addHandler(logging.StreamHandler(sys.stderr)) class UniversalHooksPlugin(Plugin): name = 'universal-hooks' def __init__(self, base, cli): super().__init__(base, cli) self.hook_root = '/etc/dnf/universal-hooks' def pre_config(self): _run_dir(path.join(self.hook_root, self.pre_config.__name__), LOG) def config(self): _run_dir(path.join(self.hook_root, self.config.__name__), LOG) def resolved(self): _run_dir(path.join(self.hook_root, self.resolved.__name__), LOG) def sack(self): _run_dir(path.join(self.hook_root, self.sack.__name__), LOG) def pre_transaction(self): name = self.pre_transaction.__name__ _run_pkg_dirs(self.hook_root, LOG, name, DnfTransactionInfo(self.base.transaction)) _run_dir(path.join(self.hook_root, name), LOG) def transaction(self): name = self.transaction.__name__ _run_pkg_dirs(self.hook_root, LOG, name, DnfTransactionInfo(self.base.transaction)) _run_dir(path.join(self.hook_root, name), LOG) class FileSystem(metaclass=abc.ABCMeta): @abc.abstractmethod def glob(self, pathname): pass @abc.abstractmethod def isdir(self, pathname): pass @abc.abstractmethod def access(self, path, mode): pass 
def _run_dir(hook_dir, log, args=''):
    """Run every executable file directly inside *hook_dir*, in sorted order.

    Non-executable entries are reported via log.error; nested directories
    are skipped.  Returns None when *hook_dir* does not exist.
    """
    if not fs.isdir(hook_dir):
        return None

    for entry in sorted(fs.glob(hook_dir + "/*")):
        # Only plain files count as hook scripts.
        if fs.isdir(entry):
            continue

        if not fs.access(entry, os.X_OK):
            log.error("!!! %s is not executable", entry)
            continue

        cmdline = f'{entry} {args}'
        # todo change args to a list, shell=False
        completed = subprocess.run(cmdline, shell=True)
        if completed.returncode != 0:
            log.error("!!! %s did not exit cleanly: %d", cmdline, completed.returncode)
_run_dir(path.join(wildcard_path, wildcard_dir), log, "--pkg_list=" + temp_pkg_file.name) def _make_dir_matchers(wc_slot_dir): dir_matchers = {} for pth in fs.glob(wc_slot_dir + "/*"): if fs.isdir(pth): pth = path.basename(path.normpath(pth)) dir_matchers[pth] = _regex_from_dir(pth) return dir_matchers def _regex_from_dir(path): expr = path.replace("__WILDCARD__", ".*") return re.compile("^" + expr + "$") changelog.py000064400000011547151030231510007044 0ustar00# changelog.py # DNF plugin adding a command changelog. # # Copyright (C) 2014 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals import argparse import collections import dateutil.parser from dnfpluginscore import _, P_, logger import dnf import dnf.cli def validate_date(val): try: return dateutil.parser.parse(val, fuzzy=True) except (ValueError, TypeError, OverflowError): raise argparse.ArgumentTypeError(_('Not a valid date: "{0}".').format(val)) @dnf.plugin.register_command class ChangelogCommand(dnf.cli.Command): aliases = ('changelog',) summary = _('Show changelog data of packages') @staticmethod def set_argparser(parser): filter_group = parser.add_mutually_exclusive_group() filter_group.add_argument( '--since', metavar="DATE", default=None, type=validate_date, help=_('show changelog entries since DATE. To avoid ambiguosity, ' 'YYYY-MM-DD format is recommended.')) filter_group.add_argument( '--count', default=None, type=int, help=_('show given number of changelog entries per package')) filter_group.add_argument( '--upgrades', default=False, action='store_true', help=_('show only new changelog entries for packages, that provide an ' 'upgrade for some of already installed packages.')) parser.add_argument("package", nargs='*', metavar=_('PACKAGE')) def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.changelogs = True def query(self): q = self.base.sack.query() if self.opts.package: q.filterm(empty=True) for pkg in self.opts.package: pkg_q = dnf.subject.Subject(pkg, ignore_case=True).get_best_query( self.base.sack, with_nevra=True, with_provides=False, with_filenames=False) if self.opts.repo: pkg_q.filterm(reponame=self.opts.repo) if pkg_q: q = q.union(pkg_q.latest()) else: logger.info(_('No match for argument: %s') % pkg) elif self.opts.repo: q.filterm(reponame=self.opts.repo) if self.opts.upgrades: q = q.upgrades() else: q = q.available() return q def by_srpm(self, packages): by_srpm = collections.OrderedDict() for pkg in sorted(packages): 
by_srpm.setdefault((pkg.source_name or pkg.name, pkg.evr), []).append(pkg) return by_srpm def filter_changelogs(self, package): if self.opts.upgrades: return self.base.latest_changelogs(package) elif self.opts.count: return package.changelogs[:self.opts.count] elif self.opts.since: return [chlog for chlog in package.changelogs if chlog['timestamp'] >= self.opts.since.date()] else: return package.changelogs def run(self): if self.opts.since: logger.info(_('Listing changelogs since {}').format(self.opts.since)) elif self.opts.count: logger.info(P_('Listing only latest changelog', 'Listing {} latest changelogs', self.opts.count).format(self.opts.count)) elif self.opts.upgrades: logger.info( _('Listing only new changelogs since installed version of the package')) else: logger.info(_('Listing all changelogs')) by_srpm = self.by_srpm(self.query()) for name in by_srpm: print(_('Changelogs for {}').format( ', '.join(sorted({str(pkg) for pkg in by_srpm[name]})))) for chlog in self.filter_changelogs(by_srpm[name][0]): print(self.base.format_changelog(chlog)) system_upgrade.py000064400000064251151030231510010150 0ustar00# -*- coding: utf-8 -*- # # Copyright (c) 2015-2020 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see . 
# # Author(s): Will Woods """system_upgrade.py - DNF plugin to handle major-version system upgrades.""" from subprocess import call, Popen, check_output, CalledProcessError import json import os import os.path import re import sys import uuid from systemd import journal from dnfpluginscore import _, logger import dnf import dnf.cli from dnf.cli import CliError from dnf.i18n import ucd import dnf.transaction from dnf.transaction_sr import serialize_transaction, TransactionReplay import libdnf.conf # Translators: This string is only used in unit tests. _("the color of the sky") DOWNLOAD_FINISHED_ID = uuid.UUID('9348174c5cc74001a71ef26bd79d302e') REBOOT_REQUESTED_ID = uuid.UUID('fef1cc509d5047268b83a3a553f54b43') UPGRADE_STARTED_ID = uuid.UUID('3e0a5636d16b4ca4bbe5321d06c6aa62') UPGRADE_FINISHED_ID = uuid.UUID('8cec00a1566f4d3594f116450395f06c') ID_TO_IDENTIFY_BOOTS = UPGRADE_STARTED_ID PLYMOUTH = '/usr/bin/plymouth' RELEASEVER_MSG = _( "Need a --releasever greater than the current system version.") DOWNLOAD_FINISHED_MSG = _( # Translators: do not change "reboot" here "Download complete! 
def reboot():
    """Reboot via systemd, unless disabled by the test/automation env var."""
    if os.getenv("DNF_SYSTEM_UPGRADE_NO_REBOOT", default=False):
        logger.info(_("Reboot turned off, not rebooting."))
    else:
        Popen(["systemctl", "reboot"])


def get_url_from_os_release():
    """Return the UPGRADE_GUIDE_URL value from os-release, or None."""
    key = "UPGRADE_GUIDE_URL="
    for path in ["/etc/os-release", "/usr/lib/os-release"]:
        try:
            with open(path) as release_file:
                for line in release_file:
                    line = line.strip()
                    if line.startswith(key):
                        return line[len(key):].strip('"')
        except IOError:
            continue
    return None


# DNF-FIXME: dnf.util.clear_dir() doesn't delete regular files :/
def clear_dir(path, ignore=()):
    """Best-effort removal of every entry inside *path*.

    :param path: directory to empty; silently a no-op if not a directory
    :param ignore: iterable of full paths to keep
                   (default was a mutable list; now an immutable tuple)
    Failures to remove individual entries are deliberately swallowed so that
    cleanup removes as much as it can.
    """
    if not os.path.isdir(path):
        return

    for entry in os.listdir(path):
        fullpath = os.path.join(path, entry)
        if fullpath in ignore:
            continue
        try:
            if os.path.isdir(fullpath):
                dnf.util.rm_rf(fullpath)
            else:
                os.unlink(fullpath)
        except OSError:
            # best-effort: leave entries we cannot delete in place
            pass
class State(object):
    """Track system-upgrade state between runs in a small JSON file.

    DNF-INTEGRATION-NOTE: basically the same thing as dnf.persistor.JSONDB.
    """

    def __init__(self, statefile):
        self.statefile = statefile
        self._data = {}
        self._read()

    def _read(self):
        """Load self._data from disk, falling back to an empty dict."""
        try:
            with open(self.statefile) as handle:
                self._data = json.load(handle)
        except IOError:
            # Missing or unreadable file: silently start from scratch.
            self._data = {}
        except ValueError:
            # Corrupt JSON: start from scratch, but warn the user.
            self._data = {}
            logger.warning(_("Failed loading state file: %s, continuing with "
                             "empty state."), self.statefile)

    def write(self):
        """Serialize the current state to disk, creating its directory."""
        dnf.util.ensure_dir(os.path.dirname(self.statefile))
        with open(self.statefile, 'w') as handle:
            json.dump(self._data, handle, indent=4, sort_keys=True)

    def clear(self):
        """Delete the state file and reset to empty state."""
        if os.path.exists(self.statefile):
            os.unlink(self.statefile)
        self._read()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Persist only when the `with` block exits cleanly.
        if exc_type is None:
            self.write()

    # helper function for creating properties. pylint: disable=protected-access
    def _prop(option):  # pylint: disable=no-self-argument
        def getprop(self):
            return self._data.get(option)

        def setprop(self, value):
            self._data[option] = value

        return property(getprop, setprop)

    # !!! Increase STATE_VERSION for any changes in data structure like a new
    # property or a new data structure !!!
    state_version = _prop("state_version")
    download_status = _prop("download_status")
    destdir = _prop("destdir")
    target_releasever = _prop("target_releasever")
    system_releasever = _prop("system_releasever")
    gpgcheck = _prop("gpgcheck")
    # list of repos with gpgcheck=True
    gpgcheck_repos = _prop("gpgcheck_repos")
    # list of repos with repo_gpgcheck=True
    repo_gpgcheck_repos = _prop("repo_gpgcheck_repos")
    upgrade_status = _prop("upgrade_status")
    upgrade_command = _prop("upgrade_command")
    distro_sync = _prop("distro_sync")
    enable_disable_repos = _prop("enable_disable_repos")
    module_platform_id = _prop("module_platform_id")
class PlymouthTransactionProgress(dnf.callback.TransactionProgress):
    """Transaction progress reporter that forwards status to plymouth."""

    # pylint: disable=too-many-arguments
    def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
        self._update_plymouth(package, action, ts_done, ts_total)

    def _update_plymouth(self, package, action, current, total):
        # Prevents quick jumps of progressbar when pretrans scriptlets
        # and TRANS_PREPARATION are reported as 1/1
        if total == 1:
            return
        # Verification goes through all the packages again,
        # which resets the "current" param value, this prevents
        # resetting of the progress bar as well. (Rhbug:1809096)
        if action != dnf.callback.PKG_VERIFY:
            Plymouth.progress(int(90.0 * current / total))
        else:
            Plymouth.progress(90 + int(10.0 * current / total))
        Plymouth.message(self._fmt_event(package, action, current, total))

    def _fmt_event(self, package, action, current, total):
        action = dnf.transaction.ACTIONS.get(action, action)
        return "[%d/%d] %s %s..." % (current, total, action, package)

# --- journal helpers -------------------------------------------------


def find_boots(message_id):
    """Find all boots with this message id.

    Returns the entries of all found boots.
    """
    j = journal.Reader()
    j.add_match(MESSAGE_ID=message_id.hex,  # identify the message
                _UID=0)                     # prevent spoofing of logs
    oldboot = None
    for entry in j:
        boot = entry['_BOOT_ID']
        if boot == oldboot:
            continue
        oldboot = boot
        yield entry


def list_logs():
    print(_('The following boots appear to contain upgrade logs:'))
    n = -1
    for n, entry in enumerate(find_boots(ID_TO_IDENTIFY_BOOTS)):
        print('{} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}'.format(
            n + 1,
            entry['_BOOT_ID'],
            entry['__REALTIME_TIMESTAMP'],
            entry.get('SYSTEM_RELEASEVER', '??'),
            entry.get('TARGET_RELEASEVER', '??')))
    if n == -1:
        print(_('-- no logs were found --'))


def pick_boot(message_id, n):
    boots = list(find_boots(message_id))
    # Positive indices index all found boots starting with 1 and going
    # forward, zero is the current boot, and -1, -2, -3 are previous going
    # backwards.  This is the same as journalctl.
    try:
        if n == 0:
            raise IndexError
        if n > 0:
            n -= 1
        return boots[n]['_BOOT_ID']
    except IndexError:
        raise CliError(_("Cannot find logs with this index."))


def show_log(n):
    boot_id = pick_boot(ID_TO_IDENTIFY_BOOTS, n)
    process = Popen(['journalctl', '--boot', boot_id.hex])
    process.wait()
    rc = process.returncode
    if rc == 1:
        raise dnf.exceptions.Error(_("Unable to match systemd journal entry"))


CMDS = ['download', 'clean', 'reboot', 'upgrade', 'log']

# --- The actual Plugin and Command objects! ----------------------------------


class SystemUpgradePlugin(dnf.Plugin):
    name = 'system-upgrade'

    def __init__(self, base, cli):
        super(SystemUpgradePlugin, self).__init__(base, cli)
        if cli:
            cli.register_command(SystemUpgradeCommand)
            cli.register_command(OfflineUpgradeCommand)
            cli.register_command(OfflineDistrosyncCommand)


class SystemUpgradeCommand(dnf.cli.Command):
    aliases = ('system-upgrade', 'fedup',)
    summary = _("Prepare system for upgrade to a new release")

    DATADIR = 'var/lib/dnf/system-upgrade'

    def __init__(self, cli):
        super(SystemUpgradeCommand, self).__init__(cli)
        self.datadir = os.path.join(cli.base.conf.installroot, self.DATADIR)
        self.transaction_file = os.path.join(self.datadir, 'system-upgrade-transaction.json')
        self.magic_symlink = os.path.join(cli.base.conf.installroot, 'system-update')
        self.state = State(os.path.join(self.datadir, 'system-upgrade-state.json'))

    @staticmethod
    def set_argparser(parser):
        parser.add_argument("--no-downgrade", dest='distro_sync',
                            action='store_false',
                            help=_("keep installed packages if the new "
                                   "release's version is older"))
        parser.add_argument('tid', nargs=1, choices=CMDS,
                            metavar="[%s]" % "|".join(CMDS))
        parser.add_argument('--number', type=int, help=_('which logs to show'))

    def log_status(self, message, message_id):
        """Log directly to the journal."""
        journal.send(message,
                     MESSAGE_ID=message_id,
                     PRIORITY=journal.LOG_NOTICE,
                     SYSTEM_RELEASEVER=self.state.system_releasever,
                     TARGET_RELEASEVER=self.state.target_releasever,
                     DNF_VERSION=dnf.const.VERSION)

    def pre_configure(self):
        self._call_sub("check")
        self._call_sub("pre_configure")

    def configure(self):
        self._call_sub("configure")

    def run(self):
        self._call_sub("run")

    def run_transaction(self):
        self._call_sub("transaction")

    def run_resolved(self):
        self._call_sub("resolved")

    def _call_sub(self, name):
        # Dispatch to e.g. "configure_download" based on the subcommand word.
        subfunc = getattr(self, name + '_' + self.opts.tid[0], None)
        if callable(subfunc):
            subfunc()

    def _check_state_version(self, command):
        if self.state.state_version != STATE_VERSION:
            msg = _("Incompatible version of data. Rerun 'dnf {command} download [OPTIONS]'"
                    "").format(command=command)
            raise CliError(msg)

    def _set_cachedir(self):
        # set download directories from json state file
        self.base.conf.cachedir = self.datadir
        self.base.conf.destdir = self.state.destdir if self.state.destdir else None

    def _get_forward_reverse_pkg_reason_pairs(self):
        """
        forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}}
        reverse = {pkg_nevra: {tsi.action: tsi.reason}}
        :return: forward, reverse
        """
        backward_action = set(dnf.transaction.BACKWARD_ACTIONS
                              + [libdnf.transaction.TransactionItemAction_REINSTALLED])
        forward_actions = set(dnf.transaction.FORWARD_ACTIONS)

        forward = {}
        reverse = {}
        for tsi in self.cli.base.transaction:
            if tsi.action in forward_actions:
                pkg = tsi.pkg
                forward.setdefault(pkg.repo.id, {}).setdefault(
                    str(pkg), {})[tsi.action] = tsi.reason
            elif tsi.action in backward_action:
                reverse.setdefault(str(tsi.pkg), {})[tsi.action] = tsi.reason
        return forward, reverse

    # == pre_configure_*: set up action-specific demands ==========================

    def pre_configure_download(self):
        # only download subcommand accepts --destdir command line option
        self.base.conf.cachedir = self.datadir
        self.base.conf.destdir = self.opts.destdir if self.opts.destdir else None
        if 'offline-distrosync' == self.opts.command and not self.opts.distro_sync:
            raise CliError(
                _("Command 'offline-distrosync' cannot be used with --no-downgrade option"))
        elif 'offline-upgrade' == self.opts.command:
            self.opts.distro_sync = False

    def pre_configure_reboot(self):
        self._set_cachedir()

    def pre_configure_upgrade(self):
        self._set_cachedir()
        if self.state.enable_disable_repos:
            self.opts.repos_ed = self.state.enable_disable_repos
        self.base.conf.releasever = self.state.target_releasever

    def pre_configure_clean(self):
        self._set_cachedir()

    # == configure_*: set up action-specific demands ==========================

    def configure_download(self):
        if 'system-upgrade' == self.opts.command or 'fedup' == self.opts.command:
            logger.warning(_('WARNING: this operation is not supported on the RHEL distribution. '
                             'Proceed at your own risk.'))
            help_url = get_url_from_os_release()
            if help_url:
                msg = _('Additional information for System Upgrade: {}')
                logger.info(msg.format(ucd(help_url)))
            if self.base._promptWanted():
                msg = _('Before you continue ensure that your system is fully upgraded by running '
                        '"dnf --refresh upgrade". Do you want to continue')
                if self.base.conf.assumeno or not self.base.output.userconfirm(
                        msg='{} [y/N]: '.format(msg), defaultyes_msg='{} [Y/n]: '.format(msg)):
                    logger.error(_("Operation aborted."))
                    sys.exit(1)
            check_release_ver(self.base.conf, target=self.opts.releasever)
        elif 'offline-upgrade' == self.opts.command:
            self.cli._populate_update_security_filter(self.opts)

        self.cli.demands.root_user = True
        self.cli.demands.resolving = True
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True
        self.cli.demands.freshest_metadata = True
        # We want to do the depsolve / download / transaction-test, but *not*
        # run the actual RPM transaction to install the downloaded packages.
        # Setting the "test" flag makes the RPM transaction a test transaction,
        # so nothing actually gets installed.
        # (It also means that we run two test transactions in a row, which is
        # kind of silly, but that's something for DNF to fix...)
        self.base.conf.tsflags += ["test"]

    def configure_reboot(self):
        # FUTURE: add a --debug-shell option to enable debug shell:
        # systemctl add-wants system-update.target debug-shell.service
        self.cli.demands.root_user = True

    def configure_upgrade(self):
        # same as the download, but offline and non-interactive. so...
        self.cli.demands.root_user = True
        self.cli.demands.resolving = True
        self.cli.demands.available_repos = True
        self.cli.demands.sack_activation = True
        # use the saved value for --allowerasing, etc.
        self.opts.distro_sync = self.state.distro_sync
        if self.state.gpgcheck is not None:
            self.base.conf.gpgcheck = self.state.gpgcheck
        if self.state.gpgcheck_repos is not None:
            for repo in self.base.repos.values():
                repo.gpgcheck = repo.id in self.state.gpgcheck_repos
        if self.state.repo_gpgcheck_repos is not None:
            for repo in self.base.repos.values():
                repo.repo_gpgcheck = repo.id in self.state.repo_gpgcheck_repos
        self.base.conf.module_platform_id = self.state.module_platform_id
        # don't try to get new metadata, 'cuz we're offline
        self.cli.demands.cacheonly = True
        # and don't ask any questions (we confirmed all this beforehand)
        self.base.conf.assumeyes = True
        self.cli.demands.transaction_display = PlymouthTransactionProgress()
        # upgrade operation already removes all element that must be removed.
        # Additional removal could trigger unwanted changes in transaction.
        self.base.conf.clean_requirements_on_remove = False
        self.base.conf.install_weak_deps = False

    def configure_clean(self):
        self.cli.demands.root_user = True

    def configure_log(self):
        pass

    # == check_*: do any action-specific checks ===============================

    def check_reboot(self):
        if not self.state.download_status == 'complete':
            raise CliError(_("system is not ready for upgrade"))
        self._check_state_version(self.opts.command)
        if self.state.upgrade_command != self.opts.command:
            msg = _("the transaction was not prepared for '{command}'. "
                    "Rerun 'dnf {command} download [OPTIONS]'").format(command=self.opts.command)
            raise CliError(msg)
        if os.path.lexists(self.magic_symlink):
            raise CliError(_("upgrade is already scheduled"))
        dnf.util.ensure_dir(self.datadir)
        # FUTURE: checkRPMDBStatus(self.state.download_transaction_id)

    def check_upgrade(self):
        if not os.path.lexists(self.magic_symlink):
            logger.info(_("trigger file does not exist. exiting quietly."))
            raise SystemExit(0)
        if os.readlink(self.magic_symlink) != self.datadir:
            logger.info(_("another upgrade tool is running. exiting quietly."))
            raise SystemExit(0)
        # Delete symlink ASAP to avoid reboot loops
        dnf.yum.misc.unlink_f(self.magic_symlink)
        command = self.state.upgrade_command
        if not command:
            command = self.opts.command
        self._check_state_version(command)
        if not self.state.upgrade_status == 'ready':
            msg = _("use 'dnf {command} reboot' to begin the upgrade").format(command=command)
            raise CliError(msg)

    # == run_*: run the action/prep the transaction ===========================

    def run_prepare(self):
        # make the magic symlink
        os.symlink(self.datadir, self.magic_symlink)
        # set upgrade_status so that the upgrade can run
        with self.state as state:
            state.upgrade_status = 'ready'

    def run_reboot(self):
        self.run_prepare()
        # the "reboot" command also actually reboots; other commands that
        # reuse this preparation step do not.
        if not self.opts.tid[0] == "reboot":
            return
        self.log_status(_("Rebooting to perform upgrade."), REBOOT_REQUESTED_ID)
        reboot()

    def run_download(self):
        # Mark everything in the world for upgrade/sync
        if self.opts.distro_sync:
            self.base.distro_sync()
        else:
            self.base.upgrade_all()

        if self.opts.command not in ['offline-upgrade', 'offline-distrosync']:
            # Mark all installed groups and environments for upgrade
            self.base.read_comps()
            installed_groups = [g.id for g in self.base.comps.groups
                                if self.base.history.group.get(g.id)]
            if installed_groups:
                self.base.env_group_upgrade(installed_groups)
            installed_environments = [g.id for g in self.base.comps.environments
                                      if self.base.history.env.get(g.id)]
            if installed_environments:
                self.base.env_group_upgrade(installed_environments)

        with self.state as state:
            state.download_status = 'downloading'
            state.target_releasever = self.base.conf.releasever
            state.destdir = self.base.conf.destdir

    def run_upgrade(self):
        # change the upgrade status (so we can detect crashed upgrades later)
        command = ''
        with self.state as state:
            state.upgrade_status = 'incomplete'
            command = state.upgrade_command
        if command == 'offline-upgrade':
            msg = _("Starting offline upgrade. This will take a while.")
        elif command == 'offline-distrosync':
            msg = _("Starting offline distrosync. This will take a while.")
        else:
            msg = _("Starting system upgrade. This will take a while.")
        self.log_status(msg, UPGRADE_STARTED_ID)

        # reset the splash mode and let the user know we're running
        Plymouth.set_mode()
        Plymouth.progress(0)
        Plymouth.message(msg)

        # disable screen blanking
        disable_blanking()

        self.replay = TransactionReplay(self.base, self.transaction_file)
        self.replay.run()

    def run_clean(self):
        logger.info(_("Cleaning up downloaded data..."))
        # Don't delete persistor, it contains paths for downloaded packages
        # that are used by dnf during finalizing base to clean them up
        clear_dir(self.base.conf.cachedir,
                  [dnf.persistor.TempfilePersistor(self.base.conf.cachedir).db_path])
        with self.state as state:
            state.download_status = None
            state.state_version = None
            state.upgrade_status = None
            state.upgrade_command = None
            state.destdir = None

    def run_log(self):
        if self.opts.number:
            show_log(self.opts.number)
        else:
            list_logs()

    # == resolved_*: do stuff after successful resolvement ====================

    def resolved_upgrade(self):
        """Adjust transaction reasons according to stored values"""
        self.replay.post_transaction()

    # == transaction_*: do stuff after a successful transaction ===============

    def transaction_download(self):
        transaction = self.base.history.get_current()

        if not transaction.packages():
            logger.info(_("The system-upgrade transaction is empty, your system is already up-to-date."))
            return

        data = serialize_transaction(transaction)
        try:
            with open(self.transaction_file, "w") as f:
                json.dump(data, f, indent=4, sort_keys=True)
                f.write("\n")
            print(_("Transaction saved to {}.").format(self.transaction_file))
        except OSError as e:
            raise dnf.cli.CliError(_('Error storing transaction: {}').format(str(e)))

        # Okay! Write out the state so the upgrade can use it.
        system_ver = dnf.rpm.detect_releasever(self.base.conf.installroot)
        with self.state as state:
            state.download_status = 'complete'
            state.state_version = STATE_VERSION
            state.distro_sync = self.opts.distro_sync
            state.gpgcheck = self.base.conf.gpgcheck
            state.gpgcheck_repos = [
                repo.id for repo in self.base.repos.values() if repo.gpgcheck]
            state.repo_gpgcheck_repos = [
                repo.id for repo in self.base.repos.values() if repo.repo_gpgcheck]
            state.system_releasever = system_ver
            state.target_releasever = self.base.conf.releasever
            state.module_platform_id = self.base.conf.module_platform_id
            state.enable_disable_repos = self.opts.repos_ed
            state.destdir = self.base.conf.destdir
            state.upgrade_command = self.opts.command

        msg = DOWNLOAD_FINISHED_MSG.format(command=self.opts.command)
        logger.info(msg)
        self.log_status(_("Download finished."), DOWNLOAD_FINISHED_ID)

    def transaction_upgrade(self):
        Plymouth.message(_("Upgrade complete! Cleaning up and rebooting..."))
        self.log_status(_("Upgrade complete! Cleaning up and rebooting..."),
                        UPGRADE_FINISHED_ID)
        self.run_clean()
        if self.opts.tid[0] == "upgrade":
            reboot()


class OfflineUpgradeCommand(SystemUpgradeCommand):
    aliases = ('offline-upgrade',)
    summary = _("Prepare offline upgrade of the system")


class OfflineDistrosyncCommand(SystemUpgradeCommand):
    aliases = ('offline-distrosync',)
    summary = _("Prepare offline distrosync of the system")

# NOTE(review): extraction artifact — the tar member header for
# "debuginfo-install.py" and that file's first comment lines were fused onto
# the end of this chunk; the next archive member continues below.
#
# Copyright (C) 2014 Igor Gnatenko
# Copyright (C) 2014-2019 Red Hat
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from dnfpluginscore import _, logger

import dnf
from dnf.package import Package


class DebuginfoInstall(dnf.Plugin):
    """DNF plugin supplying the 'debuginfo-install' command."""

    name = 'debuginfo-install'

    def __init__(self, base, cli):
        """Initialize the plugin instance."""
        super(DebuginfoInstall, self).__init__(base, cli)
        self.base = base
        self.cli = cli
        if cli is not None:
            cli.register_command(DebuginfoInstallCommand)

    def config(self):
        # Honour the plugin's own config file: [main] autoupdate=True means
        # debug repos are enabled whenever debuginfo packages are installed.
        cp = self.read_config(self.base.conf)
        autoupdate = (cp.has_section('main')
                      and cp.has_option('main', 'autoupdate')
                      and cp.getboolean('main', 'autoupdate'))

        if autoupdate:
            # allow update of already installed debuginfo packages
            dbginfo = dnf.sack._rpmdb_sack(self.base).query().filterm(
                name__glob="*-debuginfo")
            if len(dbginfo):
                self.base.repos.enable_debug_repos()


class DebuginfoInstallCommand(dnf.cli.Command):
    """ DebuginfoInstall plugin for DNF """

    aliases = ("debuginfo-install",)
    summary = _('install debuginfo packages')

    def __init__(self, cli):
        super(DebuginfoInstallCommand, self).__init__(cli)
        # Accumulators for the summary messages printed at the end of run().
        self.available_debuginfo_missing = set()
        self.available_debugsource_missing = set()
        self.installed_debuginfo_missing = set()
        self.installed_debugsource_missing = set()

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('package', nargs='+')

    def configure(self):
        demands = self.cli.demands
        demands.resolving = True
        demands.root_user = True
        demands.sack_activation = True
        demands.available_repos = True
        self.base.repos.enable_debug_repos()

    def run(self):
        errors_spec = []
        debuginfo_suffix_len = len(Package.DEBUGINFO_SUFFIX)
        debugsource_suffix_len = len(Package.DEBUGSOURCE_SUFFIX)

        for pkgspec in self.opts.package:
            solution = dnf.subject.Subject(pkgspec).get_best_solution(
                self.base.sack, with_src=False)
            query = solution["query"]
            if not query:
                logger.info(_('No match for argument: %s'),
                            self.base.output.term.bold(pkgspec))
                errors_spec.append(pkgspec)
                continue
            package_dict = query.available()._name_dict()
            # installed versions of packages have priority, replace / add
            # them to the dict
            package_dict.update(query.installed()._name_dict())

            # Remove debuginfo packages if their base packages are in the query.
            # They can get there through globs and they break the installation
            # of debug packages with the same version as the installed base
            # packages. If the base package of a debuginfo package is not in
            # the query, the user specified a debug package on the command
            # line. We don't want to ignore those, so we will install them.
            # But, in this case the version will not be matched to the
            # installed version of the base package, as that would require
            # another query and is further complicated if the user specifies a
            # version themselves etc.
            for name in list(package_dict.keys()):
                if name.endswith(Package.DEBUGINFO_SUFFIX):
                    if name[:-debuginfo_suffix_len] in package_dict:
                        package_dict.pop(name)
                if name.endswith(Package.DEBUGSOURCE_SUFFIX):
                    if name[:-debugsource_suffix_len] in package_dict:
                        package_dict.pop(name)

            # attempt to install debuginfo and debugsource for the highest
            # listed version of the package (in case the package is installed,
            # only the installed version is listed)
            for pkgs in package_dict.values():
                first_pkg = pkgs[0]

                # for packages from system (installed) there can be more
                # packages with different architectures listed and we want to
                # install debuginfo for all of them
                if first_pkg._from_system:
                    # we need to split them by architectures and install the
                    # latest version for each architecture
                    arch_dict = {}
                    for pkg in pkgs:
                        arch_dict.setdefault(pkg.arch, []).append(pkg)
                    for package_arch_list in arch_dict.values():
                        pkg = package_arch_list[0]
                        if not self._install_debug_from_system(pkg.debug_name, pkg):
                            if not self._install_debug_from_system(
                                    pkg.source_debug_name, pkg):
                                self.installed_debuginfo_missing.add(str(pkg))
                        if not self._install_debug_from_system(pkg.debugsource_name, pkg):
                            self.installed_debugsource_missing.add(str(pkg))
                    continue

                # if the package in question is -debuginfo or -debugsource,
                # install it directly
                if first_pkg.name.endswith(Package.DEBUGINFO_SUFFIX) \
                        or first_pkg.name.endswith(Package.DEBUGSOURCE_SUFFIX):
                    # pass all pkgs to the solver, it will pick the best one
                    self._install(pkgs)
                    continue

                # if we have NEVRA parsed from the pkgspec, use it to install
                # the package
                if solution["nevra"] is not None:
                    if not self._install_debug(first_pkg.debug_name, solution["nevra"]):
                        if not self._install_debug(first_pkg.source_debug_name,
                                                   solution["nevra"]):
                            self.available_debuginfo_missing.add(
                                "{}-{}".format(first_pkg.name, first_pkg.evr))
                    if not self._install_debug(first_pkg.debugsource_name,
                                               solution["nevra"]):
                        self.available_debugsource_missing.add(
                            "{}-{}".format(first_pkg.name, first_pkg.evr))
                    continue

                # if we don't have NEVRA from the pkgspec, pass nevras from
                # all packages that were found (while replacing the name with
                # the -debuginfo and -debugsource variant) to the solver, which
                # will pick the correct version and architecture
                if not self._install_debug_no_nevra(first_pkg.debug_name, pkgs):
                    if not self._install_debug_no_nevra(first_pkg.source_debug_name, pkgs):
                        self.available_debuginfo_missing.add(
                            "{}-{}".format(first_pkg.name, first_pkg.evr))
                if not self._install_debug_no_nevra(first_pkg.debugsource_name, pkgs):
                    self.available_debugsource_missing.add(
                        "{}-{}".format(first_pkg.name, first_pkg.evr))

        if self.available_debuginfo_missing:
            logger.info(
                _("Could not find debuginfo package for the following available packages: %s"),
                ", ".join(sorted(self.available_debuginfo_missing)))

        if self.available_debugsource_missing:
            logger.info(
                _("Could not find debugsource package for the following available packages: %s"),
                ", ".join(sorted(self.available_debugsource_missing)))

        if self.installed_debuginfo_missing:
            logger.info(
                _("Could not find debuginfo package for the following installed packages: %s"),
                ", ".join(sorted(self.installed_debuginfo_missing)))

        if self.installed_debugsource_missing:
            logger.info(
                _("Could not find debugsource package for the following installed packages: %s"),
                ", ".join(sorted(self.installed_debugsource_missing)))

        if errors_spec and self.base.conf.strict:
            raise dnf.exceptions.PackagesNotAvailableError(
                _("Unable to find a match"), pkg_spec=' '.join(errors_spec))

    def _install_debug_from_system(self, debug_name, pkg):
        # Exact EVRA match against the installed base package.
        query = self.base.sack.query().filter(name=debug_name,
                                              epoch=pkg.epoch,
                                              version=pkg.version,
                                              release=pkg.release,
                                              arch=pkg.arch)
        if query:
            self._install(query)
            return True
        return False

    def _install_debug(self, debug_name, base_nevra):
        kwargs = {}
        # if some part of EVRA was specified in the argument, add it as a filter
        if base_nevra.epoch is not None:
            kwargs["epoch__glob"] = base_nevra.epoch
        if base_nevra.version is not None:
            kwargs["version__glob"] = base_nevra.version
        if base_nevra.release is not None:
            kwargs["release__glob"] = base_nevra.release
        if base_nevra.arch is not None:
            kwargs["arch__glob"] = base_nevra.arch
        query = self.base.sack.query().filter(name=debug_name, **kwargs)
        if query:
            self._install(query)
            return True
        return False

    def _install_debug_no_nevra(self, debug_name, pkgs):
        query = self.base.sack.query().filterm(
            nevra_strict=["{}-{}.{}".format(debug_name, p.evr, p.arch) for p in pkgs])
        if query:
            self._install(query)
            return True
        return False

    def _install(self, pkgs):
        selector = dnf.selector.Selector(self.base.sack)
        selector.set(pkg=pkgs)
        self.base.goal.install(select=selector, optional=not self.base.conf.strict)

# NOTE(review): extraction artifact — a binary tar member
# (__pycache__/config_manager.cpython-36.pyc) begins after this point; that
# compiled-bytecode residue is not source code and is intentionally not
# reproduced.
--add-repoz--dumpz--dump-variablesz --set-enabledz--enablez--set-disabledz --disablez{Warning: --enablerepo/--disablerepo arguments have no meaningwith config manager. Use --set-enabled/--set-disabled instead.cSsg|]}|dkr|jdqS),)split).0xrrr _sz2ConfigManagerCommand.configure..cSs"g|]}|D]}|dkr |q qS)r)rZsublistitemrrrr as)clidemandsZavailable_reposoptsadd_reposavedumpdump_variablesrrZ optparsererrorrformatjoinZrepos_edrZwarningZ root_userr )selfr$Z temp_listrrr configureBs*  zConfigManagerCommand.configurecCs|jjr|jn|jdS)zExecute the util action here.N)r%r& modify_repo)r-rrrrunds zConfigManagerCommand.runc sgtfdd}jjrnxjjD]|dq.WtjdrxLjjjD]|dqZWn,tjdrxjjjD]|dqWrtjjt ddj j j }i}tjdrjj rjj }jjrx*j j jjD]\}td |fqWjj s0d jjkrjjr\|r\j j jj j jd |j|jjrtj jjd tj j jsd Sjjsjjrdj_xtD]}i}jjrd |d <njjrd|d <tjdr*x4jjjD]$\}}tj|j|r|j|qWjjrT|rTj j j|j|j|j|jjrtj jjd|jt|jqWd S)z< process --set-enabled, --set-disabled and --setopt options cs0jjj|}|sjn|r,j|dS)N)baseZreposZ get_matchingaddextend)keyZadd_matching_reposZmatching)matching_reposnamenot_matching_repos_idr-rr match_reposqs  z5ConfigManagerCommand.modify_repo..match_reposT repo_setoptsFzNo matching repo to modify: %s.z, main_setoptsz%s = %smainN1Zenabled0zrepo: )setr%r hasattrr9keysdnf exceptionsErrorrr,r1confr:r)Z substitutionsitemsprintr'Zwrite_raw_configfileZconfig_file_pathr(outputZ fmtSectionrrsortedfnmatchidupdateZrepofile) r-r8ZsbcZmodifyvalr Z repo_modifyrepoidZsetoptsr)r5r6r7r-rr/ks`          z ConfigManagerCommand.modify_repoc CsN|jjj}d}x|jjD]}tjjj|jdkrDdt j j |}t j td||jdrt j j|}t j j||}y6|jj|dd}tj|j|t j|d|jWn6tk r}z|d 7}t j|wWYd d }~XnXqt|}d jtjj|}t j j|d |}d |||f} t|| sqqW|rJtjj t!dd|d S)z process --add-repo option rr!zfile://zAdding repo from: %sz.repozw+)modeiNz$created by {} config-manager from {}z%s.repoz"[%s] name=%s baseurl=%s enabled=1 zConfiguration of repo failedzConfiguration of repos failed)"r1rDZ 
get_reposdirr%r&rApycompZurlparseschemeospathabspathrinforendswithbasenamer,ZurlopenshutilZcopy2r6chmodcloseIOErrorr*sanitize_url_to_fsr+util MAIN_PROG save_to_filerBrCr) r-Z myrepodirZ errors_counturlZdestnameferMZreponamecontentrrrr&s8         zConfigManagerCommand.add_repoN)__name__ __module__ __qualname__aliasesrr+rAr]r^Zsummary staticmethodrr.r0r/r&rrrrr"s  "BrcCspy4t|d }tjj||tj|dWdQRXWn6ttfk rj}ztj t d||dSd}~XnXdS)Nzw+iz&Could not save repo to repofile %s: %sFT) openrArPZ write_to_filerRrYr[OSErrorrr*r)filenamercfdrbrrrr_s  r_z^\w+:/*(\w+:|www\.)?z[?/:&#|~\*\[\]\(\)\'\\]+z^[,.]*z[,.]*$cCs*ybtj|r`tjjr&|jdjd}n:t|trB|jdjd}n |jd}t|t r`|jd}Wnt t t t fk r~YnXtjd|}tjd|}tjd|}tjd|}t|dkr|ddjd}dt|d }tj}|j||djd|d|d|j}d }tj|d|S) zReturn a filename suitable for the filesystem and for repo id Strips dangerous and common characters to create a filename we can use to store the cache in. Zidnazutf-8r!rNrOzE[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]) RE_SCHEMEmatchrArPZPY3encodedecode isinstancestrZunicodeUnicodeDecodeErrorUnicodeEncodeError UnicodeError TypeErrorsubRE_SLASHRE_BEGINRE_FINALlenrhashlibZsha256rKZ hexdigestre)r`parts lastindexZcsumZ allowed_regexrrrr\s.        
r\)Z __future__rrZdnfpluginscorerrrrAZdnf.cliZ dnf.pycompZdnf.utilrIrrRrrXZpluginZregister_commandr#ZCommandrr_compilerpr{r|r}r\rrrrs(   1    __pycache__/download.cpython-36.opt-1.pyc000064400000022456151030231510014150 0ustar003 gt`*0@sddlmZddlmZddlmZmZddlmZddlZddl Zddl Zddl Zddl Zddl ZddlZddlZddlZddlZejjGdddejjZdS))absolute_import)unicode_literals)_logger) OptionParserNcseZdZdgZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZddZd ddZd!ddZeddZddZddZZS)"DownloadCommandZdownloadz%Download package to current directorycs tt|j|d|_d|_dS)N)superr__init__optsparser)selfcli) __class__/usr/lib/python3.6/download.pyr +szDownloadCommand.__init__c Cs|jddtdd|jddtdd|jd dtd d|jd dtd d|jd ddgtjdtdd|jddtdd|jddtdd|jddddtdd|jddddd d!ggtd"d#dS)$Npackages+zpackages to download)nargshelpz--source store_truezdownload the src.rpm instead)actionrz --debuginfoz'download the -debuginfo package insteadz --debugsourcez)download the -debugsource package insteadz--archz --archlistarchesz[arch]z5limit the query to packages of given architectures.)destdefaultrmetavarrz --resolvez(resolve and download needed dependenciesz --alldepsz^when running with --resolve, download all dependencies (do not exclude already installed ones)z--urlz--urlsurlzJprint list of urls where the rpms can be downloaded instead of downloading)rrrz--urlprotocolsappendhttphttpsZrsyncftpz4when running with --url, limit to specific protocols)rchoicesrr) add_argumentrrZ_SplitCallback)r rrr set_argparser0s*           zDownloadCommand.set_argparsercCs|jj}d|_d|_|jjr*|jjr*d|_|jjr>|j j j |jj sN|jj rZ|j j j|jjrr|jj|j j_ntjjtj|j j_dS)NTF)r demandsZsack_activationZavailable_reposr resolveZalldepsZload_system_reposourcebaseZreposZenable_source_repos debuginfo debugsourceZenable_debug_reposdestdirconfdnfi18nucdosgetcwd)r r#rrr configureKs  zDownloadCommand.configurecCs|jj r.|jj r.|jj r.|j|jj}nXg}|jjrN|j|j|jj|jjrj|j|j|jj|jjr|j|j |jj|jj rxd|D]\}|j t j kr|j|jjd}|rt|qtd|j}|jjjrtjj|tj|qWdS|j|dS)zExecute the util action here.)schemesz$Failed to get 
mirror for package: %sN)r r%r'r(_get_pkg_objs_rpmsrextend_get_pkg_objs_source_get_pkg_objs_debuginfo_get_pkg_objs_debugsourcerrepoidhawkeyCMDLINE_REPO_NAMEZremote_locationZ urlprotocolsprintrnamer&r*strictr+ exceptionsErrorrZwarning _do_downloads)r pkgspkgrmsgrrrrun^s.        zDownloadCommand.runc Csi}x"|D]}|jt|gj|q Wg}g}xP|jD]D}dd|D}|r`|j|dq:|jddd|j|dq:W|r|jj||jjj|rx^|D]V}|j }t j j |jj jt j j|} t j j| rt j j|| rqtj||jj jqWtdd||D} | S)z= Perform the download for a list of packages cSsg|]}|jtjkr|qSr)r7r8r9).0rArrr sz1DownloadCommand._do_downloads..rcSs|jj|jjfS)N)ZrepoZpriorityZcost)xrrrsz/DownloadCommand._do_downloads..)keycSsg|] }|jqSr)localPkg)rDrArrrrEs) setdefaultstrrvaluessortr&Zdownload_packagesoutputprogressrIr.pathjoinr*r)basenameexistssamefileshutilcopysorted) r r@Zpkg_dictrAZ to_downloadZcmdlineZpkg_listZ pkgs_cmdlinesrcdstZ locationsrrrr?s.  zDownloadCommand._do_downloadscCs"|jjr|j|}n |j|}|S)zc Return a list of dnf.Package objects that represent the rpms to download. )r r$_get_packages_with_deps _get_packages)r pkg_specsr@rrrr2s  z"DownloadCommand._get_pkg_objs_rpmscCs*|j|}|j|}t|j|dd}|S)zj Return a list of dnf.Package objects that represent the source rpms to download. T)r%)r2_get_source_packagessetr[)r r\r@ source_pkgsrrrr4s  z$DownloadCommand._get_pkg_objs_sourcec Cst}|jjjj}xh|j|D]Z}xT|j|jgD]D}|j|t |j |j |j |j d}|s^q4x|D]}|j|qdWPq4Wq"W|S)zm Return a list of dnf.Package objects that represent the debuginfo rpms to download. )r;epochversionreleasearch)r^r&sackquery availabler[Z debug_nameZsource_debug_namefilterintr`rarbrcadd)r r\dbg_pkgsqrAZdbg_name dbg_availableprrrr5s    z'DownloadCommand._get_pkg_objs_debuginfocCsht}|jjjj}xL|j|D]>}|j|jt|j |j |j |j d}x|D]}|j |qNWq"W|S)zo Return a list of dnf.Package objects that represent the debugsource rpms to download. 
)r;r`rarbrc)r^r&rdrerfr[rgZdebugsource_namerhr`rarbrcri)r r\rjrkrArlrmrrrr6s  z)DownloadCommand._get_pkg_objs_debugsourceFcCs|r |jn|j}g}x||D]t}y|j||Wqtjjk r}zlist itertoolschain)r r\r%funcZqueriespkg_specer@rrrr[s  "zDownloadCommand._get_packagesc Cs|j|}t|}x|D]}tj|jj}|j||j}|r^|j|j |j|j qt dg}t j dj|dd|Dt j tjj|jtjjqW|S)z-Get packages matching pkg_specs and the deps.zError in resolve of packages:z cSsg|] }t|qSr)rK)rDrArrrrEsz;DownloadCommand._get_packages_with_deps..)r[r^r8ZGoalr&rdZinstallrCupdateZ list_installsZ list_upgradesrrrqrQr+utilZ_format_resolve_problemsZ problem_rulesr=r>) r r\r%r@Zpkg_setrAZgoalZrcrBrrrrZs    z'DownloadCommand._get_packages_with_depscCszt}xj|D]b}|jr8|j|jtjdt||jq |jdkrZ|jd|j|jfq tj t dt|q Wt |S)z4Get list of source rpm names for a list of packages.z --> Package : %s Source : %srXz %s-%s.src.rpmzNo source rpm defined for %s) r^Z sourcerpmrirdebugrKrcr;Zevrinforrr)r@r_rArrrr] s   z$DownloadCommand._get_source_packagescCstjjj|d}|o|dk}|s8|jdrdtjj|rd|jj|g|jj j d}|jj j j |dStjj|}|j|jj |jjd }|j}|j d d }|jjr|j|jjd }t|jdkrtd |}tjj||S)z#Return a query to match a pkg_spec.rrrfilerz.rpm)rO)rA)Zwith_srcT)Zlatest_per_arch_by_priority)rczNo package %s available.)rrr|r)r+ZpycompZurlparseendswithr.rPisfiler&Zadd_remote_rpmsrNrOrdreZfiltermsubjectSubjectZget_best_queryr r%rfrrglenrCrr=rp)r rvr1Zis_urlr@subjrkrBrrrros     zDownloadCommand._get_querycCsd|dd}tjj|}x.|jD]"}|j|jjj}|r"|jSq"Wt d|}tj j |dS)z/Return a query to match a source rpm file name.NzNo package %s available.) r+rrZget_nevra_possibilitiesZto_queryr&rdrfZlatestrr=rp)r rvrZ nevra_objZ tmp_queryrBrrrrn,s    z!DownloadCommand._get_query_source)F)F)__name__ __module__ __qualname__aliasesrZsummaryr staticmethodr"r0rCr?r2r4r5r6r[rZr]rorn __classcell__rr)rrr%s   #!     
r)Z __future__rrZdnfpluginscorerrZdnf.cli.option_parserrr+Zdnf.cliZdnf.exceptionsZdnf.i18nZ dnf.subjectZdnf.utilr8rsr.rUZpluginZregister_commandr ZCommandrrrrrs   __pycache__/repodiff.cpython-36.opt-1.pyc000064400000017063151030231510014135 0ustar003 gt`,@sjddlmZddlmZddlZddlmZddlZddlm Z Gdddej Z Gdd d ej j ZdS) )absolute_import)unicode_literalsN) OptionParser)_cs eZdZdZfddZZS)RepoDiffrepodiffcs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoDiffCommand)selfbasecli) __class__/usr/lib/python3.6/repodiff.pyr $szRepoDiff.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr src@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS)r rz1List differences between two sets of repositoriesc Cs|jddgddtdd|jddgdd td d|jd d d gtjdtdd|jdddtdd|jddtdd|jddtdd|jddtdddS)Nz --repo-oldz-oappendoldz2Specify old repository, can be used multiple times)defaultactiondesthelpz --repo-newz-nnewz2Specify new repository, can be used multiple timesz--archz --archlistz-aarcheszhSpecify architectures to compare, can be used multiple times. By default, only source rpms are compared.z--sizez-s store_truez5Output additional data about the size of the changes.)rrz--compare-archzMCompare packages also by arch. By default packages are compared just by name.z--simplez7Output a simple one line message for modified packages.z --downgradezNSplit the data for modified packages between upgraded and downgraded packages.) 
add_argumentrrZ_SplitCallback)parserrrr set_argparser/s         zRepoDiffCommand.set_argparsercCs|jj}d|_d|_d|_dg|jj_|jj s:|jj rNt d}t j j|x<|jjjD],}|j|jj |jj kr|jq\|jq\W|jjsdg|j_dS)NTallz*Both old and new repositories must be set.src)r demandsZsack_activationZavailable_repos changelogsr ZconfZdisable_excludesoptsrrrdnf exceptionsErrorZreposr"idenabledisabler)r r$msgZreporrr configureMs    zRepoDiffCommand.configurecCs|jjr|j|jfS|jS)N)r& compare_archrarch)r pkgrrr_pkgkey`s zRepoDiffCommand._pkgkeyc s6tfdd|Dtj}tfdd|Dtj}t}x:|j|dD]*}x$|j|jdD]}||j|<qlWqXWjjj}tfdd||Dfdd||D|ggd} xj|j |D]\} | } | } | j | j krq|| j | j d kr| d j | | fq| d j | | fqW| S) aNcompares packagesets old and new, returns dictionary with packages: added: only in new set removed: only in old set upgraded: in both old and new, new has bigger evr downgraded: in both old and new, new has lower evr obsoletes: dictionary of which old package is obsoleted by which new csg|]}j||fqSr)r2).0p)r rr msz-RepoDiffCommand._repodiff..csg|]}j||fqSr)r2)r3r4)r rrr5os) obsoletes)Zprovidescsg|] }|qSrr)r3k)new_drrr5zscsg|] }|qSrr)r3r7)old_drrr5{s)addedremovedr6upgraded downgradedrr=r<) dictsetkeysfilterr6r2r sackevr_cmp intersectionevrr) r rrZold_keysZnew_keysr6Z obsoleterZ obsoletedrCrr7pkg_oldpkg_newr)r8r9r r _repodiffes0    zRepoDiffCommand._repodiffc shfdddd}fdd}tddddd}x.pkgstrcSsXt|}|dkr.|djtjjj|j7}n&|dkrT|djtjjj| j7}|S)Nrz ({})z (-{}))rIformatr'r Z format_numberstrip)Znumr-rrrsizestrs z(RepoDiffCommand._report..sizestrcsBg}jjr*|jd||fn|jd|jd||f|jdt|d |jrv|jd}nd}x|jD]}|r|d|dkrPn2|d|dkr|d|dkr|d|dkrP|jd |djd tjj|dtjj|dfqWjj r0|jt d j |j |j t d j |dS)Nz%s -> %s-rZ timestampZauthortextz * %s %s %sz %a %b %d %YzSize change: {} bytes )r&Zsimplerlenr%Zstrftimer'Zi18nZucdsizerrKprintjoin)rFrGZmsgsZ old_chlogZchlog)rJr rrreport_modifieds2      z0RepoDiffCommand._report..report_modifiedr)r:r;r<r=r:zAdded package : {}r;zRemoved package: {}r6zObsoleted by : {}r<z Upgraded packagesr=z Downgraded packagesz Modified packagesz 
SummaryzAdded packages: {}zRemoved packages: {}zUpgraded packages: {}zDowngraded packages: {}zModified packages: {}zSize of added packages: {}zSize of removed packages: {}zSize of modified packages: {}zSize of upgraded packages: {}zSize of downgraded packages: {}zSize change: {}) r>sortedrVrrKrUgetr2r&Z downgraderT) r rrMrXZsizesr1Z obsoletedbyrFrGZmodifiedr)rJr r_reportsf                zRepoDiffCommand._reportcCs|jjjtjj|jjd}|jjjtjj|jjd}|jj rld|jj krl|j |jj d|j |jj d|jj r|j dd|j ddn|j dd|j dd|j |j |j |j||dS)N)Zreponame*)r0rP)Zlatest_per_arch)Zlatest)r rBZqueryhawkeyZIGNORE_EXCLUDESrAr&rrrZfiltermr/Zapplyr[rH)r Zq_newZq_oldrrrruns     zRepoDiffCommand.runN)r) rrraliasesrZsummary staticmethodr!r.r2rHr[r^rrrrr +s &ar )Z __future__rrZdnf.clir'Zdnf.cli.option_parserrr]ZdnfpluginscorerZPluginrr ZCommandr rrrrs     __pycache__/debug.cpython-36.opt-1.pyc000064400000025101151030231510013415 0ustar003 gt`1@sddlmZddlmZddlmZddlmZmZddlZddl Zddl Z ddl Z ddl Z ddl Z ddlZddlZdZGdddejZGd d d ejjZGd d d ejjZd dZddZddZdS))absolute_import)unicode_literals)ucd)_loggerNzdnf-debug-dump version 1 cs eZdZdZfddZZS)DebugdebugcsDtt|j||||_||_|jdk r@|jjt|jjtdS)N)superr__init__basecliZregister_commandDebugDumpCommandDebugRestoreCommand)selfr r ) __class__/usr/lib/python3.6/debug.pyr )s   zDebug.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr%srcsteZdZdZedZfddZddZeddZ d d Z d d Z d dZ ddZ ddZddZddZZS)r debug-dumpz5dump information about installed rpm packages to filecstt|j|d|_dS)N)r r r dump_file)rr )rrrr 7szDebugDumpCommand.__init__cCsd|jj_d|jj_dS)NT)r demandssack_activationavailable_repos)rrrr configure;s zDebugDumpCommand.configurecCs.|jdddtdd|jddtdd dS) Nz --norepos store_trueFz/do not attempt to dump the repository contents.)actiondefaulthelpfilename?zoptional name of dump file)nargsr!) 
add_argumentr)parserrrr set_argparser?s  zDebugDumpCommand.set_argparsercCs|jj}|s6tjdtjtj}dtjd|f}tjj|}|j dr\t j |d|_ n t |d|_ |jt|j|j|j|j|jj |j|j jttd|dS)z{create debug txt file and compress it, if no filename specified use dnf_debug_dump-.txt.gz by defaultz %Y-%m-%d_%Tzdnf_debug_dump-%s-%s.txt.gzz.gzwzOutput written to: %sN)optsr"timeZstrftimeZ localtimeosunamepathabspathendswithgzipGzipFileropenwrite DEBUG_VERSIONdump_system_infodump_dnf_config_infodump_rpm_problems dump_packagesZnoreposdump_rpmdb_versionscloseprintr)rr"ZnowrrrrunHs      zDebugDumpCommand.runcCs4tjjr t|jtjr t|d}tjj|j|dS)Nutf8) dnfZpycompZPY3 isinstancerr1r2bytesZ write_to_file)rmsgrrrr4as zDebugDumpCommand.writecCsX|jdtj}|jd|d|df|jdtj|jdtjjdddS) Nz%%%%SYSTEM INFO z uname: %s, %s z rpm ver: %s z python ver: %s  )r4r,r-rpm __version__sysversionreplace)rr-rrrr6fs  z!DebugDumpCommand.dump_system_infocCs|jjj}djdd|jjjD}|jd|jd|d|jd|d|jd |d |jd tjj |jd ||jd dj|jjj dS)N,cSsg|] }|jqSr)r).0prrr psz9DebugDumpCommand.dump_dnf_config_info..z %%%%DNF INFO z arch: %s archz basearch: %s Zbasearchz releasever: %s Z releaseverz dnf ver: %s z enabled plugins: %s z global excludes: %s ) r confZ substitutionsjoinZ_pluginspluginsr4r?constVERSION excludepkgs)rvarrSrrrr7ns  z%DebugDumpCommand.dump_dnf_config_infocCsP|jdt|j\}}|jdjdd|D|jdjdd|DdS)Nz%%%%RPMDB PROBLEMS rFcSs$g|]\}}dt|t|fqS)zPackage %s requires %s )r)rMreqpkgrrrrO}sz6DebugDumpCommand.dump_rpm_problems..cSs$g|]\}}dt|t|fqS)zPackage %s conflicts with %s )r)rMrQrYrrrrOs)r4 rpm_problemsr rR)rZmissing conflictsrrrr8zs   z"DebugDumpCommand.dump_rpm_problemsc Cs\|jjj}|jdx&t|jD]}|jdt|q$W|sFdS|jd|j}xt|jjj dddD]}yd}|j dk r|j }n*|j dk r|j }nt |j dkr|j d}|jd|j|f|jd d j|jx,t|j|jd D]}|jdt|qWWqrtjjk rR}z|jd |t|fwrWYdd}~XqrXqrWdS) Nz %%%%RPMDB z %s z %%%%REPOS cSs|jS)N)id)xrrrsz0DebugDumpCommand.dump_packages..)keyrz %%%s - %s z excludes: %s rL)ZreponamezError accessing repo %s: %s )r sackqueryr4sorted installedpkgspec availableZreposZ iter_enabledZmetalinkZ 
mirrorlistlenZbaseurlr\rRrVfilterr? exceptionsErrorstr) rZ load_reposqrNreZrepoZurlZpoerrrr9s2      zDebugDumpCommand.dump_packagescCs(|jd|jjj}|jd|dS)Nz%%%%RPMDB VERSIONS z all: %s )r4r r`Z_rpmdb_version)rrJrrrr:s  z$DebugDumpCommand.dump_rpmdb_versions)r)rrraliasesrsummaryr r staticmethodr'r=r4r6r7r8r9r:rrr)rrr 2s    r c@sPeZdZdZedZddZeddZddZ d d Z d d Z ed dZ dS)r debug-restorez,restore packages recorded in debug-dump filecCs4d|jj_d|jj_d|jj_|jjs0d|jj_dS)NT)r rrrZ root_userr*outputZ resolving)rrrrrs    zDebugRestoreCommand.configurecCs~|jddtdd|jddtdd|jddtdd|jd d d td d |jddtdd|jddtdddS)Nz--outputrz,output commands that would be run to stdout.)rr!z--install-latestz0Install the latest version of recorded packages.z --ignore-archz_Ignore architecture and install missing packages matching the name, epoch, version and release.z--filter-typesz[install, remove, replace]zinstall, remove, replacezlimit to specified type)metavarr r!z--remove-installonlyzqAllow removing of install-only packages. Using this option may result in an attempt to remove the running kernel.r"r(zname of dump file)r$r!)r%r)r&rrrr's$     z!DebugRestoreCommand.set_argparsercCsV|jjr$t|jjjddj|j_|j|jjd}|j||j|j||jdS)z Execute the command action here.rL rN) r* filter_typessetrKsplitread_dump_filer"process_installed process_dump)r dump_pkgsrrrr=s zDebugRestoreCommand.runc Cs|jjjj}|jj|}x|D]}d}t|}|j|j|jfd}|dk r|j |j |j f} | |krpd|| <q||kr~d}qd|j krd} nd} x|j D] } | || <qWnd}|r"d|j kr"||ks|jr"|jrtd|q"|jj|q"WdS)NFskipTrKremovez remove %s)r r`rarcZ_get_installonly_queryrdgetrrPepochrJreleasertkeysZremove_installonlyrqr<Zpackage_remove) rrzr*rcZinstallonly_pkgsrYZ pkg_removespecdumped_versionsZevrrZd_evrrrrrxs.    
z%DebugRestoreCommand.process_installedc Csxt|jD]\}}|||f}xt|jD]\}}}||||f} | dkrRq0|jr^d} nd|} |jr| dkrd|| f} nt|| |||} | |jkr0|jrtd| | fq0y|jj | Wq0t j j k rt jtd| Yq0Xq0WqWdS)Nr{rF.installz%s%sz%s %szPackage %s is not available)rbrZ ignore_archZinstall_latest pkgtup2specrtrqr<r rr?rhZ MarkingErrorrerrorr) rrzr*narrlvrrrPpkg_specrrrrys&  z DebugRestoreCommand.process_dumpcCs|jdrtj|}nt|}t|jtkrFtjt d|t j j d}i}xp|D]h}t|}|rr|dkrTd}qT| s|ddkrP|j }tj|}d|j|j|jfi|j|j|jf<qTW|S) Nz.gzzBad dnf debug file: %sTz %%%%RPMDB Frrsr)r0r1r2r3rreadliner5rrrr?rhristriphawkeyZ split_nevra setdefaultrrPr~rJr)r"Zfobjr{ZpkgslinerZnevrarrrrws(    (z"DebugRestoreCommand.read_dump_fileN)rp) rrrrmrrnrror'r=rxryrwrrrrrs  #rcstjj|}|jjt}t}x@D]8|jfddjD|jfddjDq*Wfdd|D}fdd|D}||fS)Ncs2g|]*}t|dk rt|jd r|fqS)zsolvable:prereqmarkerzrpmlib()rj startswith)rMrX)rYrrrO:sz rpm_problems..csg|] }|fqSrr)rMrQ)rYrrrO=scs$g|]\}}j|ds||fqS))provides)rg)rMrXrY)allpkgsrrrO?scs$g|]\}}j|dr||fqS))r)rg)rMrQrY)rrrrOAs) r?r`Z _rpmdb_sackrarcruupdaterequiresr[)r Zrpmdbrr[Zmissing_requiresZexisting_conflictsr)rrYrrZ3s   rZcCst|j|j|j|j|jS)N)rrrPr~rJr)rYrrrrdFsrdcCs<|sdn d|jd}|dkr"dnd|}d|||||fS)NrFz.%srz%s:z %s-%s%s-%s%s)NrF)lstrip)rrPr~rJrrrlrrrrJsr)Z __future__rrZdnf.i18nrZdnfpluginscorerrr?Zdnf.clir1rr,rGrIr+r5ZPluginrr ZCommandr rrZrdrrrrrs&    w __pycache__/copr.cpython-36.opt-1.pyc000064400000050311151030231510013273 0ustar003 fZv@sddlmZddlZddlZddlZddlZddlZddlZddlZddl Z ddl Z ddlZddl m Z m Z ddlZddlmZddlmZddlZy$ddlmZmZmZmZddZWnLek rd d Zydd lmZWnek rd dZYnXYnXd Zee de dgZee de ddgZerdddl m!Z!m"Z"m#Z#ddl$m%Z%m&Z&m'Z'n(ddl!m!Z!m"Z"m#Z#ddl(m%Z%m&Z&m'Z'ej)j*Gdddej+j,Z-ej)j*Gddde-Z.dS))print_functionN)_logger)PY3)ucd)nameversioncodenameos_release_attrcCstttfS)N)rrr r r /usr/lib/python3.6/copr.pylinux_distribution.sr cCsdS)Nr )rr r r r 1sr )r cCsrtd`}i}xF|D]>}y$|jjd\}}|jd||<Wqtk rPYqXqW|d|ddfSQRXdS)Nz/etc/os-release="NAMEZ VERSION_ID)openrstripsplitstrip 
ValueError)Zos_release_fileZos_release_datalineZos_release_keyZos_release_valuer r r r 7s   copryesynonr) ConfigParser NoOptionErrorNoSectionError)urlopen HTTPErrorURLErrorc@s eZdZdZdZdZdZdZdZedeZ d8Z e d Z d Z e d Zed d ZddZddZddZddZddZddZddZddZddZd d!Zd"d#Zed$d%Zd&d'Zd(d)Zd*d+Z d,d-Z!d.d/Z"d0d1Z#ed2d3Z$ed4d5Z%ed6d7Z&dS)9 CoprCommandz Copr plugin for DNF Nzcopr.fedorainfracloud.orgZfedoraZhttpsiz://rz Interact with Copr repositories.Ta enable name/project [chroot] disable name/project remove name/project list --installed/enabled/disabled list --available-by-user=NAME search project Examples: copr enable rhscl/perl516 epel-6-x86_64 copr enable ignatenkobrain/ocltoys copr disable rhscl/perl516 copr remove rhscl/perl516 copr list --enabled copr list --available-by-user=ignatenkobrain copr search tests c Cs|jddddddddgd |j}|jd d td d |jdd tdd |jdd tdd |jddtdd|jdtdd|jddddS)N subcommandhelpenabledisableremovelistsearch)nargschoicesz --installed store_truez.List all installed Copr repositories (default))actionr&z --enabledzList enabled Copr repositoriesz --disabledzList disabled Copr repositoriesz--available-by-userrz-List available Copr repositories by user NAME)metavarr&z--hubz(Specify an instance of Copr to work with)r&arg*)r,) add_argumentZadd_mutually_exclusive_groupr)parserZ list_optionr r r set_argparserps    zCoprCommand.set_argparsercCs|jjjjdkrdSd}t}g}|jjjd}tjj |t d}tjj |r|j ||j ||jddr|jddr|jdd}|jdd}||g|_n ddg|_xHtjtjj |t dD],}|jdrtjj |t d|} |j | qWg} t|jjr|jjdjd } t| d krV|jjrVtjtd td tjjtd nL|jj rt| d kr|j|_|j|_n t| d kr| d}n|jj}|rH|rHd|_|j t |dd|j!||dd} | rH|j!||d|j"} |j!||d|j#} | |_| d| |_t$| |j#krH|jd| 7_|jd| 7_|jsd|krr||_|j"d||_n|jddd|_||_dS)Nrrz.confmain distribution releaseverFz.d/zError: z^specify Copr hub either with `--hub` or using `copr_hub/copr_username/copr_projectname` formatzmultiple hubs specifiedT)reversehostnameprotocolportz://:r%)%cliZcommandoptsrbaseconfZpluginconfpathospathjoin PLUGIN_CONFisfileappendreadZ has_optionget 
chroot_configlistdirendswithlenr1rhubrcriticalrdnfCliErrordefault_hostname copr_hostname default_urlcopr_urlsorted_read_config_itemdefault_protocol default_portint)selfZcopr_hubZcopr_plugin_configZ config_filesZ config_pathZdefault_config_filer7r8filenameZ config_fileprojectr<r=r>r r r configuresl                 zCoprCommand.configurec Cs*y |j||Sttfk r$|SXdS)N)rKrr)r]configrPZsectiondefaultr r r rYs zCoprCommand._read_config_itemcCstjjdj|jdS)Nz{0} )sysstderrwriteformatr)r]textr r r _user_warning_before_promptsz'CoprCommand._user_warning_before_promptc Cs|jjd}|dkr&|jjj|dS|dkrl|jjrH|j|jjdS|j|jj j d|jj |jj dSy|jj d}WnLttfk rtjtdtd|jjj|tjjtdYnXy\|jj d}t|jj dkrtjjtd|jd |_t|jd kr$tjjtd Wn*tk rP|j}|jd |_YnX|d krj|j|dS|jd }t|dkrtjtdtdtjjtdnexactly two additional parameters to copr command are requiredr%zToo many arguments.-r:zOBad format of optional chroot. The format is distribution-version-architecture.r+r9zEuse format `copr_username/copr_projectname` to reference copr projectzbad copr project formatz{0}/_copr:{1}:{2}:{3}.repor'a Enabling a Copr repository. Please note that this repository is not part of the main distribution, and quality may vary. The Fedora Project does not exercise any power over the contents of this repository beyond the rules outlined in the Copr FAQ at , and packages are not held to any quality or security level. Please do not file bug reports about these packages in Fedora Bugzilla. In case of problems, contact the owner of this repository. 
z!Do you really want to enable {0}?z Repository successfully enabled.r(z!Repository successfully disabled.r)z Repository successfully removed.zUnknown subcommand {}.)rir:)(rAr$r@Z optparserZ print_helpZavailable_by_user_list_user_projects_list_installed_repositoriesrBrCZreposdirenabledZdisabledr1r IndexErrorrrQrrRrSrO exceptionsErrorr chroot_parts _guess_chroot_searchrf get_reposdirrU_sanitize_username _need_rootrF _ask_user_download_repoinfo_runtime_deps_warning _disable_repo _remove_repo) r]r$ project_namechrootr_ copr_usernamecopr_projectname repo_filenamerymsgr r r runs                     zCoprCommand.runcCs|jjdd}tjd|j|}|j|jko8tjd|}tjd|}|jjr`| rh| rhdSn|shdStjd|rxdStjd|rdS|j } | r|s| r|rdSd} tjd |r|jd d \} } } }| d| d|}ntjd|r2|jd d}|j d d djd dd} | d|dd|d}n.|jd d}|j d|dd|d}d} | sn|d7}| r||d7}t || S)Nr9r%z_copr:Z_copr_z_copr:|^_copr_zcopr:.*:.*:.*:mlz coprdep:.*Fzcopr:r?r:rjrirTz (disabled)z *) repofilerrematchrUrWrVrArPrmrsplitrTprint)r]repo_idrepo enabled_only disabled_only file_nameZ match_newZ match_oldZ match_anyrmold_reporrUZ copr_ownerZcopr_dirrZ copr_namer r r _list_repo_file8sB       zCoprCommand._list_repo_filecCsFd}x,|jjjD]\}}|j||||rd}qW|rBttddS)NFTz* These coprs have repo file with an old format that contains no information about Copr hub - the default one was assumed. 
Re-enable the project to fix this.)rBrepositemsrrr)r]Z directoryrrrrrr r r rlis z(CoprCommand._list_installed_repositoriesc Csdj|}|j|}|jj|dd}ytj|j}Wn*tk r`tj j t dj|YnX|j |t dj|}|j |xL|dD]@}dj||d}|d pt d } |jjjt|| }t|qWdS) Nz!/api_3/project/list?ownername={0}zw+)modez+Can't parse repositories for username '{}'.zList of {} coprsrz {0}/{1} : r descriptionzNo description given)rfrWrBr jsonloadsrJrrRrorpr_check_json_output_print_match_sectionoutput fmtKeyValFillrr) r]Z user_nameapi_pathurlres json_parse section_textitemrdescr r r rkss"    zCoprCommand._list_user_projectsc Csdj|}|j|}|jj|dd}ytj|j}Wn*tk r`tj j t dj|YnX|j |t dj|}|j |xJ|dD]>}dj|d}|d pt d } |jjjt|| }t|qWdS) Nz/api_3/project/search?query={}zw+)rzCan't parse search for '{}'.z Matched: {}rz{0} : Z full_namerzNo description given.)rfrWrBr rrrJrrRrorprrrrrrr) r]Zqueryrrrrrrrrr r r rss     zCoprCommand._searchcCs|jjj|}t|dS)N)rBrZ fmtSectionr)r]rgZ formattedr r r rsz CoprCommand._print_match_sectioncCsj|jstjjdd|_tjjdj|j|jjrf|jjj sb|jj j dj|dj|d rfdSdS)N Fz{0} z {} [y/N]: z {} [Y/n]: )rZdefaultyes_msgT) first_warningrcrdrerfrrBZ _promptWantedrCZassumenorZ userconfirm)r]ryrr r r _ask_user_no_raises  zCoprCommand._ask_user_no_raisecCs |j||stjjtddS)NzSafe and good answer. 
Exiting.)rrRrorpr)r]ryrr r r rws zCoprCommand._ask_usercCs tjdkrtjjtddS)Nrz/This command has to be run under the root user.)rDgeteuidrRrorpr)clsr r r rvs zCoprCommand._need_rootcs|jdks&ddks&ddkr,t|jjjd}tfdddDrd krbd |}n&d td krxd |}ndjd|}ndkrtj d}dkrdj|}ndjd|}nPdkrtj d}dkrdj|}ndjd|}nddj ddd}|S)z2 Guess which chroot is equivalent to this machine NrFr%Zbasearchcsg|] }|kqSr r ).0r)distr r sz-CoprCommand._guess_chroot..Fedora Fedora LinuxZRawhidezfedora-rawhide-ZrawhideZredhat_support_product_versionzfedora-{0}-{1}ZMageiaz%{distro_arch}ZCauldronzmageia-cauldron-{}zmageia-{0}-{1}ZopenSUSEz%{_target_cpu}Z Tumbleweedzopensuse-tumbleweed-{}zopensuse-leap-{0}-{1}zepel-%s-x86_64.)rr) rLr rBrC substitutionsanyr rfrpmZ expandMacror)r]Zdistarchr~r )rr rrs,        zCoprCommand._guess_chrootcCsdj|jdd}|jd}dj|||}y*t|j|}tjj|rRtj|Wn^t k rl}z|j dkrt dj|j||j t |}t jj|t d}|jjd} | r>tj| jd} tj| } |t d jdj|j|7}| jd r0|t d d jd d| d D7}|t dj|7}t jj|n|t dj|7}t jj|WYdd}~XnJtk r}z,t dj|j||jj}t jj|WYdd}~XnX|j} | jd} tjd| rtjj|jjjd| ddd}|j|j krR|j!dddj!|j"dj!dddj!ddj!dd} tjj| rRtj| t#|d.}|j$| x|j%D]} |j$| qrWWdQRXtj&|t'j(t'j)Bt'j*Bt'j+BdS) Nrjr%z%/coprs/{0}/repo/{1}/dnf.repo?arch={2}iz Request to {0} failed: {1} - {2}z+It wasn't possible to enable this project. 
zCopr-Error-Datazutf-8z1Repository '{0}' does not exist in project '{1}'.zavailable chrootsz Available repositories: z, css|]}dj|VqdS)z'{}'N)rf)rxr r r sz-CoprCommand._download_repo..z If you want to enable a non-default repository, use the following command: 'dnf copr enable {0} ' But note that the installed repo file will likely need a manual modification.zProject {0} does not exist.zFailed to connect to {0}: {1}z\[copr:rriz.repoz_copr:_coprrr?Zgroup_@wbrr),rFrqrfr rWrDrEexistsr)r!coderstrrRrorpZheadersrKbase64Z b64decodedecoderrr"reasonstrerrorreadlinerrrBrCrtrVreplacerUrre readlineschmodstatS_IRUSRS_IWUSRS_IRGRPS_IROTH)r]r}rZ short_chrootZarchrZresponseeZ error_msgZ error_dataZerror_data_decodedZ first_linerZold_repo_filenamefr r r rxsX           $   zCoprCommand._download_repocs|jjdd|jj|j|j||g}x(jjD]}|jdrJq:|j|q:W|s`dSt d}t j d|j dj fdd |D}|j|t d sx,|D]$}|jjjj||jjjd d iqWdS) a, In addition to the main copr repo (that has repo ID prefixed with `copr:`), the repofile might contain additional repositories that serve as runtime dependencies. This method informs the user about the additional repos and provides an option to disable them. T)rzcopr:NaMaintainer of the enabled Copr repository decided to make it dependent on other repositories. Such repositories are usually necessary for successful installation of RPMs from the main Copr repository (they provide runtime dependencies). Be aware that the note about quality and bug-reporting above applies here too, Fedora Project doesn't control the content. Please review the list: {0} These repositories have been enabled automatically.r%z cs*g|]"}djt|jj|ddqS)z){num:2}. 
[{repoid}] baseurl={baseurl}baseurl)Znumrepoidr)rfnextcfgZgetValue)rr)counterrr r r9sz5CoprCommand._runtime_deps_warning..z!Do you want to keep them enabled?rm0)rBresetZread_all_repos_get_copr_reporursections startswithrIr itertoolscountrfrFrrCwrite_raw_configfilerr)r]rrZ runtime_depsrryZdepr )rrr rzs*      z!CoprCommand._runtime_deps_warningc Csdj|jjddd|j||}||jjkrdj|j||}}||jjkrd|jj|jkr|jj|jjdd }y.|jdddjddd}||jkrdSWqtk rYqXndS|jj|S) Nzcopr:{0}:{1}:{2}r?r%rz{0}-{1}rr9rir) rfrUrrurBrrrrn)r]rrrrrUr r r rFs     zCoprCommand._get_copr_repocCst|j||}|s,tjjtdj|j||ytj|j Wn2t k rn}ztjjt |WYdd}~XnXdS)Nz&Failed to remove copr repo {0}/{1}/{2}) rrRrorprrfrUrDr)rOSErrorr)r]rrrrr r r r|\s zCoprCommand._remove_repocCsd|j||}|dkr,tjjtdj||x2|jjD]$}|jj j |j ||jj j ddiq8WdS)Nz!Failed to disable copr repo {}/{}rmr) rrRrorprrfrrrBrCrrr)r]rrrrr r r r{hs  zCoprCommand._disable_repoc Cs<ytj|j}Wn$tk r6tjjtddSX|S)z Wrapper around response from server check data and print nice error in case of some error (and return None) otherwise return json object. 
zUnknown response from server.N)rrrJrrRr@rSr)rrrr r r _get_datats zCoprCommand._get_datacCs"d|krtjjdj|ddS)Nerrorz{})rRrorprf)rZjson_objr r r rszCoprCommand._check_json_outputcCs&|ddkrdj|ddS|SdS)Nrrzgroup_{}r%)rf)rrr r r rus zCoprCommand._sanitize_username)r)'__name__ __module__ __qualname____doc__rLrTZ default_hubrZr[rValiasesrsummaryrusage staticmethodr5r`rYrhrrrlrkrsrrrw classmethodrvrrrxrzrr|r{rrrur r r r r#PsD  L_1   %82   r#c@sDeZdZdZdZedZdZddZddZ e d d Z d d Z d S)PlaygroundCommandz Playground plugin for DNF playgroundz$Interact with Playground repository.z [enable|disable|upgrade]c Cs0|j|jtdtddj|j}|jj|dd}|j|}|j|ddkrft j j tdx|d D]}d j|d |d }d j|jj j |jdd}yj||dkrwpdj|j||}|jj|dd}|j|}|j|od|ko|ddkr |j||Wqpt jjk r&YqpXqpWdS)Nz!Enabling a Playground repository.zDo you want to continue?z{0}/api/playground/list/zw+)rrokzUnknown response from server.rz{0}/{1}ZusernameZcoprnamez{}/_playground_{}.repor9rjZchrootsz{0}/api/coprs/{1}/detail/{2}/)rvrwrrfrWrBr rcloserRr@rSrCrtrrxrorp) r]r~Zapi_urlrrrr}rZoutput2r r r _cmd_enables8         zPlaygroundCommand._cmd_enablecCs6|jx(tjdj|jjjD]}|j|q WdS)Nz{}/_playground_*.repo)rvglobrfrBrCrtr|)r]rr r r _cmd_disableszPlaygroundCommand._cmd_disablecCs|jdddddgddS)Nr$r%r'r(upgrade)r,r-)r3)r4r r r r5szPlaygroundCommand.set_argparsercCstjjd|jjd}|j}|dkrB|j|tjt dn`|dkrb|j tjt dn@|dkr|j |j|tjt dntjjt d j |dS) Nz%Playground is temporarily unsupportedrr'z-Playground repositories successfully enabled.r(z.Playground repositories successfully disabled.rz-Playground repositories successfully updated.zUnknown subcommand {}.) 
rRrorprAr$rrrrryrrrf)r]r$r~r r r rs    zPlaygroundCommand.runN)r) rrrrrrrrrrrr5rr r r r rs  r)/Z __future__rrrrrDrZshutilrrcrZdnfpluginscorerrrRZ dnf.pycomprZdnf.i18nrrZdistrorrr r r ImportErrorplatformrGsetZYESZNOZ configparserrrrZurllib.requestr r!r"Zurllib2ZpluginZregister_commandr@ZCommandr#rr r r r sP      B__pycache__/repomanage.cpython-36.opt-1.pyc000064400000014573151030231510014460 0ustar003 fJ)@szddlmZddlmZddlmZmZddlZddlZddlZddl Z ddl Z Gdddej Z Gdddej jZdS) )absolute_import)unicode_literals)_loggerNcs eZdZdZfddZZS) RepoManage repomanagecs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoManageCommand)selfbasecli) __class__ /usr/lib/python3.6/repomanage.pyr $szRepoManage.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr src@s\eZdZdZedZddZddZddZe d d Z e d d Z d dZ e ddZ dS)r rz"Manage a directory of rpm packagescCs,|jj r(|jj r(|jjtjtjddS)N)stdoutstderr)optsverbosequietr Zredirect_loggerloggingZWARNINGINFO)r rrr pre_configure/szRepoManageCommand.pre_configurecCs0|jj r|jj r|jj|jj}d|_dS)NT)rrrr Zredirect_repo_progressdemandsZsack_activation)r rrrr configure3s zRepoManageCommand.configurec"s@jjr jjr tjjtdjjr@jjr@tjjtdjjr`jjr`tjjtdjj r|jj r|dj_i}i}i}t}t jj }yd}j j j |j jjjgd}|jjj j|tj jr>j jj jj}xH|D]@} | j|kr|j| j|j| jij| jgj| qWWntjjk rg} jjjd} t | dkrtjjtd j j!ddd j j"d d d yj j#| j j$j%d Wn0t&k rt'j(tdj)dj*| YnXYnXj j+j,t-j.dj/} dd| j0| j0|ddj/D} | j1x| D]~} | j2| j3f}||krx| ||kr||j| n | g||<j4| }||kr||jj5| nj5| g||<q@Wg}t}jjrxh|j6D]\\}}|||f}|| d}x6|D].}j4|}x||D]}|j|q WqWqWxb|j7D]V}t8|j6}|| d}x4|D],}x$||D]}|jt|jq|WqnWqJWjjr|xh|j6D]\\}}|||f}|d| }x6|D].}j4|}x||D]}|j|qWqWqWxb|j7D]V}t8|j6}|d| }x4|D],}x$||D]}|jt|jqTWqFWq"Wjjrxh|j6D]\\}}|||f}|d| }x6|D].}j4|}x||D]}|j|qWqWqWt}xb|j7D]V}t8|j6}|| d}x4|D],}x$||D]}|jt|jq2Wq$WqWxx|j7D]l}t8|j6}|d| }xJ|D]B}x:||D].}x&|jD]} | |kr|j9| qWqWqWqdWfdd| j0| j0|ddj/D}!||!}|j1jj:r$t;dj*|nx|D]} t;| q*WdS)Nz%Pass either --old or --new, not both!z)Pass either --oldonly or --new, not both!z)Pass either --old or 
--oldonly, not both!TZrepomanage_repo)Zbaseurlz.rpmrzNo files to process)sackreposF)Zload_system_repoZload_available_repos)progresszCould not open {}z, )flagscSsg|]}|qSrr).0xrrr osz)RepoManageCommand.run..)Z nevra_strict)Zpkg__neqcsg|]}j|qSr)_package_to_path)r$r%)r rrr&s)Zpkg__eq )s   __pycache__/repodiff.cpython-36.pyc000064400000017063151030231510013176 0ustar003 gt`,@sjddlmZddlmZddlZddlmZddlZddlm Z Gdddej Z Gdd d ej j ZdS) )absolute_import)unicode_literalsN) OptionParser)_cs eZdZdZfddZZS)RepoDiffrepodiffcs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoDiffCommand)selfbasecli) __class__/usr/lib/python3.6/repodiff.pyr $szRepoDiff.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr src@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS)r rz1List differences between two sets of repositoriesc Cs|jddgddtdd|jddgdd td d|jd d d gtjdtdd|jdddtdd|jddtdd|jddtdd|jddtdddS)Nz --repo-oldz-oappendoldz2Specify old repository, can be used multiple times)defaultactiondesthelpz --repo-newz-nnewz2Specify new repository, can be used multiple timesz--archz --archlistz-aarcheszhSpecify architectures to compare, can be used multiple times. By default, only source rpms are compared.z--sizez-s store_truez5Output additional data about the size of the changes.)rrz--compare-archzMCompare packages also by arch. By default packages are compared just by name.z--simplez7Output a simple one line message for modified packages.z --downgradezNSplit the data for modified packages between upgraded and downgraded packages.) 
add_argumentrrZ_SplitCallback)parserrrr set_argparser/s         zRepoDiffCommand.set_argparsercCs|jj}d|_d|_d|_dg|jj_|jj s:|jj rNt d}t j j|x<|jjjD],}|j|jj |jj kr|jq\|jq\W|jjsdg|j_dS)NTallz*Both old and new repositories must be set.src)r demandsZsack_activationZavailable_repos changelogsr ZconfZdisable_excludesoptsrrrdnf exceptionsErrorZreposr"idenabledisabler)r r$msgZreporrr configureMs    zRepoDiffCommand.configurecCs|jjr|j|jfS|jS)N)r& compare_archrarch)r pkgrrr_pkgkey`s zRepoDiffCommand._pkgkeyc s6tfdd|Dtj}tfdd|Dtj}t}x:|j|dD]*}x$|j|jdD]}||j|<qlWqXWjjj}tfdd||Dfdd||D|ggd} xj|j |D]\} | } | } | j | j krq|| j | j d kr| d j | | fq| d j | | fqW| S) aNcompares packagesets old and new, returns dictionary with packages: added: only in new set removed: only in old set upgraded: in both old and new, new has bigger evr downgraded: in both old and new, new has lower evr obsoletes: dictionary of which old package is obsoleted by which new csg|]}j||fqSr)r2).0p)r rr msz-RepoDiffCommand._repodiff..csg|]}j||fqSr)r2)r3r4)r rrr5os) obsoletes)Zprovidescsg|] }|qSrr)r3k)new_drrr5zscsg|] }|qSrr)r3r7)old_drrr5{s)addedremovedr6upgraded downgradedrr=r<) dictsetkeysfilterr6r2r sackevr_cmp intersectionevrr) r rrZold_keysZnew_keysr6Z obsoleterZ obsoletedrCrr7pkg_oldpkg_newr)r8r9r r _repodiffes0    zRepoDiffCommand._repodiffc shfdddd}fdd}tddddd}x.pkgstrcSsXt|}|dkr.|djtjjj|j7}n&|dkrT|djtjjj| j7}|S)Nrz ({})z (-{}))rIformatr'r Z format_numberstrip)Znumr-rrrsizestrs z(RepoDiffCommand._report..sizestrcsBg}jjr*|jd||fn|jd|jd||f|jdt|d |jrv|jd}nd}x|jD]}|r|d|dkrPn2|d|dkr|d|dkr|d|dkrP|jd |djd tjj|dtjj|dfqWjj r0|jt d j |j |j t d j |dS)Nz%s -> %s-rZ timestampZauthortextz * %s %s %sz %a %b %d %YzSize change: {} bytes )r&Zsimplerlenr%Zstrftimer'Zi18nZucdsizerrKprintjoin)rFrGZmsgsZ old_chlogZchlog)rJr rrreport_modifieds2      z0RepoDiffCommand._report..report_modifiedr)r:r;r<r=r:zAdded package : {}r;zRemoved package: {}r6zObsoleted by : {}r<z Upgraded packagesr=z Downgraded packagesz Modified packagesz 
SummaryzAdded packages: {}zRemoved packages: {}zUpgraded packages: {}zDowngraded packages: {}zModified packages: {}zSize of added packages: {}zSize of removed packages: {}zSize of modified packages: {}zSize of upgraded packages: {}zSize of downgraded packages: {}zSize change: {}) r>sortedrVrrKrUgetr2r&Z downgraderT) r rrMrXZsizesr1Z obsoletedbyrFrGZmodifiedr)rJr r_reportsf                zRepoDiffCommand._reportcCs|jjjtjj|jjd}|jjjtjj|jjd}|jj rld|jj krl|j |jj d|j |jj d|jj r|j dd|j ddn|j dd|j dd|j |j |j |j||dS)N)Zreponame*)r0rP)Zlatest_per_arch)Zlatest)r rBZqueryhawkeyZIGNORE_EXCLUDESrAr&rrrZfiltermr/Zapplyr[rH)r Zq_newZq_oldrrrruns     zRepoDiffCommand.runN)r) rrraliasesrZsummary staticmethodr!r.r2rHr[r^rrrrr +s &ar )Z __future__rrZdnf.clir'Zdnf.cli.option_parserrr]ZdnfpluginscorerZPluginrr ZCommandr rrrrs     __pycache__/builddep.cpython-36.opt-1.pyc000064400000016426151030231510014131 0ustar003 f$@sddlmZddlmZddlmZmZddlZddlZddlZddl Zddl Zddl Zddl Z ddlZddlZddlZddlZejjGdddejjZdS))absolute_import)unicode_literals)_loggerNcseZdZdZdZeeZedZfddZddZ d d Z e d d Z d dZ ddZddZe ddZddZddZddZddZZS)BuildDepCommandbuilddep build-depz3Install build dependencies for package or spec filez[PACKAGE|PACKAGE.spec]cs(tt|j|tjjj|_g|_dS)N) superr__init__dnfrpmZ transactionZinitReadOnlyTransaction_rpm_tstempdirs)selfcli) __class__/usr/lib/python3.6/builddep.pyr /szBuildDepCommand.__init__cCsx|jD]}tj|qWdS)N)rshutilZrmtree)rtemp_dirrrr__del__4s zBuildDepCommand.__del__cCstjjj|}|ddkr |jStjj}tjdd}t jj |t jj |}|j j |t|d}zFy|j|jjj||jWn$tk r}zWYdd}~XnXWd|jX|S) z In case pkgspec is a remote URL, download it to a temporary location and use the temporary file instead. 
rfileZ dnf_builddep_)prefixzwb+N)rr)r ZpycompZurlparsepathlibdnfZrepoZ DownloadertempfileZmkdtemposjoinbasenamerappendopenZ downloadURLbaseZconfZ_configfileno RuntimeErrorclose)rpkgspeclocationZ downloaderrZ temp_fileZtemp_foexrrr_download_remote_file8s      z%BuildDepCommand._download_remote_filec Csdd}|jdddtdd|jdd d gd |td d |jdddtdd|j}|jddtdd|jddtdddS)NcSs:|r|jddng}t|dkr6td|}tj||S)Nz&'%s' is not of the format 'MACRO EXPR')splitlenrargparseZArgumentTypeError)argZarglistmsgrrr macro_defRs    z0BuildDepCommand.set_argparser..macro_defpackages+packagez"packages with builddeps to install)nargsmetavarhelpz-Dz--definer z 'MACRO EXPR'z$define a macro for spec file parsing)actiondefaultr6typer7z--skip-unavailable store_trueFz5skip build dependencies not available in repositories)r8r9r7z--specz)treat commandline arguments as spec files)r8r7z--srpmz)treat commandline arguments as source rpm) add_argumentrZadd_mutually_exclusive_group)parserr1Zptyperrr set_argparserPs       zBuildDepCommand.set_argparsercCs|jjsd|j_dS)Nerror)optsZ rpmverbosity)rrrr pre_configurefszBuildDepCommand.pre_configurecCsr|jj}d|_d|_d|_d|_|jjp.|jjsnx<|jj D]0}|j dpZ|j dpZ|j ds:|j j j Pq:WdS)NTz.src.rpmz .nosrc.rpmz.spec)rdemandsZavailable_reposZ resolvingZ root_userZsack_activationr@specsrpmr2endswithr"ZreposZenable_source_repos)rrBr&rrr configurejs    zBuildDepCommand.configurecCs\tjjj|j}x$|jjD]}tj|d|dqWd}x|jj D]}|j |}yl|jj rh|j |nT|jj r||j|n@|jds|jdr|j |n |jdr|j|n |j|WqDtjjk r}z:x$|jD]}tjtdj|qWtj|d}WYdd}~XqDXqDWx |jjD]}tj|dq*W|rXtjjtd dS) Nrr*Fz.src.rpmz nosrc.rpmz.speczRPM: {}Tz!Some packages could not be found.)r ZyumZrpmtransZRPMTransactionr"r@definer ZaddMacror2r)rD _src_depsrC _spec_depsrE _remote_deps exceptionsErrorZmessagesrr?rformatZdelMacro)rZrpmlogZmacroZ pkg_errorsr&elinerrrrunzs2       zBuildDepCommand.runcCs|jddS)Nr+)ZDNEVR)Zrpm_deprrr_rpm_dep2reldep_strsz#BuildDepCommand._rpm_dep2reldep_strcCstjj|jj}|j|d|j}| rX|jdrXtjj|jj}|j|d|j}| r|jd rtd}t j |||j j dkS|r|jj |}|rx|D]}tjj|qW|jjj|dddS) 
N)Zprovides/)r(z$No matching package to install: '%s'TF)ZselectZoptional)r ZselectorZSelectorr"sacksetZmatches startswithrrwarningr@Zskip_unavailableZ_sltr_matches_installedZ_msg_installedZ_goalZinstall)r reldep_strZsltrfoundr0Z already_instr4rrr_installs$      zBuildDepCommand._installc Cstj|tj}y|jj|}WnRtjk rp}z4t|dkrJtd|}tj |t j j |WYdd}~XnXtj ||j d}d}x0|D](}|j|}|jdrq||j|M}qW|std} t j j | |jjrtjtddS)Nzerror reading package headerz2Failed to open: '%s', not a valid source rpm file.Z requirenameTzrpmlib(zNot all dependencies satisfiedzJWarning: -D or --define arguments have no meaning for source rpm packages.)rr!O_RDONLYr Z hdrFromFdnor r?strrr%r rKrLZ dsFromHeaderrQrVrZr@rGrrW) rZsrc_fnfdhrNdsdonedeprXerrrrrrHs*         zBuildDepCommand._src_depsc Csytj|}Wn>tk rL}z"td||f}tjj|WYdd}~XnXd}x.tj|jdD]}|j |}||j |M}qbW|std}tjj|dS)Nz/Failed to open: '%s', not a valid spec file: %sTrequireszNot all dependencies satisfied) r rC ValueErrorrr rKrLr_Z sourceHeaderrQrZ) rZspec_fnrCr(r0r`rarXrbrrrrIs  zBuildDepCommand._spec_depsc Cstjj|j|jjjdd}tdd|D}|jjjj j||gddj j }|sptj j td|d}x.|D]&}x |jD]}||jt|M}qWqzW|std}tj j |dS) Nsrc)Z arch__neqcSsh|] }|jqSr)Z source_name).0pkgrrr sz/BuildDepCommand._remote_deps..)nameZarchzno package matched: %sTzNot all dependencies satisfied)r ZsubjectZSubjectZget_best_queryr"rTfilterlistZquery availableZlatestrPrKrLrrcrZr\) rr4rlZ sourcenamesZpkgsr`rgZreqrbrrrrJs   zBuildDepCommand._remote_deps)rr)__name__ __module__ __qualname__aliasesr0rZsummaryZusager rr) staticmethodr>rArFrPrQrZrHrIrJ __classcell__rr)rrr's   ! 
r)Z __future__rrZdnfpluginscorerrr.r Zdnf.cliZdnf.exceptionsZdnf.rpm.transactionZdnf.yum.rpmtransZ libdnf.reporrr rrZpluginZregister_commandrZCommandrrrrrs  __pycache__/download.cpython-36.pyc000064400000022456151030231510013211 0ustar003 gt`*0@sddlmZddlmZddlmZmZddlmZddlZddl Zddl Zddl Zddl Zddl ZddlZddlZddlZddlZejjGdddejjZdS))absolute_import)unicode_literals)_logger) OptionParserNcseZdZdgZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZddZd ddZd!ddZeddZddZddZZS)"DownloadCommandZdownloadz%Download package to current directorycs tt|j|d|_d|_dS)N)superr__init__optsparser)selfcli) __class__/usr/lib/python3.6/download.pyr +szDownloadCommand.__init__c Cs|jddtdd|jddtdd|jd dtd d|jd dtd d|jd ddgtjdtdd|jddtdd|jddtdd|jddddtdd|jddddd d!ggtd"d#dS)$Npackages+zpackages to download)nargshelpz--source store_truezdownload the src.rpm instead)actionrz --debuginfoz'download the -debuginfo package insteadz --debugsourcez)download the -debugsource package insteadz--archz --archlistarchesz[arch]z5limit the query to packages of given architectures.)destdefaultrmetavarrz --resolvez(resolve and download needed dependenciesz --alldepsz^when running with --resolve, download all dependencies (do not exclude already installed ones)z--urlz--urlsurlzJprint list of urls where the rpms can be downloaded instead of downloading)rrrz--urlprotocolsappendhttphttpsZrsyncftpz4when running with --url, limit to specific protocols)rchoicesrr) add_argumentrrZ_SplitCallback)r rrr set_argparser0s*           zDownloadCommand.set_argparsercCs|jj}d|_d|_|jjr*|jjr*d|_|jjr>|j j j |jj sN|jj rZ|j j j|jjrr|jj|j j_ntjjtj|j j_dS)NTF)r demandsZsack_activationZavailable_reposr resolveZalldepsZload_system_reposourcebaseZreposZenable_source_repos debuginfo debugsourceZenable_debug_reposdestdirconfdnfi18nucdosgetcwd)r r#rrr configureKs  zDownloadCommand.configurecCs|jj r.|jj r.|jj r.|j|jj}nXg}|jjrN|j|j|jj|jjrj|j|j|jj|jjr|j|j |jj|jj rxd|D]\}|j t j kr|j|jjd}|rt|qtd|j}|jjjrtjj|tj|qWdS|j|dS)zExecute the util action here.)schemesz$Failed to 
get mirror for package: %sN)r r%r'r(_get_pkg_objs_rpmsrextend_get_pkg_objs_source_get_pkg_objs_debuginfo_get_pkg_objs_debugsourcerrepoidhawkeyCMDLINE_REPO_NAMEZremote_locationZ urlprotocolsprintrnamer&r*strictr+ exceptionsErrorrZwarning _do_downloads)r pkgspkgrmsgrrrrun^s.        zDownloadCommand.runc Csi}x"|D]}|jt|gj|q Wg}g}xP|jD]D}dd|D}|r`|j|dq:|jddd|j|dq:W|r|jj||jjj|rx^|D]V}|j }t j j |jj jt j j|} t j j| rt j j|| rqtj||jj jqWtdd||D} | S)z= Perform the download for a list of packages cSsg|]}|jtjkr|qSr)r7r8r9).0rArrr sz1DownloadCommand._do_downloads..rcSs|jj|jjfS)N)ZrepoZpriorityZcost)xrrrsz/DownloadCommand._do_downloads..)keycSsg|] }|jqSr)localPkg)rDrArrrrEs) setdefaultstrrvaluessortr&Zdownload_packagesoutputprogressrIr.pathjoinr*r)basenameexistssamefileshutilcopysorted) r r@Zpkg_dictrAZ to_downloadZcmdlineZpkg_listZ pkgs_cmdlinesrcdstZ locationsrrrr?s.  zDownloadCommand._do_downloadscCs"|jjr|j|}n |j|}|S)zc Return a list of dnf.Package objects that represent the rpms to download. )r r$_get_packages_with_deps _get_packages)r pkg_specsr@rrrr2s  z"DownloadCommand._get_pkg_objs_rpmscCs*|j|}|j|}t|j|dd}|S)zj Return a list of dnf.Package objects that represent the source rpms to download. T)r%)r2_get_source_packagessetr[)r r\r@ source_pkgsrrrr4s  z$DownloadCommand._get_pkg_objs_sourcec Cst}|jjjj}xh|j|D]Z}xT|j|jgD]D}|j|t |j |j |j |j d}|s^q4x|D]}|j|qdWPq4Wq"W|S)zm Return a list of dnf.Package objects that represent the debuginfo rpms to download. )r;epochversionreleasearch)r^r&sackquery availabler[Z debug_nameZsource_debug_namefilterintr`rarbrcadd)r r\dbg_pkgsqrAZdbg_name dbg_availableprrrr5s    z'DownloadCommand._get_pkg_objs_debuginfocCsht}|jjjj}xL|j|D]>}|j|jt|j |j |j |j d}x|D]}|j |qNWq"W|S)zo Return a list of dnf.Package objects that represent the debugsource rpms to download. 
)r;r`rarbrc)r^r&rdrerfr[rgZdebugsource_namerhr`rarbrcri)r r\rjrkrArlrmrrrr6s  z)DownloadCommand._get_pkg_objs_debugsourceFcCs|r |jn|j}g}x||D]t}y|j||Wqtjjk r}zlist itertoolschain)r r\r%funcZqueriespkg_specer@rrrr[s  "zDownloadCommand._get_packagesc Cs|j|}t|}x|D]}tj|jj}|j||j}|r^|j|j |j|j qt dg}t j dj|dd|Dt j tjj|jtjjqW|S)z-Get packages matching pkg_specs and the deps.zError in resolve of packages:z cSsg|] }t|qSr)rK)rDrArrrrEsz;DownloadCommand._get_packages_with_deps..)r[r^r8ZGoalr&rdZinstallrCupdateZ list_installsZ list_upgradesrrrqrQr+utilZ_format_resolve_problemsZ problem_rulesr=r>) r r\r%r@Zpkg_setrAZgoalZrcrBrrrrZs    z'DownloadCommand._get_packages_with_depscCszt}xj|D]b}|jr8|j|jtjdt||jq |jdkrZ|jd|j|jfq tj t dt|q Wt |S)z4Get list of source rpm names for a list of packages.z --> Package : %s Source : %srXz %s-%s.src.rpmzNo source rpm defined for %s) r^Z sourcerpmrirdebugrKrcr;Zevrinforrr)r@r_rArrrr] s   z$DownloadCommand._get_source_packagescCstjjj|d}|o|dk}|s8|jdrdtjj|rd|jj|g|jj j d}|jj j j |dStjj|}|j|jj |jjd }|j}|j d d }|jjr|j|jjd }t|jdkrtd |}tjj||S)z#Return a query to match a pkg_spec.rrrfilerz.rpm)rO)rA)Zwith_srcT)Zlatest_per_arch_by_priority)rczNo package %s available.)rrr|r)r+ZpycompZurlparseendswithr.rPisfiler&Zadd_remote_rpmsrNrOrdreZfiltermsubjectSubjectZget_best_queryr r%rfrrglenrCrr=rp)r rvr1Zis_urlr@subjrkrBrrrros     zDownloadCommand._get_querycCsd|dd}tjj|}x.|jD]"}|j|jjj}|r"|jSq"Wt d|}tj j |dS)z/Return a query to match a source rpm file name.NzNo package %s available.) r+rrZget_nevra_possibilitiesZto_queryr&rdrfZlatestrr=rp)r rvrZ nevra_objZ tmp_queryrBrrrrn,s    z!DownloadCommand._get_query_source)F)F)__name__ __module__ __qualname__aliasesrZsummaryr staticmethodr"r0rCr?r2r4r5r6r[rZr]rorn __classcell__rr)rrr%s   #!     
r)Z __future__rrZdnfpluginscorerrZdnf.cli.option_parserrr+Zdnf.cliZdnf.exceptionsZdnf.i18nZ dnf.subjectZdnf.utilr8rsr.rUZpluginZregister_commandr ZCommandrrrrrs   __pycache__/universal_hooks.cpython-36.pyc000064400000013564151030231510014615 0ustar003 "h@sddlZddlZddlZddlZddlmZddlZddlZddlZddlZddl m Z ej e Z e jeje jejejGddde ZGdddejdZGd d d eZeZdd d ZGdddejdZGdddeZddZddZddZdS)N)path)PlugincsPeZdZdZfddZddZddZdd Zd d Zd d Z ddZ Z S)UniversalHooksPluginzuniversal-hookscstj||d|_dS)Nz/etc/dnf/universal-hooks)super__init__ hook_root)selfbaseZcli) __class__%/usr/lib/python3.6/universal_hooks.pyr,szUniversalHooksPlugin.__init__cCsttj|j|jjtdS)N)_run_dirrjoinr pre_config__name__LOG)rr r r r0szUniversalHooksPlugin.pre_configcCsttj|j|jjtdS)N)r rrrconfigrr)rr r r r3szUniversalHooksPlugin.configcCsttj|j|jjtdS)N)r rrrresolvedrr)rr r r r6szUniversalHooksPlugin.resolvedcCsttj|j|jjtdS)N)r rrrsackrr)rr r r r9szUniversalHooksPlugin.sackcCs8|jj}t|jt|t|jjtt j |j|tdS)N) pre_transactionr _run_pkg_dirsrrDnfTransactionInfor transactionr rr)rnamer r r r<sz$UniversalHooksPlugin.pre_transactioncCs8|jj}t|jt|t|jjttj |j|tdS)N) rrrrrrr r rr)rrr r r rAsz UniversalHooksPlugin.transaction) r __module__ __qualname__rrrrrrrr __classcell__r r )r r r)s rc@sDeZdZejddZejddZejddZejddZd S) FileSystemcCsdS)Nr )rpathnamer r r globHszFileSystem.globcCsdS)Nr )rrr r r isdirLszFileSystem.isdircCsdS)Nr )rrmoder r r accessPszFileSystem.accesscCsdS)Nr )rr!encodingr r r NamedTemporaryFileTszFileSystem.NamedTemporaryFileN) rrrabcabstractmethodrr r"r$r r r r rGsr) metaclassc@s,eZdZddZddZddZddZd S) RealFileSystemcCs tj|S)N)r)rrr r r rZszRealFileSystem.globcCs tj|S)N)rr )rrr r r r ]szRealFileSystem.isdircCs tj||S)N)osr")rrr!r r r r"`szRealFileSystem.accesscCstj||dS)N)r!r#)tempfiler$)rr!r#r r r r$csz!RealFileSystem.NamedTemporaryFileN)rrrrr r"r$r r r r r(Ysr(cCstj|sdSxxttj|dD]b}tj|r2q"tj|tjrx|d|}tj|dd}d|j kr|j d||j q"|j d|q"WdS)Nz/* T)shellrz!!! %s did not exit cleanly: %dz!!! 
%s is not executable) fsr sortedrr"r)X_OK subprocessZrun returncodeerror)Zhook_dirlogargsZscriptZcmdlineZ completedr r r r js   r c@seZdZejddZdS)TransactionInfocCsdS)Nr )rr r r getMembers|szTransactionInfo.getMembersN)rrrr%r&r7r r r r r6{sr6c@s"eZdZddddZddZdS)rN)returncCs ||_dS)N)r)rrr r r rszDnfTransactionInfo.__init__cCs|jS)N)r)rr r r r7szDnfTransactionInfo.getMembers)rrrrr7r r r r rsrcCstj|d|}t|}i}tjddd}i}|j} xtt| dddD]h} | j} | |kr`qLd|| <|j | d t tj|d | ||x&|j D]\} } | j | rd|| <qWqLW|j x&|D]} t tj|| |d |jqWWd QRXd S) zu :param str base_dir: :param logging.Logger log: :param str slot: :param TransactionInfo tinfo: Z multi_pkgswzutf-8)r!r#cSs|jS)N)r)mr r r sz_run_pkg_dirs..)key Zpkgsz --pkg_list=N)rr_make_dir_matchersr.r$r7r/setrwriter itemssearchflush)Zbase_dirr4ZslotZtinfoZ wildcard_path dir_matchersZwildcard_to_runZ temp_pkg_fileZ members_seenmembersmemberZpkgZ wildcard_dirZmatcherr r r rs&   rcCsFi}xs,     & __pycache__/builddep.cpython-36.pyc000064400000016426151030231510013172 0ustar003 f$@sddlmZddlmZddlmZmZddlZddlZddlZddl Zddl Zddl Zddl Z ddlZddlZddlZddlZejjGdddejjZdS))absolute_import)unicode_literals)_loggerNcseZdZdZdZeeZedZfddZddZ d d Z e d d Z d dZ ddZddZe ddZddZddZddZddZZS)BuildDepCommandbuilddep build-depz3Install build dependencies for package or spec filez[PACKAGE|PACKAGE.spec]cs(tt|j|tjjj|_g|_dS)N) superr__init__dnfrpmZ transactionZinitReadOnlyTransaction_rpm_tstempdirs)selfcli) __class__/usr/lib/python3.6/builddep.pyr /szBuildDepCommand.__init__cCsx|jD]}tj|qWdS)N)rshutilZrmtree)rtemp_dirrrr__del__4s zBuildDepCommand.__del__cCstjjj|}|ddkr |jStjj}tjdd}t jj |t jj |}|j j |t|d}zFy|j|jjj||jWn$tk r}zWYdd}~XnXWd|jX|S) z In case pkgspec is a remote URL, download it to a temporary location and use the temporary file instead. 
rfileZ dnf_builddep_)prefixzwb+N)rr)r ZpycompZurlparsepathlibdnfZrepoZ DownloadertempfileZmkdtemposjoinbasenamerappendopenZ downloadURLbaseZconfZ_configfileno RuntimeErrorclose)rpkgspeclocationZ downloaderrZ temp_fileZtemp_foexrrr_download_remote_file8s      z%BuildDepCommand._download_remote_filec Csdd}|jdddtdd|jdd d gd |td d |jdddtdd|j}|jddtdd|jddtdddS)NcSs:|r|jddng}t|dkr6td|}tj||S)Nz&'%s' is not of the format 'MACRO EXPR')splitlenrargparseZArgumentTypeError)argZarglistmsgrrr macro_defRs    z0BuildDepCommand.set_argparser..macro_defpackages+packagez"packages with builddeps to install)nargsmetavarhelpz-Dz--definer z 'MACRO EXPR'z$define a macro for spec file parsing)actiondefaultr6typer7z--skip-unavailable store_trueFz5skip build dependencies not available in repositories)r8r9r7z--specz)treat commandline arguments as spec files)r8r7z--srpmz)treat commandline arguments as source rpm) add_argumentrZadd_mutually_exclusive_group)parserr1Zptyperrr set_argparserPs       zBuildDepCommand.set_argparsercCs|jjsd|j_dS)Nerror)optsZ rpmverbosity)rrrr pre_configurefszBuildDepCommand.pre_configurecCsr|jj}d|_d|_d|_d|_|jjp.|jjsnx<|jj D]0}|j dpZ|j dpZ|j ds:|j j j Pq:WdS)NTz.src.rpmz .nosrc.rpmz.spec)rdemandsZavailable_reposZ resolvingZ root_userZsack_activationr@specsrpmr2endswithr"ZreposZenable_source_repos)rrBr&rrr configurejs    zBuildDepCommand.configurecCs\tjjj|j}x$|jjD]}tj|d|dqWd}x|jj D]}|j |}yl|jj rh|j |nT|jj r||j|n@|jds|jdr|j |n |jdr|j|n |j|WqDtjjk r}z:x$|jD]}tjtdj|qWtj|d}WYdd}~XqDXqDWx |jjD]}tj|dq*W|rXtjjtd dS) Nrr*Fz.src.rpmz nosrc.rpmz.speczRPM: {}Tz!Some packages could not be found.)r ZyumZrpmtransZRPMTransactionr"r@definer ZaddMacror2r)rD _src_depsrC _spec_depsrE _remote_deps exceptionsErrorZmessagesrr?rformatZdelMacro)rZrpmlogZmacroZ pkg_errorsr&elinerrrrunzs2       zBuildDepCommand.runcCs|jddS)Nr+)ZDNEVR)Zrpm_deprrr_rpm_dep2reldep_strsz#BuildDepCommand._rpm_dep2reldep_strcCstjj|jj}|j|d|j}| rX|jdrXtjj|jj}|j|d|j}| r|jd rtd}t j |||j j dkS|r|jj |}|rx|D]}tjj|qW|jjj|dddS) 
N)Zprovides/)r(z$No matching package to install: '%s'TF)ZselectZoptional)r ZselectorZSelectorr"sacksetZmatches startswithrrwarningr@Zskip_unavailableZ_sltr_matches_installedZ_msg_installedZ_goalZinstall)r reldep_strZsltrfoundr0Z already_instr4rrr_installs$      zBuildDepCommand._installc Cstj|tj}y|jj|}WnRtjk rp}z4t|dkrJtd|}tj |t j j |WYdd}~XnXtj ||j d}d}x0|D](}|j|}|jdrq||j|M}qW|std} t j j | |jjrtjtddS)Nzerror reading package headerz2Failed to open: '%s', not a valid source rpm file.Z requirenameTzrpmlib(zNot all dependencies satisfiedzJWarning: -D or --define arguments have no meaning for source rpm packages.)rr!O_RDONLYr Z hdrFromFdnor r?strrr%r rKrLZ dsFromHeaderrQrVrZr@rGrrW) rZsrc_fnfdhrNdsdonedeprXerrrrrrHs*         zBuildDepCommand._src_depsc Csytj|}Wn>tk rL}z"td||f}tjj|WYdd}~XnXd}x.tj|jdD]}|j |}||j |M}qbW|std}tjj|dS)Nz/Failed to open: '%s', not a valid spec file: %sTrequireszNot all dependencies satisfied) r rC ValueErrorrr rKrLr_Z sourceHeaderrQrZ) rZspec_fnrCr(r0r`rarXrbrrrrIs  zBuildDepCommand._spec_depsc Cstjj|j|jjjdd}tdd|D}|jjjj j||gddj j }|sptj j td|d}x.|D]&}x |jD]}||jt|M}qWqzW|std}tj j |dS) Nsrc)Z arch__neqcSsh|] }|jqSr)Z source_name).0pkgrrr sz/BuildDepCommand._remote_deps..)nameZarchzno package matched: %sTzNot all dependencies satisfied)r ZsubjectZSubjectZget_best_queryr"rTfilterlistZquery availableZlatestrPrKrLrrcrZr\) rr4rlZ sourcenamesZpkgsr`rgZreqrbrrrrJs   zBuildDepCommand._remote_deps)rr)__name__ __module__ __qualname__aliasesr0rZsummaryZusager rr) staticmethodr>rArFrPrQrZrHrIrJ __classcell__rr)rrr's   ! 
r)Z __future__rrZdnfpluginscorerrr.r Zdnf.cliZdnf.exceptionsZdnf.rpm.transactionZdnf.yum.rpmtransZ libdnf.reporrr rrZpluginZregister_commandrZCommandrrrrrs  __pycache__/repomanage.cpython-36.pyc000064400000014573151030231510013521 0ustar003 fJ)@szddlmZddlmZddlmZmZddlZddlZddlZddl Z ddl Z Gdddej Z Gdddej jZdS) )absolute_import)unicode_literals)_loggerNcs eZdZdZfddZZS) RepoManage repomanagecs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoManageCommand)selfbasecli) __class__ /usr/lib/python3.6/repomanage.pyr $szRepoManage.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr src@s\eZdZdZedZddZddZddZe d d Z e d d Z d dZ e ddZ dS)r rz"Manage a directory of rpm packagescCs,|jj r(|jj r(|jjtjtjddS)N)stdoutstderr)optsverbosequietr Zredirect_loggerloggingZWARNINGINFO)r rrr pre_configure/szRepoManageCommand.pre_configurecCs0|jj r|jj r|jj|jj}d|_dS)NT)rrrr Zredirect_repo_progressdemandsZsack_activation)r rrrr configure3s zRepoManageCommand.configurec"s@jjr jjr tjjtdjjr@jjr@tjjtdjjr`jjr`tjjtdjj r|jj r|dj_i}i}i}t}t jj }yd}j j j |j jjjgd}|jjj j|tj jr>j jj jj}xH|D]@} | j|kr|j| j|j| jij| jgj| qWWntjjk rg} jjjd} t | dkrtjjtd j j!ddd j j"d d d yj j#| j j$j%d Wn0t&k rt'j(tdj)dj*| YnXYnXj j+j,t-j.dj/} dd| j0| j0|ddj/D} | j1x| D]~} | j2| j3f}||krx| ||kr||j| n | g||<j4| }||kr||jj5| nj5| g||<q@Wg}t}jjrxh|j6D]\\}}|||f}|| d}x6|D].}j4|}x||D]}|j|q WqWqWxb|j7D]V}t8|j6}|| d}x4|D],}x$||D]}|jt|jq|WqnWqJWjjr|xh|j6D]\\}}|||f}|d| }x6|D].}j4|}x||D]}|j|qWqWqWxb|j7D]V}t8|j6}|d| }x4|D],}x$||D]}|jt|jqTWqFWq"Wjjrxh|j6D]\\}}|||f}|d| }x6|D].}j4|}x||D]}|j|qWqWqWt}xb|j7D]V}t8|j6}|| d}x4|D],}x$||D]}|jt|jq2Wq$WqWxx|j7D]l}t8|j6}|d| }xJ|D]B}x:||D].}x&|jD]} | |kr|j9| qWqWqWqdWfdd| j0| j0|ddj/D}!||!}|j1jj:r$t;dj*|nx|D]} t;| q*WdS)Nz%Pass either --old or --new, not both!z)Pass either --oldonly or --new, not both!z)Pass either --old or --oldonly, not both!TZrepomanage_repo)Zbaseurlz.rpmrzNo files to process)sackreposF)Zload_system_repoZload_available_repos)progresszCould not open {}z, 
)flagscSsg|]}|qSrr).0xrrr osz)RepoManageCommand.run..)Z nevra_strict)Zpkg__neqcsg|]}j|qSr)_package_to_path)r$r%)r rrr&s)Zpkg__eq )s   __pycache__/needs_restarting.cpython-36.pyc000064400000023602151030231510014734 0ustar003 f`. @s$ddlmZddlmZddlmZddlmZddlmZmZddlZddl Zddl Z ddl Z ddl Z ddl Z ddlZddlZddd d d d d ddg ZdgZddZddZddZddZddZddZddZdd Zd!d"ZGd#d$d$eZGd%d&d&eZejjGd'd(d(ej j!Z"dS)))absolute_import)division)print_function)unicode_literals)logger_NZkernelz kernel-rtZglibczlinux-firmwareZsystemddbusz dbus-brokerz dbus-daemonZ microcode_ctlzlibc stjj|stSt}xjtj|D]\}tjj|s$|jd rBq$ttjj||&}x|D]}|j |j |fq\WWdQRXq$Wtx4|j j j jdd|DdD]}j |jqWx6fdd|DD] \}}tjtdj||dqWS) z Provide filepath as string if single dir or list of strings Return set of package names contained in files under filepath z.confNcSsh|] }|dqS)r).0xr r &/usr/lib/python3.6/needs_restarting.py Bsz'get_options_from_dir..)namecsh|]}|dkr|qS)rr )r r )packagesr r rDsz`No installed package found for package name "{pkg}" specified in needs-restarting file "{file}".)pkgfile)ospathexistssetlistdirisdirendswithopenjoinaddrstripsackquery installedfilterrrwarningrformat)filepathbaseZoptionsrfplinerrr )rr get_options_from_dir0s"  $&r(ccsxtD]\}}y<|dk r(|t|kr(wt|ddd}|j}WdQRXWn"tk rntjd|wYnXx$|D]}t||}|dk rv|VqvWqWdS)Nrreplace)errorszFailed to read PID %d's smaps.) 
list_smaps owner_uidr readlinesEnvironmentErrorrr"smap2opened_file)uidpidsmapsZ smaps_filelinesr'ofiler r r list_opened_filesKs   r6c csNxHtjdD]:}y t|}Wntk r2w YnXd|}||fVq WdS)Nz/procz/proc/%d/smaps)rrint ValueError)Zdir_r2r3r r r r,\s r,cstifdd}|S)Ncs,j|}|k r|S|}||<|S)N)get)Zparamval)cachefuncsentinelr r wrapperis  zmemoize..wrapper)object)r<r>r )r;r<r=r memoizefsr@cCstj|tjS)N)rstatST_UID)fnamer r r r-ssr-cCs$|jj|dj}|r |dSdS)N)rr)rr!run)rrCZmatchesr r r owning_packagewsrEc CsPd|}t|}tjj|j}WdQRXdj|jd}td||fdS)Nz/proc/%d/cmdline z%d : %s)rdnfZi18nZucdreadrsplitprint)r2ZcmdlineZ cmdline_fileZcommandr r r print_cmd~s  rLc Cstj}|jdd}tj|d}d}y|jd|j|}Wn<tjk rv}zt|}tjdj ||dSd}~XnXtj|dd}|j dd}|j d r|SdS) Nzorg.freedesktop.systemd1z/org/freedesktop/systemd1z org.freedesktop.systemd1.Managerz)Failed to get systemd unit for PID {}: {}zorg.freedesktop.DBus.Properties)Zdbus_interfacezorg.freedesktop.systemd1.UnitZIdz.service) rZ SystemBusZ get_objectZ InterfaceZ GetUnitByPIDZ DBusExceptionstrrr"r#ZGetr) r2ZbusZsystemd_manager_objectZsystemd_manager_interfaceZ service_proxyemsgZservice_propertiesrr r r get_service_dbuss0  rPcCsn|jd}|dkrdS|jddkr(dS||dj}|jd}|dkrVt||dSt||d|dSdS)N/rz00:z (deleted)FT)findstriprfind OpenedFile)r2r'ZslashfnZ suffix_indexr r r r0s   r0c@s*eZdZejdZddZeddZdS)rUz^(.+);[0-9A-Fa-f]{8,}$cCs||_||_||_dS)N)deletedrr2)selfr2rrWr r r __init__szOpenedFile.__init__cCs(|jr"|jj|j}|r"|jdS|jS)a;Calculate the name of the file pre-transaction. In case of a file that got deleted during the transactionm, possibly just because of an upgrade to a newer version of the same file, RPM renames the old file to the same name with a hexadecimal suffix just before delting it. 
)rWRE_TRANSACTION_FILEmatchrgroup)rXr\r r r presumed_names  zOpenedFile.presumed_nameN) __name__ __module__ __qualname__recompiler[rYpropertyr^r r r r rUs rUc@s4eZdZddZeddZeddZddZd S) ProcessStartcCs|j|_|j|_dS)N) get_boot_time boot_timeget_sc_clk_tck sc_clk_tck)rXr r r rYs zProcessStart.__init__c Cshttjdj}tjjdrdtdd8}|jjj dj}tt j t |}t ||SQRX|S)a  We have two sources from which to derive the boot time. These values vary depending on containerization, existence of a Real Time Clock, etc. For our purposes we want the latest derived value. - st_mtime of /proc/1 Reflects the time the first process was run after booting This works for all known cases except machines without a RTC - they awake at the start of the epoch. - /proc/uptime Seconds field of /proc/uptime subtracted from the current time Works for machines without RTC iff the current time is reasonably correct. Does not work on containers which share their kernel with the host - there the host kernel uptime is returned z/proc/1z /proc/uptimerbrN) r7rrAst_mtimerisfilerreadlinerSrJtimefloatmax)Zproc_1_boot_timefZuptimeZproc_uptime_boot_timer r r rfs  zProcessStart.get_boot_timecCstjtjdS)N SC_CLK_TCK)rsysconf sysconf_namesr r r r rhszProcessStart.get_sc_clk_tckc CsLd|}t|}|jjj}WdQRXt|d}||j}|j|S)Nz /proc/%d/stat)rrIrSrJr7rirg)rXr2Zstat_fnZ stat_fileZstatsZticks_after_bootZsecs_after_bootr r r __call__s    zProcessStart.__call__N)r_r`rarY staticmethodrfrhrvr r r r res  rec@s4eZdZd ZedZeddZddZddZ d S) NeedsRestartingCommandneeds-restartingz/determine updated binaries that need restartingcCsF|jdddtdd|jdddtdd|jd d dtd ddS) Nz-uz --useronly store_truez#only consider this user's processes)actionhelpz-rz --reboothintzKonly report whether a reboot is required (exit code 1) or not (exit code 0)z-sz --servicesz%only report affected systemd services) add_argumentr)parserr r r set_argparsers      z$NeedsRestartingCommand.set_argparsercCs|jj}d|_dS)NT)clidemandsZsack_activation)rXrr r r configure sz 
NeedsRestartingCommand.configurecCsNt}tjt|jj}t|}ttj j |jj j d|j}t j||jjrt}t}|jjjj}x,|jt dD]}|j|jkrx|j|jqxW|jdddgd}t|dkrx,|jtdD]}|j|jkr|j|jqW|s|rfttdxt|D]} td| qWxt|D]} td | q$Wtttd ttd d tjj nttd ttddSt} |jj!rtj"nd} xHt#| D]<} || j$}|dkrĐq|j|| j%kr| j| j%qW|jj&r.tddt| D} x | D]} | dk rt| qWdSxt| D]}t'|q8WdS)Nz#etc/dnf/plugins/needs-restarting.d/)rrz dbus-daemonz dbus-brokerrz;Core libraries or services have been updated since boot-up:z * %sz8 * %s (dependency of dbus. Recommending reboot of dbus)z2Reboot is required to fully utilize these updates.zMore information:z)https://access.redhat.com/solutions/27943z>No core libraries or services have been updated since boot-up.zReboot should not be necessary.cSsg|] }t|qSr )rP)r r2r r r Bsz.NeedsRestartingCommand.run..)(re functoolspartialrEr%rr@r(rrrZconfZ installroot NEED_REBOOTextendZoptsZ reboothintrrr r!Z installtimergrrlenNEED_REBOOT_DEPENDS_ON_DBUSrKrsortedrH exceptionsErrorZuseronlygeteuidr6r^r2ZservicesrL)rXZ process_startZ owning_pkg_fnoptZ need_rebootZneed_reboot_depends_on_dbusr rZdbus_installedrZ stale_pidsr1r5namesr2r r r rDsd                zNeedsRestartingCommand.runN)ry) r_r`raaliasesrZsummaryrwrrrDr r r r rxs  rx)#Z __future__rrrrZdnfpluginscorerrrHZdnf.clirrrrbrArnrrr(r6r,r@r-rErLrPr0r?rUreZpluginZregister_commandrZCommandrxr r r r s:      "+__pycache__/debuginfo-install.cpython-36.opt-1.pyc000064400000013665151030231510015751 0ustar003 gt`L+@sNddlmZmZddlZddlmZGdddejZGdddejj Z dS))_loggerN)Packagecs,eZdZdZdZfddZddZZS)DebuginfoInstallz5DNF plugin supplying the 'debuginfo-install' command.zdebuginfo-installcs4tt|j||||_||_|dk r0|jtdS)zInitialize the plugin instance.N)superr__init__basecliZregister_commandDebuginfoInstallCommand)selfrr ) __class__'/usr/lib/python3.6/debuginfo-install.pyr s zDebuginfoInstall.__init__cCsf|j|jj}|jdo.|jddo.|jdd}|rbtjj|jj j dd}t |rb|jj j dS)Nmain autoupdatez *-debuginfo)Z name__glob)Z read_configrconfZ has_sectionZ has_optionZ getbooleandnfsackZ 
_rpmdb_sackqueryfiltermlenreposenable_debug_repos)r ZcprZdbginfor r rconfig(s   zDebuginfoInstall.config)__name__ __module__ __qualname____doc__namerr __classcell__r r )r rrs rcsheZdZdZdZedZfddZeddZ dd Z d d Z d d Z ddZ ddZddZZS)r z! DebuginfoInstall plugin for DNF debuginfo-installzinstall debuginfo packagescs4tt|j|t|_t|_t|_t|_dS)N)rr rsetavailable_debuginfo_missingavailable_debugsource_missinginstalled_debuginfo_missinginstalled_debugsource_missing)r r )r r rr:s z DebuginfoInstallCommand.__init__cCs|jddddS)Npackage+)nargs) add_argument)parserr r r set_argparserBsz%DebuginfoInstallCommand.set_argparsercCs0|jj}d|_d|_d|_d|_|jjjdS)NT) r demandsZ resolvingZ root_userZsack_activationZavailable_reposrrr)r r,r r r configureFs z!DebuginfoInstallCommand.configurecCsg}ttj}ttj}x|jjD]}tjj|j |j j dd}|d}|sxt j td|j jjj||j|q$|jj}|j|jjxdt|jD]T}|jtjr|d| |kr|j||jtjr|d| |kr|j|qWx|jD]} | d} | jri} x"| D]} | j| jgj| q(Wxj| jD]^} | d} |j| j | s|j| j!| s|j"j#t$| |j| j%| sP|j&j#t$| qPWq| j'jtjs| j'jtjr|j(| q|ddk rb|j)| j |ds2|j)| j!|ds2|j*j#dj+| j'| j,|j)| j%|ds|j-j#dj+| j'| j,q|j.| j | s|j.| j!| s|j*j#dj+| j'| j,|j.| j%| s|j-j#dj+| j'| j,qWq$W|j*rt j tdd j/t0|j*|j-rt j td d j/t0|j-|j"r8t j td d j/t0|j"|j&r\t j td d j/t0|j&|r|j j1j2rtj3j4td dj/|ddS)NF)Zwith_srcrzNo match for argument: %srZnevraz{}-{}zICould not find debuginfo package for the following available packages: %sz, zKCould not find debugsource package for the following available packages: %szICould not find debuginfo package for the following installed packages: %szKCould not find debugsource package for the following installed packages: %szUnable to find a match )Zpkg_spec)5rrZDEBUGINFO_SUFFIXZDEBUGSOURCE_SUFFIXZoptsr&rZsubjectZSubjectZget_best_solutionrrrinforoutputZtermZboldappendZ availableZ _name_dictupdateZ installedlistkeysendswithpopvaluesZ _from_system setdefaultarch_install_debug_from_system 
debug_nameZsource_debug_namer$addstrZdebugsource_namer%r_install_install_debugr"formatevrr#_install_debug_no_nevrajoinsortedrstrict exceptionsZPackagesNotAvailableError)r Z errors_specZdebuginfo_suffix_lenZdebugsource_suffix_lenZpkgspecZsolutionrZ package_dictrpkgsZ first_pkgZ arch_dictpkgZpackage_arch_listr r rrunNs           zDebuginfoInstallCommand.runcCs:|jjjj||j|j|j|jd}|r6|j|dSdS)N)repochversionreleaser9TF) rrrfilterrJrKrLr9r>)r r;rHrr r rr:s  z2DebuginfoInstallCommand._install_debug_from_systemcCsi}|jdk r|j|d<|jdk r,|j|d<|jdk r@|j|d<|jdk rT|j|d<|jjjjfd|i|}|r|j|dSdS)NZ epoch__globZ version__globZ release__globZ arch__globrTF) rJrKrLr9rrrrMr>)r r;Z base_nevrakwargsrr r rr?s         z&DebuginfoInstallCommand._install_debugcs8|jjjjfdd|Dd}|r4|j|dSdS)Ncsg|]}dj|j|jqS)z{}-{}.{})r@rAr9).0p)r;r r szCDebuginfoInstallCommand._install_debug_no_nevra..)Z nevra_strictTF)rrrrr>)r r;rGrr )r;rrBs   z/DebuginfoInstallCommand._install_debug_no_nevracCs:tjj|jj}|j|d|jjj||jjj ddS)N)rH)ZselectZoptional) rselectorZSelectorrrr!ZgoalZinstallrrE)r rGrRr r rr>s z DebuginfoInstallCommand._install)r )rrrraliasesrZsummaryr staticmethodr+r-rIr:r?rBr>rr r )r rr 4s  |  r ) ZdnfpluginscorerrrZ dnf.packagerZPluginrr ZCommandr r r r rs __pycache__/repograph.cpython-36.opt-1.pyc000064400000005342151030231510014323 0ustar003 gt`@s^ddlmZddlmZddlmZmZddlZdZGdddej Z Gdd d ej j Z dS) )absolute_import)unicode_literals)_loggerNzY size="20.69,25.52"; ratio="fill"; rankdir="TB"; orientation=port; node[style="filled"]; cs eZdZdZfddZZS) RepoGraph repographcs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoGraphCommand)selfbasecli) __class__/usr/lib/python3.6/repograph.pyr )szRepoGraph.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr%src@s<eZdZd ZedZddZddZdd Ze d d Z d S)r r repo-graphz4Output a full package dependency graph in dot formatcCsV|jj}d|_d|_|jjrRx4|jjjD]$}|j |jjkrF|j q*|j q*WdS)NT) r demandsZsack_activationZavailable_reposZoptsrepor Zreposalliddisableenable)r rrrrr 
configure4s zRepoGraphCommand.configurecCs|jtdS)N)do_dot DOT_HEADER)r rrrrun?szRepoGraphCommand.runc Csd}|j|jj}tdtdj|x|jD]}t|||krRt||}ddt||}|d}d}td j||||td j|x||D]}td j|qWtd j|||q2Wtd dS)Nrzdigraph packages {z{}g?g333333?g?g?z""{}" [color="{:.12g} {:.12g} {}"];z "{}" -> {{z"{}"z!}} [color="{:.12g} {:.12g} {}"]; }gs?) _get_depsr sackprintformatkeyslen) r headerZmaxdepsZdepspkghsbreqrrrrBs  zRepoGraphCommand.do_dotc Csi}i}g}|jj}x|D]}i}x|jD]}t|}||krDq.|jdrPq.||krb||} n@|j|d} | stjtd||j |q.n | dj } | ||<| |j krd|| <| |ks.| |krq.nd|| <|j ||j <q.WqW|S)Nz solvable:)ZprovideszNothing provides: '%s'r) Zquery availablerequiresstr startswithfilterrdebugrappendrr') r$r0Zprovskipr/r*Zxxr.ZreqnameZproviderrrrr#Ys8         zRepoGraphCommand._get_depsN)rr) rrraliasesrZsummaryrr r staticmethodr#rrrrr 0s  r )Z __future__rrZdnfpluginscorerrZdnf.cliZdnfrZPluginrr ZCommandr rrrrs   __pycache__/repograph.cpython-36.pyc000064400000005342151030231510013364 0ustar003 gt`@s^ddlmZddlmZddlmZmZddlZdZGdddej Z Gdd d ej j Z dS) )absolute_import)unicode_literals)_loggerNzY size="20.69,25.52"; ratio="fill"; rankdir="TB"; orientation=port; node[style="filled"]; cs eZdZdZfddZZS) RepoGraph repographcs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoGraphCommand)selfbasecli) __class__/usr/lib/python3.6/repograph.pyr )szRepoGraph.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr%src@s<eZdZd ZedZddZddZdd Ze d d Z d S)r r repo-graphz4Output a full package dependency graph in dot formatcCsV|jj}d|_d|_|jjrRx4|jjjD]$}|j |jjkrF|j q*|j q*WdS)NT) r demandsZsack_activationZavailable_reposZoptsrepor Zreposalliddisableenable)r rrrrr configure4s zRepoGraphCommand.configurecCs|jtdS)N)do_dot DOT_HEADER)r rrrrun?szRepoGraphCommand.runc Csd}|j|jj}tdtdj|x|jD]}t|||krRt||}ddt||}|d}d}td j||||td j|x||D]}td j|qWtd j|||q2Wtd dS)Nrzdigraph packages {z{}g?g333333?g?g?z""{}" [color="{:.12g} {:.12g} {}"];z "{}" -> {{z"{}"z!}} [color="{:.12g} {:.12g} {}"]; }gs?) 
_get_depsr sackprintformatkeyslen) r headerZmaxdepsZdepspkghsbreqrrrrBs  zRepoGraphCommand.do_dotc Csi}i}g}|jj}x|D]}i}x|jD]}t|}||krDq.|jdrPq.||krb||} n@|j|d} | stjtd||j |q.n | dj } | ||<| |j krd|| <| |ks.| |krq.nd|| <|j ||j <q.WqW|S)Nz solvable:)ZprovideszNothing provides: '%s'r) Zquery availablerequiresstr startswithfilterrdebugrappendrr') r$r0Zprovskipr/r*Zxxr.ZreqnameZproviderrrrr#Ys8         zRepoGraphCommand._get_depsN)rr) rrraliasesrZsummaryrr r staticmethodr#rrrrr 0s  r )Z __future__rrZdnfpluginscorerrZdnf.cliZdnfrZPluginrr ZCommandr rrrrs   __pycache__/system_upgrade.cpython-36.opt-1.pyc000064400000054753151030231510015401 0ustar003 fh@sdZddlmZmZmZmZddlZddlZddlZddl Z ddl Z ddl Z ddl m Z ddlmZmZddlZddlZddlmZddlmZddlZddlmZmZddlZed e jd Ze jd Ze jd Ze jd Z eZ!dZ"edZ#edZ$edZ%dZ&ddZ'ddZ(gfddZ)d7ddZ*ddZ+Gddde,Z-Gdd d e,Z.e.Z/Gd!d"d"ej0j1Z2d#d$Z3d%d&Z4d'd(Z5d)d*Z6d+d,dd-d.gZ7Gd/d0d0ej8Z9Gd1d2d2ej:j;ZdS)8zGsystem_upgrade.py - DNF plugin to handle major-version system upgrades.)callPopen check_outputCalledProcessErrorN)journal)_logger)CliError)ucd)serialize_transactionTransactionReplayzthe color of the skyZ 9348174c5cc74001a71ef26bd79d302eZ fef1cc509d5047268b83a3a553f54b43Z 3e0a5636d16b4ca4bbe5321d06c6aa62Z 8cec00a1566f4d3594f116450395f06cz/usr/bin/plymouthzfprrrr=s   z State._readc CsFtjjtjj|jt|jd}tj |j |dddWdQRXdS)NwT)indent sort_keys) r$r% ensure_dirrrdirnamer;rr@dumpr<)r>Zoutfrrrr5sz State.writecCs&tjj|jrtj|j|jdS)N)rrexistsr;r&r=)r>rrrclears z State.clearcCs|S)Nr)r>rrr __enter__szState.__enter__cCs|dkr|jdS)N)r5)r>exc_type exc_value tracebackrrr__exit__szState.__exit__cs"fdd}fdd}t||S)Ncs||j<dS)N)r<)r>value)optionrrsetpropszState._prop..setpropcs |jjS)N)r<get)r>)rTrrgetpropszState._prop..getprop)property)rTrUrWr)rTr_props  z State._prop state_versiondownload_statusdestdirtarget_releaseversystem_releasevergpgcheckgpgcheck_reposrepo_gpgcheck_reposupgrade_statusupgrade_command distro_syncenable_disable_reposmodule_platform_idN)__name__ __module__ 
__qualname__r?r=r5rMrNrRrYrZr[r\r]r^r_r`rarbrcrdrerfrrrrr:s(  r:c@s@eZdZdZddZddZddZdd Zd d Zd d Z dS)PlymouthOutputzA plymouth output helper class. Filters duplicate calls, and stops calling the plymouth binary if we fail to contact it. cCsd|_t|_d|_dS)NT)alivedict _last_args _last_msg)r>rrrr?szPlymouthOutput.__init__c Gsj||jj|k}|jr| s$|dkrdytt|f|dk|_Wntk rXd|_YnX||j|<|jS)Nz--pingrF)rmrVrkrPLYMOUTHr')r>cmdargsZdupe_cmdrrr _plymouths  zPlymouthOutput._plymouthcCs |jdS)Nz--ping)rr)r>rrrpingszPlymouthOutput.pingcCs4|jr |j|kr |jdd|j||_|jdd|S)Nz hide-messagez--textzdisplay-message)rnrr)r>msgrrrmessageszPlymouthOutput.messagec CsRd}y$ttdg}tjdt|r&d}Wnttfk r@YnX|jdd|S)NZupdatesz--helpz--system-upgradezsystem-upgradez change-modez--)rroresearchr rr'rr)r>modesrrrset_modes zPlymouthOutput.set_modecCs|jddt|S)Nz system-updatez --progress)rrstr)r>ZpercentrrrprogressszPlymouthOutput.progressN) rgrhri__doc__r?rrrsrurzr|rrrrrjs  rjc@s$eZdZddZddZddZdS)PlymouthTransactionProgresscCs|j||||dS)N)_update_plymouth)r>packageactionZti_doneZti_totalZts_doneZts_totalrrrr|sz$PlymouthTransactionProgress.progresscCsd|dkr dS|tjjkr0tjtd||ntjdtd||tj|j||||dS)NgV@Zg$@)r$callbackZ PKG_VERIFYPlymouthr|intru _fmt_event)r>rrcurrenttotalrrrrs  z,PlymouthTransactionProgress._update_plymouthcCs tjjj||}d||||fS)Nz[%d/%d] %s %s...)r$ transactionZACTIONSrV)r>rrrrrrrrsz&PlymouthTransactionProgress._fmt_eventN)rgrhrir|rrrrrrr~sr~ccsJtj}|j|jddd}x(|D] }|d}||kr8q"|}|Vq"WdS)zVFind all boots with this message id. Returns the entries of all found boots. 
r) MESSAGE_IDZ_UIDN_BOOT_ID)rReaderZ add_matchhex) message_idjZoldbootr)Zbootrrr find_bootss rc Cstttdd }xJtttD]:\}}tdj|d|d|d|jdd|jddqW|d krpttd dS) Nz3The following boots appear to contain upgrade logs:ru){} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}rZ__REALTIME_TIMESTAMPSYSTEM_RELEASEVERz??TARGET_RELEASEVERz-- no logs were found --r)r7r enumeraterID_TO_IDENTIFY_BOOTSformatrV)nr)rrr list_logs s  rc CsZtt|}y(|dkrt|dkr*|d8}||dStk rTttdYnXdS)Nrrrz!Cannot find logs with this index.)listr IndexErrorr r)rrZbootsrrr pick_boot.s  rcCsDtt|}tdd|jg}|j|j}|dkr@tjjt ddS)NZ journalctlz--bootrz%Unable to match systemd journal entry) rrrrwait returncoder$ exceptionsErrorr)rZboot_idZprocessZrcrrrshow_log=s  rZdownloadZcleanupgradelogcs eZdZdZfddZZS)SystemUpgradePluginzsystem-upgradecs8tt|j|||r4|jt|jt|jtdS)N)superrr?Zregister_commandSystemUpgradeCommandOfflineUpgradeCommandOfflineDistrosyncCommand)r>basecli) __class__rrr?Ns   zSystemUpgradePlugin.__init__)rgrhrinamer? __classcell__rr)rrrKsrcs(eZdZdEZedZdZfddZeddZ d d Z d d Z d dZ ddZ ddZddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd1d2Zd3d4Zd5d6Z d7d8Z!d9d:Z"d;d<Z#d=d>Z$d?d@Z%dAdBZ&dCdDZ'Z(S)Frsystem-upgradefedupz+Prepare system for upgrade to a new releasezvar/lib/dnf/system-upgradecsjtt|j|tjj|jjj|j |_ tjj|j d|_ tjj|jjjd|_ t tjj|j d|_dS)Nzsystem-upgrade-transaction.jsonz system-updatezsystem-upgrade-state.json)rrr?rrr#rr1r-DATADIRdatadirtransaction_file magic_symlinkr:state)r>r)rrrr?\s zSystemUpgradeCommand.__init__cCsJ|jdddtdd|jddtdd jtd |jd ttd d dS)Nz--no-downgraderdZ store_falsez=keep installed packages if the new release's version is older)destrhelptidrz[%s]|)nargschoicesmetavarz--numberzwhich logs to show)typer) add_argumentrCMDSr#r)parserrrr set_argparserds   z"SystemUpgradeCommand.set_argparsercCs(tj||tj|jj|jjtjjddS)zLog directly to the journal.)rZPRIORITYrrZ DNF_VERSIONN) rsendZ LOG_NOTICErr^r]r$constVERSION)r>rurrrr log_statusns zSystemUpgradeCommand.log_statuscCs|jd|jddS)NZcheck pre_configure) 
_call_sub)r>rrrrws z"SystemUpgradeCommand.pre_configurecCs|jddS)N configure)r)r>rrrr{szSystemUpgradeCommand.configurecCs|jddS)Nrun)r)r>rrrr~szSystemUpgradeCommand.runcCs|jddS)Nr)r)r>rrrrun_transactionsz$SystemUpgradeCommand.run_transactioncCs|jddS)NZresolved)r)r>rrr run_resolvedsz!SystemUpgradeCommand.run_resolvedcCs.t||d|jjdd}t|r*|dS)Nrr)getattroptsrcallable)r>rZsubfuncrrrrszSystemUpgradeCommand._call_subcCs(|jjtkr$tdj|d}t|dS)NzFIncompatible version of data. Rerun 'dnf {command} download [OPTIONS]')command)rrZ STATE_VERSIONrrr )r>rrtrrr_check_state_versions z)SystemUpgradeCommand._check_state_versioncCs*|j|jj_|jjr|jjnd|jj_dS)N)rrr1cachedirrr\)r>rrr _set_cachedirs z"SystemUpgradeCommand._set_cachedircCsttjjtjjg}ttjj}i}i}xl|jjjD]^}|j |krp|j }|j |j |j jij t|i|j <q6|j |kr6|j |j t|j i|j <q6W||fS)z forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}} reverse = {pkg_nevra: {tsi.action: tsi.reason}} :return: forward, reverse )setr$rZBACKWARD_ACTIONSlibdnfZ!TransactionItemAction_REINSTALLEDZFORWARD_ACTIONSrrrpkgreason setdefaultrepoidr{)r>Zbackward_actionZforward_actionsZforwardreverseZtsirrrr%_get_forward_reverse_pkg_reason_pairss  & z:SystemUpgradeCommand._get_forward_reverse_pkg_reason_pairscCsb|j|jj_|jjr|jjnd|jj_d|jjkrJ|jj rJtt dnd|jjkr^d|j_dS)Nzoffline-distrosynczFCommand 'offline-distrosync' cannot be used with --no-downgrade optionzoffline-upgradeF) rrr1rrr\rrdr r)r>rrrpre_configure_downloads   z+SystemUpgradeCommand.pre_configure_downloadcCs |jdS)N)r)r>rrrpre_configure_rebootsz)SystemUpgradeCommand.pre_configure_rebootcCs.|j|jjr|jj|j_|jj|jj_dS)N) rrrerrepos_edr]rr1r.)r>rrrpre_configure_upgrades z*SystemUpgradeCommand.pre_configure_upgradecCs |jdS)N)r)r>rrrpre_configure_cleansz(SystemUpgradeCommand.pre_configure_cleancCsd|jjksd|jjkrtjtdt}|rLtd}tj|jt||j j rtd}|j j j s|j j jdj|dj|d rtjtd tjd t|j j |jjd nd |jjkr|jj|jd |jj_d |jj_d |jj_d |jj_d |jj_|j j jdg7_dS)Nzsystem-upgraderz\WARNING: this operation is not supported on the RHEL distribution. 
Proceed at your own risk.z-Additional information for System Upgrade: {}zyBefore you continue ensure that your system is fully upgraded by running "dnf --refresh upgrade". Do you want to continuez {} [y/N]: z {} [Y/n]: )rtZdefaultyes_msgzOperation aborted.r)r2zoffline-upgradeTZtest)rrrrCrr rrr rZ _promptWantedr1ZassumenooutputZ userconfirmerrorsysexitr3r.rZ _populate_update_security_filterdemands root_user resolvingavailable_repossack_activationZfreshest_metadataZtsflags)r>Zhelp_urlrtrrrconfigure_downloads*        z'SystemUpgradeCommand.configure_downloadcCsd|jj_dS)NT)rrr)r>rrrconfigure_rebootsz%SystemUpgradeCommand.configure_rebootcCsd|jj_d|jj_d|jj_d|jj_|jj|j_|jj dk rN|jj |j j _ |jj dk rx$|j j jD]}|j|jj k|_ qhW|jjdk rx$|j j jD]}|j|jjk|_qW|jj|j j _d|jj_d|j j _t|jj_d|j j _d|j j _dS)NTF)rrrrrrrrdrr_rr1r`reposvaluesrra repo_gpgcheckrfZ cacheonlyZ assumeyesr~Ztransaction_displayZclean_requirements_on_removeZinstall_weak_deps)r>rrrrconfigure_upgrades&            z&SystemUpgradeCommand.configure_upgradecCsd|jj_dS)NT)rrr)r>rrrconfigure_cleansz$SystemUpgradeCommand.configure_cleancCsdS)Nr)r>rrr configure_logsz"SystemUpgradeCommand.configure_logcCs~|jjdksttd|j|jj|jj|jjkrRtdj|jjd}t|t j j |j rlttdt jj|jdS)Ncompletezsystem is not ready for upgradezZthe transaction was not prepared for '{command}'. Rerun 'dnf {command} download [OPTIONS]')rzupgrade is already scheduled)rr[r rrrrrcrrrlexistsrr$r%rIr)r>rtrrr check_reboot s    z!SystemUpgradeCommand.check_rebootcCstjj|js$tjtdtdtj|j|j krLtjtdtdt j j j |j|jj}|sp|jj}|j||jjdkstdj|d}t|dS)Nz-trigger file does not exist. exiting quietly.rz1another upgrade tool is running. 
exiting quietly.readyz/use 'dnf {command} reboot' to begin the upgrade)r)rrrrrrr SystemExitreadlinkrr$ZyumZmiscZunlink_frrcrrrrbrr )r>rrtrrr check_upgrades  z"SystemUpgradeCommand.check_upgradec Cs,tj|j|j|j }d|_WdQRXdS)Nr)rsymlinkrrrrb)r>rrrr run_prepare,sz SystemUpgradeCommand.run_preparecCs6|j|jjddksdS|jtdttdS)NrrzRebooting to perform upgrade.)rrrrrREBOOT_REQUESTED_IDr)r>rrr run_reboot3s  zSystemUpgradeCommand.run_rebootc sjjrjjn jjjjdkrjjfddjjjD}|r\jj|fddjjj D}|rjj|j $}d|_ jj j |_jj j|_WdQRXdS)Noffline-upgradeoffline-distrosynccs$g|]}jjjj|jr|jqSr)rhistorygrouprVr).0g)r>rr Gsz5SystemUpgradeCommand.run_download..cs$g|]}jjjj|jr|jqSr)rrenvrVr)rr)r>rrrJsZ downloading)rr)rrdrZ upgrade_allrZ read_compscompsgroupsZenv_group_upgradeZ environmentsrr[r1r.r]r\)r>Zinstalled_groupsZinstalled_environmentsrr)r>r run_download=s       z!SystemUpgradeCommand.run_downloadc Csd}|j}d|_|j}WdQRX|dkr4td}n|dkrFtd}ntd}|j|ttjtjdtj |t t |j |j |_|jjdS) NZ incompletezoffline-upgradez1Starting offline upgrade. This will take a while.zoffline-distrosyncz4Starting offline distrosync. This will take a while.z0Starting system upgrade. 
This will take a while.r)rrbrcrrUPGRADE_STARTED_IDrrzr|rur9r rrreplayr)r>rrrtrrr run_upgradeSs      z SystemUpgradeCommand.run_upgradec Csdtjtdt|jjjtjj |jjjj g|j $}d|_ d|_ d|_d|_d|_WdQRXdS)NzCleaning up downloaded data...)rrrr*rr1rr$Z persistorZTempfilePersistorZdb_pathrr[rZrbrcr\)r>rrrr run_cleanms zSystemUpgradeCommand.run_cleancCs |jjrt|jjntdS)N)rZnumberrr)r>rrrrun_logzszSystemUpgradeCommand.run_logcCs|jjdS)z5Adjust transaction reasons according to stored valuesN)r Zpost_transaction)r>rrrresolved_upgradesz%SystemUpgradeCommand.resolved_upgradecCs|jjj}|js&tjtddSt|}yLt|j d"}t j ||ddd|j dWdQRXt tdj|j Wn<tk r}z tjjtdjt|WYdd}~XnXtjj|jjj}|j}d |_t|_|jj|_|jjj|_d d |jjj D|_!d d |jjj D|_"||_#|jjj$|_%|jjj&|_&|jj'|_(|jjj)|_)|jj*|_+WdQRXt,j|jj*d }tj||j-tdt.dS)NzKThe system-upgrade transaction is empty, your system is already up-to-date.rErFT)rGrH zTransaction saved to {}.zError storing transaction: {}rcSsg|]}|jr|jqSr)r_r)rrrrrrsz=SystemUpgradeCommand.transaction_download..cSsg|]}|jr|jqSr)rr)rrrrrrs)rzDownload finished.)/rrZ get_currentZpackagesrrrr rrr@rKr5r7rr'r$rr r{r+r,r1r-rr[rrZrrdr_rrr`rar^r.r]rfrrer\rrcDOWNLOAD_FINISHED_MSGrDOWNLOAD_FINISHED_ID)r>rdatafr8Z system_verrrtrrrtransaction_downloads: ,       z)SystemUpgradeCommand.transaction_downloadcCs@tjtd|jtdt|j|jjddkrrrrtransaction_upgrades  z(SystemUpgradeCommand.transaction_upgrade)rr))rgrhrialiasesrsummaryrr? staticmethodrrrrrrrrrrrrrrrrrrrrrrrrrr r r rrrrrr)rrrVsF        (rc@seZdZdZedZdS)roffline-upgradez%Prepare offline upgrade of the systemN)r)rgrhrirrrrrrrrsrc@seZdZdZedZdS)roffline-distrosyncz(Prepare offline distrosync of the systemN)r)rgrhrirrrrrrrrsr)N)?r} subprocessrrrrr@rZos.pathrvrZuuidZsystemdrZdnfpluginscorerrr$Zdnf.clir Zdnf.i18nr Zdnf.transactionZdnf.transaction_srr r Z libdnf.confrZUUIDrrr rrror/rr0rrr r*r3r9objectr:rjrrZTransactionProgressr~rrrrrZPluginrrZCommandrrrrrrrsd          @.  
e__pycache__/groups_manager.cpython-36.opt-1.pyc000064400000020721151030231510015343 0ustar003 f4@sddlmZddlmZddlZddlZddlZddlZddlZddlZddl Z ddl m Z m Z ddl Z ddlZ dZejdjeZejdZdddd Zd d Zd d ZddZe jjGddde jjZdS))absolute_import)unicode_literalsN)_loggerz -a-z0-9_.:z^[{}]+$z^[-a-zA-Z0-9_.@]+$T)Zdefault_explicitZuservisible_explicitZ empty_groupscCstj|stjtd|S)zgroup id validatorzInvalid group id) RE_GROUP_IDmatchargparseArgumentTypeErrorr)valuer $/usr/lib/python3.6/groups_manager.py group_id_type.s r cCsN|jdd}t|dkr&tjtd|\}}tj|sFtjtd||fS)ztranslated texts validator:z6Invalid translated data, should be in form 'lang:text'z*Invalid/empty language for translated data)splitlenrr rRE_LANGr)r datalangtextr r r translation_type5s    rcCs:|j}tjdjtd|}|s6tjjtdj||S)z#generate group id based on its namez[^{}]zFCan't generate group id from '{}'. Please specify group id using --id.) lowerresubformatRE_GROUP_ID_VALIDdnfcliCliErrorr)rgroup_idr r r text_to_idAsr!csdeZdZdZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZZS)GroupsManagerCommandgroups-managerz$create and edit groups metadata filecstt|j|tj|_dS)N)superr"__init__libcompsCompscomps)selfr) __class__r r r%QszGroupsManagerCommand.__init__cCs|jddgdtdd|jddgdtdd|jddtd d |jd d d tdd|jdttdd|jddtdd|jdtdd|jdttdd|jddgdttdd|jddgdttd d|j}|jd!d"d dtd#d$|jd%d"d&dtd'd$|j}|jd(d td)d*|jd+d td,d*|jd-d d td.d|jd/d td0d*|jd1d2d3td4d5dS)6Nz--loadappendz COMPS.XMLzload groups metadata from file)actiondefaultmetavarhelpz--savezsave groups metadata to filez--mergez%load and save groups metadata to file)r.r/z--print store_trueFz#print the result metadata to stdout)r,r-r/z--idzgroup id)typer/z-nz--namez group name)r/z --descriptionzgroup descriptionz--display-orderzgroup display orderz--translated-namez LANG:TEXTztranslated name for the group)r,r-r.r1r/z--translated-descriptionz$translated description for the groupz--user-visible user_visiblez%make the group user visible (default))destr,r-r/z--not-user-visibleZ store_falsezmake the group user invisiblez 
--mandatoryz%add packages to the mandatory section)r,r/z --optionalz$add packages to the optional sectionz--removez5remove packages from the group instead of adding themz--dependenciesz-include also direct dependencies for packagespackages*ZPACKAGEzpackage specification)nargsr.r/) add_argumentrr intrZadd_mutually_exclusive_group)parserZvisibleZsectionr r r set_argparserUsR                        z"GroupsManagerCommand.set_argparsercCs|jj}|jjr"d|_d|_d|_|jjrP|jjj d|jj|jj j |jj|jj s|jj s|jjs|jjs|jjdk s|jjr|jj r|jj rtjjtddS)NTFrz;Can't edit group without specifying it (use --id or --name))rdemandsoptsr4Zsack_activationZavailable_reposZload_system_repomergeloadinsertsaver+ description display_ordertranslated_nametranslated_descriptionr2idnamerrr)r)r;r r r configures" zGroupsManagerCommand.configurecCs x|jjD] }tj}yp|jdr~tj|F}tjdd}z$t j |||j |j |j Wdtj|j XWdQRXn |j |Wn~tttjfk r}zXt}x2|jD]&}||krqtj|j|j|qWtjjtdj||WYdd}~Xq X|j|7_q WdS)zm Loads all input xml files. Returns True if at least one file was successfuly loaded z.gzF)deleteNzCan't load file "{}": {})r<r>r&r'endswithgzipopentempfileZNamedTemporaryFileshutilZ copyfileobjcloseZ fromxml_frFosunlinkIOErrorOSErrorZ ParserErrorsetZget_last_errorsrerrorstripaddr exceptionsErrorrrr()r) file_nameZ file_compsZgz_fileZ temp_fileerrseenrTr r r load_input_filess,    $z%GroupsManagerCommand.load_input_filescCsx|jjD]}y|jj|td}Wn*tjk rL}z |g}WYdd}~XnX|r x"|ddD]}tj|j q`Wt j j t dj||dj q WdS)N) xml_optionszCan't save file "{}": {}r_)r<r@r(Zxml_fCOMPS_XML_OPTIONSr&Z XMLGenErrorrrTrUrrWrXrr)r)rYerrorsrZr r r save_output_filessz&GroupsManagerCommand.save_output_filescCs\d}|r*x |jjD]}|j|kr|}PqW|dkrX|rXx |jjD]}|j|kr@|}Pq@W|S)zl Try to find group according to command line parameters - first by id then by name. 
N)r(groupsrErF)r)r rFgroupZgrpr r r find_groups   zGroupsManagerCommand.find_groupc Csdd}|jjr|jj|_|jjr,|jj|_|jjr>|jj|_|jjdk rT|jj|_|jjrj||jj|_|jj r||jj |_ |jj rt }xZ|jj D]N}t jj|}|j|jjddddj}|stjtdj|q|j|qW|jjr2t }x|D]}|j|jqW|j|jjjj|dd d |D} |jjrx| D].} x&|j| tj d D]}|j j|qfWqPWnd|jj!rtj"} n|jj#rtj$} ntj%} x8t&| D],} |j| | d s|j j'tj(| | d qWdS) zE Set attributes and package lists for selected group cSs&tj}x|D]\}}|||<qW|S)N)r&ZStrDict)ZlstZstr_dictrrr r r langlist_to_strdicts z.langlist_to_strdictNTF)Z with_nevraZ with_providesZwith_filenameszNo match for argument: {})ZprovidescSsh|] }|jqSr )rF).0pkgr r r sz2GroupsManagerCommand.edit_group..)rFr1))r<rFrAZdescrBr2Z uservisiblerCZ name_by_langrDZ desc_by_langr4rSrZsubjectZSubjectZget_best_querybaseZsackZlatestrZwarningrrupdateZ dependenciesZrequiresZqueryZfiltermremoveZpackages_matchr&ZPACKAGE_TYPE_UNKNOWNZ mandatoryZPACKAGE_TYPE_MANDATORYZoptionalZPACKAGE_TYPE_OPTIONALZPACKAGE_TYPE_DEFAULTsortedr+ZPackage) r)rdrfr4Zpkg_specZsubjqZ requirementsrhZ pkg_namesZpkg_nameZpkg_typer r r edit_groupsT              zGroupsManagerCommand.edit_groupcCs|j|jjs|jjr|j|jj|jjd}|dkr|jjrNtjjt dt j }|jjrt|jj|_|jj|_nD|jjrt |jj}|j|ddrtj jt dj||jj||_|jjj||j||j|jjs|jj rt|jjtddS)N)r rFz-Can't remove packages from non-existent groupzRGroup id '{}' generated from '{}' is duplicit. 
Please specify group id using --id.)r])r\r<rErFrerlrrWrXrr&ZGroupr!rrrr(rcr+rorbprintr@Zxml_strr`)r)rdr r r r run!s,    zGroupsManagerCommand.run)r#)__name__ __module__ __qualname__aliasesrZsummaryr% staticmethodr:rGr\rbrerorq __classcell__r r )r*r r"Ls  1$=r")Z __future__rrrrJr&rOrrMrLZdnfpluginscorerrrZdnf.clircompilerrrr`r rr!ZpluginZregister_commandrZCommandr"r r r r s,     __pycache__/generate_completion_cache.cpython-36.pyc000064400000006000151030231510016533 0ustar003 gt`l@s^ddlmZddlmZddlmZddlmZddlZddlZ ddl Z Gdddej Z dS))absolute_import)unicode_literals)ucd)loggerNcs<eZdZdZfddZeddZddZdd ZZ S) BashCompletionCacheZgenerate_completion_cachecs"tt|j||||_d|_dS)Nz/var/cache/dnf/packages.db)superr__init__base cache_file)selfr Zcli) __class__//usr/lib/python3.6/generate_completion_cache.pyrszBashCompletionCache.__init__cCstjd|dS)NzCompletion plugin: %s)rdebug)msgr r r_out$szBashCompletionCache._outcCsd}x,|jjjD]}|jdk r|jjrd}PqWtjj|j sF|ry~t j |jh}|j d|j }|j d|j d|j d|jjjj}dd |D}|jd ||jWdQRXWn6t jk r}z|j d t|WYdd}~XnXdS) z& Generate cache of available packages FNTzGenerating completion cache...z/create table if not exists available (pkg TEXT)zAcreate unique index if not exists pkg_available ON available(pkg)zdelete from availablecSs g|]}|jdkrt|gqS)src)archstr).0xr r r @sz,BashCompletionCache.sack..z*insert or ignore into available values (?)z Can't write completion cache: %s)r ZreposZ iter_enabledZmetadatafreshospathexistsr sqlite3connectrcursorexecutesackqueryZ available executemanycommitOperationalErrorr)r rZrepoconncurZ avail_pkgsZavail_pkgs_inserter r rr (s,  zBashCompletionCache.sackcCs|js dSytj|jn}|jd|j}|jd|jd|jdtjj |j j j }dd|D}|j d||jWdQRXWn6tjk r}z|jd t|WYdd}~XnXdS) z& Generate cache of installed packages NzGenerating completion cache...z/create table if not exists installed (pkg TEXT)zAcreate unique index if not exists pkg_installed ON installed(pkg)zdelete from installedcSs g|]}|jdkrt|gqS)r)rr)rrr r rrVsz3BashCompletionCache.transaction..z*insert or ignore 
into installed values (?)z Can't write completion cache: %s) transactionrrr rrrdnfr Z _rpmdb_sackr r!Z installedr"r#r$r)r r%r&Z inst_pkgsZinst_pkgs_insertr'r r rr(Gs"   zBashCompletionCache.transaction) __name__ __module__ __qualname__namer staticmethodrr r( __classcell__r r )r rrs   r) Z __future__rrZdnf.i18nrZdnfpluginscorerr)Zos.pathrrZPluginrr r r rs    __pycache__/generate_completion_cache.cpython-36.opt-1.pyc000064400000006000151030231510017472 0ustar003 gt`l@s^ddlmZddlmZddlmZddlmZddlZddlZ ddl Z Gdddej Z dS))absolute_import)unicode_literals)ucd)loggerNcs<eZdZdZfddZeddZddZdd ZZ S) BashCompletionCacheZgenerate_completion_cachecs"tt|j||||_d|_dS)Nz/var/cache/dnf/packages.db)superr__init__base cache_file)selfr Zcli) __class__//usr/lib/python3.6/generate_completion_cache.pyrszBashCompletionCache.__init__cCstjd|dS)NzCompletion plugin: %s)rdebug)msgr r r_out$szBashCompletionCache._outcCsd}x,|jjjD]}|jdk r|jjrd}PqWtjj|j sF|ry~t j |jh}|j d|j }|j d|j d|j d|jjjj}dd |D}|jd ||jWdQRXWn6t jk r}z|j d t|WYdd}~XnXdS) z& Generate cache of available packages FNTzGenerating completion cache...z/create table if not exists available (pkg TEXT)zAcreate unique index if not exists pkg_available ON available(pkg)zdelete from availablecSs g|]}|jdkrt|gqS)src)archstr).0xr r r @sz,BashCompletionCache.sack..z*insert or ignore into available values (?)z Can't write completion cache: %s)r ZreposZ iter_enabledZmetadatafreshospathexistsr sqlite3connectrcursorexecutesackqueryZ available executemanycommitOperationalErrorr)r rZrepoconncurZ avail_pkgsZavail_pkgs_inserter r rr (s,  zBashCompletionCache.sackcCs|js dSytj|jn}|jd|j}|jd|jd|jdtjj |j j j }dd|D}|j d||jWdQRXWn6tjk r}z|jd t|WYdd}~XnXdS) z& Generate cache of installed packages NzGenerating completion cache...z/create table if not exists installed (pkg TEXT)zAcreate unique index if not exists pkg_installed ON installed(pkg)zdelete from installedcSs g|]}|jdkrt|gqS)r)rr)rrr r rrVsz3BashCompletionCache.transaction..z*insert or ignore into installed 
values (?)z Can't write completion cache: %s) transactionrrr rrrdnfr Z _rpmdb_sackr r!Z installedr"r#r$r)r r%r&Z inst_pkgsZinst_pkgs_insertr'r r rr(Gs"   zBashCompletionCache.transaction) __name__ __module__ __qualname__namer staticmethodrr r( __classcell__r r )r rrs   r) Z __future__rrZdnf.i18nrZdnfpluginscorerr)Zos.pathrrZPluginrr r r rs    __pycache__/groups_manager.cpython-36.pyc000064400000020721151030231510014404 0ustar003 f4@sddlmZddlmZddlZddlZddlZddlZddlZddlZddl Z ddl m Z m Z ddl Z ddlZ dZejdjeZejdZdddd Zd d Zd d ZddZe jjGddde jjZdS))absolute_import)unicode_literalsN)_loggerz -a-z0-9_.:z^[{}]+$z^[-a-zA-Z0-9_.@]+$T)Zdefault_explicitZuservisible_explicitZ empty_groupscCstj|stjtd|S)zgroup id validatorzInvalid group id) RE_GROUP_IDmatchargparseArgumentTypeErrorr)valuer $/usr/lib/python3.6/groups_manager.py group_id_type.s r cCsN|jdd}t|dkr&tjtd|\}}tj|sFtjtd||fS)ztranslated texts validator:z6Invalid translated data, should be in form 'lang:text'z*Invalid/empty language for translated data)splitlenrr rRE_LANGr)r datalangtextr r r translation_type5s    rcCs:|j}tjdjtd|}|s6tjjtdj||S)z#generate group id based on its namez[^{}]zFCan't generate group id from '{}'. Please specify group id using --id.) 
lowerresubformatRE_GROUP_ID_VALIDdnfcliCliErrorr)rgroup_idr r r text_to_idAsr!csdeZdZdZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZZS)GroupsManagerCommandgroups-managerz$create and edit groups metadata filecstt|j|tj|_dS)N)superr"__init__libcompsCompscomps)selfr) __class__r r r%QszGroupsManagerCommand.__init__cCs|jddgdtdd|jddgdtdd|jddtd d |jd d d tdd|jdttdd|jddtdd|jdtdd|jdttdd|jddgdttdd|jddgdttd d|j}|jd!d"d dtd#d$|jd%d"d&dtd'd$|j}|jd(d td)d*|jd+d td,d*|jd-d d td.d|jd/d td0d*|jd1d2d3td4d5dS)6Nz--loadappendz COMPS.XMLzload groups metadata from file)actiondefaultmetavarhelpz--savezsave groups metadata to filez--mergez%load and save groups metadata to file)r.r/z--print store_trueFz#print the result metadata to stdout)r,r-r/z--idzgroup id)typer/z-nz--namez group name)r/z --descriptionzgroup descriptionz--display-orderzgroup display orderz--translated-namez LANG:TEXTztranslated name for the group)r,r-r.r1r/z--translated-descriptionz$translated description for the groupz--user-visible user_visiblez%make the group user visible (default))destr,r-r/z--not-user-visibleZ store_falsezmake the group user invisiblez --mandatoryz%add packages to the mandatory section)r,r/z --optionalz$add packages to the optional sectionz--removez5remove packages from the group instead of adding themz--dependenciesz-include also direct dependencies for packagespackages*ZPACKAGEzpackage specification)nargsr.r/) add_argumentrr intrZadd_mutually_exclusive_group)parserZvisibleZsectionr r r set_argparserUsR                        z"GroupsManagerCommand.set_argparsercCs|jj}|jjr"d|_d|_d|_|jjrP|jjj d|jj|jj j |jj|jj s|jj s|jjs|jjs|jjdk s|jjr|jj r|jj rtjjtddS)NTFrz;Can't edit group without specifying it (use --id or --name))rdemandsoptsr4Zsack_activationZavailable_reposZload_system_repomergeloadinsertsaver+ description display_ordertranslated_nametranslated_descriptionr2idnamerrr)r)r;r r r configures" zGroupsManagerCommand.configurecCs x|jjD] }tj}yp|jdr~tj|F}tjdd}z$t j |||j |j |j Wdtj|j XWdQRXn |j |Wn~tttjfk 
r}zXt}x2|jD]&}||krqtj|j|j|qWtjjtdj||WYdd}~Xq X|j|7_q WdS)zm Loads all input xml files. Returns True if at least one file was successfuly loaded z.gzF)deleteNzCan't load file "{}": {})r<r>r&r'endswithgzipopentempfileZNamedTemporaryFileshutilZ copyfileobjcloseZ fromxml_frFosunlinkIOErrorOSErrorZ ParserErrorsetZget_last_errorsrerrorstripaddr exceptionsErrorrrr()r) file_nameZ file_compsZgz_fileZ temp_fileerrseenrTr r r load_input_filess,    $z%GroupsManagerCommand.load_input_filescCsx|jjD]}y|jj|td}Wn*tjk rL}z |g}WYdd}~XnX|r x"|ddD]}tj|j q`Wt j j t dj||dj q WdS)N) xml_optionszCan't save file "{}": {}r_)r<r@r(Zxml_fCOMPS_XML_OPTIONSr&Z XMLGenErrorrrTrUrrWrXrr)r)rYerrorsrZr r r save_output_filessz&GroupsManagerCommand.save_output_filescCs\d}|r*x |jjD]}|j|kr|}PqW|dkrX|rXx |jjD]}|j|kr@|}Pq@W|S)zl Try to find group according to command line parameters - first by id then by name. N)r(groupsrErF)r)r rFgroupZgrpr r r find_groups   zGroupsManagerCommand.find_groupc Csdd}|jjr|jj|_|jjr,|jj|_|jjr>|jj|_|jjdk rT|jj|_|jjrj||jj|_|jj r||jj |_ |jj rt }xZ|jj D]N}t jj|}|j|jjddddj}|stjtdj|q|j|qW|jjr2t }x|D]}|j|jqW|j|jjjj|dd d |D} |jjrx| D].} x&|j| tj d D]}|j j|qfWqPWnd|jj!rtj"} n|jj#rtj$} ntj%} x8t&| D],} |j| | d s|j j'tj(| | d qWdS) zE Set attributes and package lists for selected group cSs&tj}x|D]\}}|||<qW|S)N)r&ZStrDict)ZlstZstr_dictrrr r r langlist_to_strdicts z.langlist_to_strdictNTF)Z with_nevraZ with_providesZwith_filenameszNo match for argument: {})ZprovidescSsh|] }|jqSr )rF).0pkgr r r sz2GroupsManagerCommand.edit_group..)rFr1))r<rFrAZdescrBr2Z uservisiblerCZ name_by_langrDZ desc_by_langr4rSrZsubjectZSubjectZget_best_querybaseZsackZlatestrZwarningrrupdateZ dependenciesZrequiresZqueryZfiltermremoveZpackages_matchr&ZPACKAGE_TYPE_UNKNOWNZ mandatoryZPACKAGE_TYPE_MANDATORYZoptionalZPACKAGE_TYPE_OPTIONALZPACKAGE_TYPE_DEFAULTsortedr+ZPackage) r)rdrfr4Zpkg_specZsubjqZ requirementsrhZ pkg_namesZpkg_nameZpkg_typer r r edit_groupsT              
zGroupsManagerCommand.edit_groupcCs|j|jjs|jjr|j|jj|jjd}|dkr|jjrNtjjt dt j }|jjrt|jj|_|jj|_nD|jjrt |jj}|j|ddrtj jt dj||jj||_|jjj||j||j|jjs|jj rt|jjtddS)N)r rFz-Can't remove packages from non-existent groupzRGroup id '{}' generated from '{}' is duplicit. Please specify group id using --id.)r])r\r<rErFrerlrrWrXrr&ZGroupr!rrrr(rcr+rorbprintr@Zxml_strr`)r)rdr r r r run!s,    zGroupsManagerCommand.run)r#)__name__ __module__ __qualname__aliasesrZsummaryr% staticmethodr:rGr\rbrerorq __classcell__r r )r*r r"Ls  1$=r")Z __future__rrrrJr&rOrrMrLZdnfpluginscorerrrZdnf.clircompilerrrr`r rr!ZpluginZregister_commandrZCommandr"r r r r s,     __pycache__/reposync.cpython-36.pyc000064400000024276151030231510013246 0ustar003 f89@sddlmZddlmZddlZddlZddlZddlZddlmZm Z ddl m Z ddl Z ddl Z ddZGdd d e jjZe jjGd d d e jjZdS) )absolute_import)unicode_literalsN)_logger) OptionParsercCs(tjjtj}tjjtjj|||S)N)dnfZi18nZucdosgetcwdpathrealpathjoin)Z intermediatetargetcwdr/usr/lib/python3.6/reposync.py_pkgdir#srcs(eZdZfddZfddZZS)RPMPayloadLocationcs$tt|j||tjj||_dS)N)superr__init__rr dirname package_dir)selfpkgprogressZ pkg_location) __class__rrr)szRPMPayloadLocation.__init__cs*tt|j}tjj|j|j|d<|S)Ndest)rr_target_paramsrutil ensure_dirr)rtp)rrrr-s z!RPMPayloadLocation._target_params)__name__ __module__ __qualname__rr __classcell__rr)rrr(s rcseZdZdZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZddZddZddZddZddZddZZS) RepoSyncCommandreposyncz&download all packages from remote repocstt|j|dS)N)rr$r)rcli)rrrr9szRepoSyncCommand.__init__c Cs|jdddgtjdtdd|jddd td d |jd dd td d |jdddd tdd |jdddd tdd |jdtdd|jdddd tdd |jddd tdd |jdddtdd |jd!dd td"d |jd#dd td$d |jd%d&dd td'd dS)(Nz-az--archarchesz[arch]z$download only packages for this ARCH)rdefaultactionmetavarhelpz--deleteF store_truez5delete local packages no longer present in repository)r(r)r+z--download-metadatazdownload all the metadata.z-gz --gpgcheckzBRemove packages that fail GPG signature checking after downloadingz-mz--downloadcompsz&also download and uncompress 
comps.xmlz--metadata-pathzXwhere to store downloaded repository metadata. Defaults to the value of --download-path.)r+z-nz --newest-onlyz&download only newest packages per-repoz --norepopathz,Don't add the reponame to the download path.z-pz--download-pathz./z&where to store downloaded repositories)r(r+z --remote-timezCtry to set local timestamps of local files by the one on the serverz--sourcezdownload only source packagesz-uz--urlsz:Just list urls of what would be downloaded, don't download) add_argumentrZ_SplitCallbackr)parserrrr set_argparser<s2                      zRepoSyncCommand.set_argparserc Cs|jj}d|_d|_|jj}|jjr||jj xJ|jjD]>}y ||}Wn$t k rnt jj d|YnX|j q:W|jjr|jtt|jdkr|jjrt jj tdx |jD]}|jjd|_qWdS)NTzUnknown repo: '%s'.z1Can't use --norepopath with multiple repositoriesF)r&demandsZavailable_reposZsack_activationbasereposoptsrepoalldisableKeyErrorrZCliErrorenablesourceZenable_source_reposlenlist iter_enabled norepopathr_repoZexpireZdeltarpm)rr1r3Zrepoidr5rrr configure\s(     zRepoSyncCommand.configurecsd|jj_d}x|jjjD]}|jjr8|jjd|jj r|jj rxP|jj D]6\}}|j |}|rtt |qTtd|}tj|qTWn |j ||jjr|jj rt|jj fdddD}|rxB|D]}|j |}|rt |PqWtd}tj|n |j||j|} |jj r8|j| n|j| |jjrxt| D]l} |j| } tj| fd d | | _|jj| \} } | d krRtjtd jtjj | | tj!| d }qRW|jj"r|j#|| qW|st$j%j&tddS)NTz%Failed to get mirror for metadata: %scsg|]}|kr|qSrr).0md_type)mdlrr sz'RepoSyncCommand.run..groupgroup_gz group_gz_zckz(Failed to get mirror for the group file.cSs|S)Nr)s local_pathrrrsz%RepoSyncCommand.run..rzRemoving {}: {}FzGPG signature check failed.)rErFrG)'r2ZconfZ keepcacher3r=r4Z remote_timer?ZsetPreserveRemoteTimedownload_metadataZurlsZgetMetadataLocationsremote_locationprintrrwarningZ downloadcompsdictgetcomps get_pkglist print_urlsdownload_packagesZgpgcheckpkg_download_pathtypes MethodTypeZlocalPkgZpackage_signature_checkformatrr basenameunlinkdeletedelete_old_local_packagesr exceptionsError)rZ gpgcheck_okr5rBZ md_locationurlmsgZgroup_locationsZgroup_locationpkglistrrIresulterrorr)rCrrunws^              
          zRepoSyncCommand.runcCs$t|jjp|jj|jjs|jndS)N)rr4ZdestdirZ download_pathr>id)rr5rrr repo_targetszRepoSyncCommand.repo_targetcCs&|jjrt|jj|jS|j|SdS)N)r4Z metadata_pathrrerf)rr5rrrmetadata_targetszRepoSyncCommand.metadata_targetcCsT|j|j}tjjtjj||j}|jtjj|dsPtj j t dj |||S)Nrdz6Download target '{}' is outside of download path '{}'.) rfr5rr r r location startswithrr\r]rrW)rrrfrTrrrrTs  z!RepoSyncCommand.pkg_download_pathc stfdd|D}xtjj|D]\}}}x||D]t}tjj||}|jdr8tjj|r8||kr8ytj|t j t d|Wq8t k rt j t d|Yq8Xq8Wq(WdS)Nc3s|]}j|VqdS)N)rT)rAr)rrr sz.z.rpmz [DELETED] %szfailed to delete file %s)setrwalkrfr r endswithisfilerYrinforOSErrorrb) rr5r`Zdownloaded_filesdirpathZdirnames filenamesfilenamer r)rrr[s  z)RepoSyncCommand.delete_old_local_packagescCsZ|jj}|rV|j|}tjj|tjj|d}tj j j ||dt j td|jdS)Nz comps.xml)rz!comps.xml for repository %s saved)r?Z getCompsFnrgrrrrr r ZyumZmiscZ decompressrrorre)rr5Zcomps_fnZ dest_pathrrrrrPs   zRepoSyncCommand.getcompscCs|j|}|jj|dS)NT)rgr?ZdownloadMetadata)rr5rfrrrrKs  z!RepoSyncCommand.download_metadatacCstjjs|jS|j|jjj}t}i}i}xp|D]h}|j}|j ||j |j ij |j gj |x.|D]&}|j |ij |j gj |j qvWq8W|j|j|ddj} t} x|jD]\} } t} | jt| jdddt}x0| jD]$}x|D]}|j |jqWqWx:|j|djD]&}dj|}| jt||| q>Wx0| D](}x | |D]}| j |jq|WqnWqW| j|j| d} | S)a\ return union of these queries: - the latest NEVRAs from non-modular packages - all packages from stream version with the latest package NEVRA (this should not be needed but the latest package NEVRAs might be part of an older module version) - all packages from the latest stream version )Z nevra_strict)Zpkg__neqT)reverserz3{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch})rr2Z WITH_MODULESZlatestZapplyZ_moduleContainerZgetModulePackagesrkZ getArtifactsupdate setdefaultZ getNameStreamZ getVersionNumappendfilteritemsaddsortedkeysvaluesrWmaxunion)rqueryZmodule_packagesZ all_artifactsZ module_dictZartifact_versionZmodule_packageZ artifactsZartifactZ latest_queryZlatest_stream_artifactsZ namestreamZ 
version_dictZversionsZstream_artifactsmodulesmoduleZ latest_pkgZnevraversionrrr _get_latestsB         zRepoSyncCommand._get_latestcCsd|jjjtjdjj|jd}|jj r2|j |}|jj rH|jddn|jj r`|j|jj d|S)N)flags)Zreponamesrc)Zarch) r2sackrhawkeyIGNORE_MODULAR_EXCLUDESZ availableZfiltermrer4Z newest_onlyrr:r')rr5rrrrrQs  zRepoSyncCommand.get_pkglistcsjj}|jjdkr tjjtjj|jj t j dj d}fdd|D}|j ||dddS)N)rrcsg|]}t|j|qSr)rrT)rAr)rrrrrD0sz5RepoSyncCommand.download_packages..F)r2outputrrcallbackZNullDownloadProgressdrpmZ DeltaInforrrrZ installedZ_download_remote_payloads)rr`r2rZpayloadsr)rrrrS)s  z!RepoSyncCommand.download_packagescCs@x:|D]2}|j}|r t|qtd|j}tj|qWdS)Nz$Failed to get mirror for package: %s)rLrMrnamerrN)rr`rr^r_rrrrR4s   zRepoSyncCommand.print_urls)r%)r r!r"aliasesrZsummaryr staticmethodr/r@rcrfrgrTr[rPrKrrQrSrRr#rr)rrr$4s   :  9  r$)Z __future__rrrrZshutilrUZdnfpluginscorerrZdnf.cli.option_parserrrZdnf.clirr5Z RPMPayloadrZpluginZregister_commandr&ZCommandr$rrrrs    __pycache__/config_manager.cpython-36.opt-1.pyc000064400000016133151030231510015273 0ustar003 gt`*@sddlmZddlmZddlmZmZmZddlZddlZddl Zddl Zddl Z ddl Z ddl Z ddlZddlZejjGdddejjZddZejd Zejd Zejd Zejd Zd dZdS))absolute_import)unicode_literals)_loggerP_Nc@sReZdZdgZedjejjdZ e ddZ ddZ dd Z d d Zd d ZdS)ConfigManagerCommandzconfig-managerz4manage {prog} configuration options and repositories)progcCs|jdddtdd|jdddtd d |jd gd d tdd|jdddtdd |jdddtdd |j}|jddddtdd|jddddtdddS)Ncrepo*repozrepo to modify)nargsmetavarhelpz--saveF store_truez/save the current options (useful with --setopt))defaultactionrz --add-repoappendZURLz8add (and enable) the repo from the specified file or url)rrr rz--dumpz,print current configuration values to stdoutz--dump-variableszprint variable values to stdoutz --set-enabled set_enabledz"enable repos (automatically saves))rdestrrz--set-disabled set_disabledz#disable repos (automatically saves)) add_argumentrZadd_mutually_exclusive_group)parserZ enable_groupr$/usr/lib/python3.6/config_manager.py set_argparser)s,   
   z"ConfigManagerCommand.set_argparserc Cs|jj}d|_|jjgkp@|jjp@|jjp@|jjp@|jjp@|jj sp|jj j t dj djdddddd d d g|jjgkrtjt d |jjs|jj s|jjs|jjrd|_d d|jjD}dd|D|j_dS)NTz.one of the following arguments is required: {} z--savez --add-repoz--dumpz--dump-variablesz --set-enabledz--enablez--set-disabledz --disablez{Warning: --enablerepo/--disablerepo arguments have no meaningwith config manager. Use --set-enabled/--set-disabled instead.cSsg|]}|dkr|jdqS),)split).0xrrr _sz2ConfigManagerCommand.configure..cSs"g|]}|D]}|dkr |q qS)r)rZsublistitemrrrr as)clidemandsZavailable_reposoptsadd_reposavedumpdump_variablesrrZ optparsererrorrformatjoinZrepos_edrZwarningZ root_userr )selfr$Z temp_listrrr configureBs*  zConfigManagerCommand.configurecCs|jjr|jn|jdS)zExecute the util action here.N)r%r& modify_repo)r-rrrrunds zConfigManagerCommand.runc sgtfdd}jjrnxjjD]|dq.WtjdrxLjjjD]|dqZWn,tjdrxjjjD]|dqWrtjjt ddj j j }i}tjdrjj rjj }jjrx*j j jjD]\}td |fqWjj s0d jjkrjjr\|r\j j jj j jd |j|jjrtj jjd tj j jsd Sjjsjjrdj_xtD]}i}jjrd |d <njjrd|d <tjdr*x4jjjD]$\}}tj|j|r|j|qWjjrT|rTj j j|j|j|j|jjrtj jjd|jt|jqWd S)z< process --set-enabled, --set-disabled and --setopt options cs0jjj|}|sjn|r,j|dS)N)baseZreposZ get_matchingaddextend)keyZadd_matching_reposZmatching)matching_reposnamenot_matching_repos_idr-rr match_reposqs  z5ConfigManagerCommand.modify_repo..match_reposT repo_setoptsFzNo matching repo to modify: %s.z, main_setoptsz%s = %smainN1Zenabled0zrepo: )setr%r hasattrr9keysdnf exceptionsErrorrr,r1confr:r)Z substitutionsitemsprintr'Zwrite_raw_configfileZconfig_file_pathr(outputZ fmtSectionrrsortedfnmatchidupdateZrepofile) r-r8ZsbcZmodifyvalr Z repo_modifyrepoidZsetoptsr)r5r6r7r-rr/ks`          z ConfigManagerCommand.modify_repoc CsN|jjj}d}x|jjD]}tjjj|jdkrDdt j j |}t j td||jdrt j j|}t j j||}y6|jj|dd}tj|j|t j|d|jWn6tk r}z|d 7}t j|wWYd d }~XnXqt|}d jtjj|}t j j|d |}d |||f} t|| sqqW|rJtjj t!dd|d S)z process --add-repo option rr!zfile://zAdding repo from: %sz.repozw+)modeiNz$created by {} config-manager from 
{}z%s.repoz"[%s] name=%s baseurl=%s enabled=1 zConfiguration of repo failedzConfiguration of repos failed)"r1rDZ get_reposdirr%r&rApycompZurlparseschemeospathabspathrinforendswithbasenamer,ZurlopenshutilZcopy2r6chmodcloseIOErrorr*sanitize_url_to_fsr+util MAIN_PROG save_to_filerBrCr) r-Z myrepodirZ errors_counturlZdestnameferMZreponamecontentrrrr&s8         zConfigManagerCommand.add_repoN)__name__ __module__ __qualname__aliasesrr+rAr]r^Zsummary staticmethodrr.r0r/r&rrrrr"s  "BrcCspy4t|d }tjj||tj|dWdQRXWn6ttfk rj}ztj t d||dSd}~XnXdS)Nzw+iz&Could not save repo to repofile %s: %sFT) openrArPZ write_to_filerRrYr[OSErrorrr*r)filenamercfdrbrrrr_s  r_z^\w+:/*(\w+:|www\.)?z[?/:&#|~\*\[\]\(\)\'\\]+z^[,.]*z[,.]*$cCs*ybtj|r`tjjr&|jdjd}n:t|trB|jdjd}n |jd}t|t r`|jd}Wnt t t t fk r~YnXtjd|}tjd|}tjd|}tjd|}t|dkr|ddjd}dt|d }tj}|j||djd|d|d|j}d }tj|d|S) zReturn a filename suitable for the filesystem and for repo id Strips dangerous and common characters to create a filename we can use to store the cache in. Zidnazutf-8r!rNrOzE[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]) RE_SCHEMEmatchrArPZPY3encodedecode isinstancestrZunicodeUnicodeDecodeErrorUnicodeEncodeError UnicodeError TypeErrorsubRE_SLASHRE_BEGINRE_FINALlenrhashlibZsha256rKZ hexdigestre)r`parts lastindexZcsumZ allowed_regexrrrr\s.        r\)Z __future__rrZdnfpluginscorerrrrAZdnf.cliZ dnf.pycompZdnf.utilrIrrRrrXZpluginZregister_commandr#ZCommandrr_compilerpr{r|r}r\rrrrs(   1    __pycache__/needs_restarting.cpython-36.opt-1.pyc000064400000023602151030231510015673 0ustar003 f`. 
@s$ddlmZddlmZddlmZddlmZddlmZmZddlZddl Zddl Z ddl Z ddl Z ddl Z ddlZddlZddd d d d d ddg ZdgZddZddZddZddZddZddZddZdd Zd!d"ZGd#d$d$eZGd%d&d&eZejjGd'd(d(ej j!Z"dS)))absolute_import)division)print_function)unicode_literals)logger_NZkernelz kernel-rtZglibczlinux-firmwareZsystemddbusz dbus-brokerz dbus-daemonZ microcode_ctlzlibc stjj|stSt}xjtj|D]\}tjj|s$|jd rBq$ttjj||&}x|D]}|j |j |fq\WWdQRXq$Wtx4|j j j jdd|DdD]}j |jqWx6fdd|DD] \}}tjtdj||dqWS) z Provide filepath as string if single dir or list of strings Return set of package names contained in files under filepath z.confNcSsh|] }|dqS)r).0xr r &/usr/lib/python3.6/needs_restarting.py Bsz'get_options_from_dir..)namecsh|]}|dkr|qS)rr )r r )packagesr r rDsz`No installed package found for package name "{pkg}" specified in needs-restarting file "{file}".)pkgfile)ospathexistssetlistdirisdirendswithopenjoinaddrstripsackquery installedfilterrrwarningrformat)filepathbaseZoptionsrfplinerrr )rr get_options_from_dir0s"  $&r(ccsxtD]\}}y<|dk r(|t|kr(wt|ddd}|j}WdQRXWn"tk rntjd|wYnXx$|D]}t||}|dk rv|VqvWqWdS)Nrreplace)errorszFailed to read PID %d's smaps.) 
list_smaps owner_uidr readlinesEnvironmentErrorrr"smap2opened_file)uidpidsmapsZ smaps_filelinesr'ofiler r r list_opened_filesKs   r6c csNxHtjdD]:}y t|}Wntk r2w YnXd|}||fVq WdS)Nz/procz/proc/%d/smaps)rrint ValueError)Zdir_r2r3r r r r,\s r,cstifdd}|S)Ncs,j|}|k r|S|}||<|S)N)get)Zparamval)cachefuncsentinelr r wrapperis  zmemoize..wrapper)object)r<r>r )r;r<r=r memoizefsr@cCstj|tjS)N)rstatST_UID)fnamer r r r-ssr-cCs$|jj|dj}|r |dSdS)N)rr)rr!run)rrCZmatchesr r r owning_packagewsrEc CsPd|}t|}tjj|j}WdQRXdj|jd}td||fdS)Nz/proc/%d/cmdline z%d : %s)rdnfZi18nZucdreadrsplitprint)r2ZcmdlineZ cmdline_fileZcommandr r r print_cmd~s  rLc Cstj}|jdd}tj|d}d}y|jd|j|}Wn<tjk rv}zt|}tjdj ||dSd}~XnXtj|dd}|j dd}|j d r|SdS) Nzorg.freedesktop.systemd1z/org/freedesktop/systemd1z org.freedesktop.systemd1.Managerz)Failed to get systemd unit for PID {}: {}zorg.freedesktop.DBus.Properties)Zdbus_interfacezorg.freedesktop.systemd1.UnitZIdz.service) rZ SystemBusZ get_objectZ InterfaceZ GetUnitByPIDZ DBusExceptionstrrr"r#ZGetr) r2ZbusZsystemd_manager_objectZsystemd_manager_interfaceZ service_proxyemsgZservice_propertiesrr r r get_service_dbuss0  rPcCsn|jd}|dkrdS|jddkr(dS||dj}|jd}|dkrVt||dSt||d|dSdS)N/rz00:z (deleted)FT)findstriprfind OpenedFile)r2r'ZslashfnZ suffix_indexr r r r0s   r0c@s*eZdZejdZddZeddZdS)rUz^(.+);[0-9A-Fa-f]{8,}$cCs||_||_||_dS)N)deletedrr2)selfr2rrWr r r __init__szOpenedFile.__init__cCs(|jr"|jj|j}|r"|jdS|jS)a;Calculate the name of the file pre-transaction. In case of a file that got deleted during the transactionm, possibly just because of an upgrade to a newer version of the same file, RPM renames the old file to the same name with a hexadecimal suffix just before delting it. 
)rWRE_TRANSACTION_FILEmatchrgroup)rXr\r r r presumed_names  zOpenedFile.presumed_nameN) __name__ __module__ __qualname__recompiler[rYpropertyr^r r r r rUs rUc@s4eZdZddZeddZeddZddZd S) ProcessStartcCs|j|_|j|_dS)N) get_boot_time boot_timeget_sc_clk_tck sc_clk_tck)rXr r r rYs zProcessStart.__init__c Cshttjdj}tjjdrdtdd8}|jjj dj}tt j t |}t ||SQRX|S)a  We have two sources from which to derive the boot time. These values vary depending on containerization, existence of a Real Time Clock, etc. For our purposes we want the latest derived value. - st_mtime of /proc/1 Reflects the time the first process was run after booting This works for all known cases except machines without a RTC - they awake at the start of the epoch. - /proc/uptime Seconds field of /proc/uptime subtracted from the current time Works for machines without RTC iff the current time is reasonably correct. Does not work on containers which share their kernel with the host - there the host kernel uptime is returned z/proc/1z /proc/uptimerbrN) r7rrAst_mtimerisfilerreadlinerSrJtimefloatmax)Zproc_1_boot_timefZuptimeZproc_uptime_boot_timer r r rfs  zProcessStart.get_boot_timecCstjtjdS)N SC_CLK_TCK)rsysconf sysconf_namesr r r r rhszProcessStart.get_sc_clk_tckc CsLd|}t|}|jjj}WdQRXt|d}||j}|j|S)Nz /proc/%d/stat)rrIrSrJr7rirg)rXr2Zstat_fnZ stat_fileZstatsZticks_after_bootZsecs_after_bootr r r __call__s    zProcessStart.__call__N)r_r`rarY staticmethodrfrhrvr r r r res  rec@s4eZdZd ZedZeddZddZddZ d S) NeedsRestartingCommandneeds-restartingz/determine updated binaries that need restartingcCsF|jdddtdd|jdddtdd|jd d dtd ddS) Nz-uz --useronly store_truez#only consider this user's processes)actionhelpz-rz --reboothintzKonly report whether a reboot is required (exit code 1) or not (exit code 0)z-sz --servicesz%only report affected systemd services) add_argumentr)parserr r r set_argparsers      z$NeedsRestartingCommand.set_argparsercCs|jj}d|_dS)NT)clidemandsZsack_activation)rXrr r r configure sz 
NeedsRestartingCommand.configurecCsNt}tjt|jj}t|}ttj j |jj j d|j}t j||jjrt}t}|jjjj}x,|jt dD]}|j|jkrx|j|jqxW|jdddgd}t|dkrx,|jtdD]}|j|jkr|j|jqW|s|rfttdxt|D]} td| qWxt|D]} td | q$Wtttd ttd d tjj nttd ttddSt} |jj!rtj"nd} xHt#| D]<} || j$}|dkrĐq|j|| j%kr| j| j%qW|jj&r.tddt| D} x | D]} | dk rt| qWdSxt| D]}t'|q8WdS)Nz#etc/dnf/plugins/needs-restarting.d/)rrz dbus-daemonz dbus-brokerrz;Core libraries or services have been updated since boot-up:z * %sz8 * %s (dependency of dbus. Recommending reboot of dbus)z2Reboot is required to fully utilize these updates.zMore information:z)https://access.redhat.com/solutions/27943z>No core libraries or services have been updated since boot-up.zReboot should not be necessary.cSsg|] }t|qSr )rP)r r2r r r Bsz.NeedsRestartingCommand.run..)(re functoolspartialrEr%rr@r(rrrZconfZ installroot NEED_REBOOTextendZoptsZ reboothintrrr r!Z installtimergrrlenNEED_REBOOT_DEPENDS_ON_DBUSrKrsortedrH exceptionsErrorZuseronlygeteuidr6r^r2ZservicesrL)rXZ process_startZ owning_pkg_fnoptZ need_rebootZneed_reboot_depends_on_dbusr rZdbus_installedrZ stale_pidsr1r5namesr2r r r rDsd                zNeedsRestartingCommand.runN)ry) r_r`raaliasesrZsummaryrwrrrDr r r r rxs  rx)#Z __future__rrrrZdnfpluginscorerrrHZdnf.clirrrrbrArnrrr(r6r,r@r-rErLrPr0r?rUreZpluginZregister_commandrZCommandrxr r r r s:      "+__pycache__/changelog.cpython-36.opt-1.pyc000064400000010117151030231510014257 0ustar003 gt`g@s|ddlmZddlmZddlZddlZddlZddlmZm Z m Z ddl Z ddl Z ddZ e jjGddde jjZdS) )absolute_import)unicode_literalsN)_P_loggerc CsDytjj|ddStttfk r>tjtdj |YnXdS)NT)ZfuzzyzNot a valid date: "{0}".) dateutilparserparse ValueError TypeError OverflowErrorargparseZArgumentTypeErrorrformat)valr/usr/lib/python3.6/changelog.py validate_date!src@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS)ChangelogCommand changelogzShow changelog data of packagescCsd|j}|jdddttdd|jddttdd|jdd d td d |jd dtdddS)Nz--sinceZDATEzZshow changelog entries since DATE. 
To avoid ambiguosity, YYYY-MM-DD format is recommended.)metavardefaulttypehelpz--countz2show given number of changelog entries per package)rrrz --upgradesF store_truezmshow only new changelog entries for packages, that provide an upgrade for some of already installed packages.)ractionrpackage*ZPACKAGE)nargsr)Zadd_mutually_exclusive_group add_argumentrrint)rZ filter_grouprrr set_argparser-s   zChangelogCommand.set_argparsercCs|jj}d|_d|_d|_dS)NT)clidemandsZavailable_reposZsack_activation changelogs)selfr"rrr configure>szChangelogCommand.configurecCs|jjj}|jjr|jddx|jjD]d}tjj|ddj |jjdddd}|jj rh|j|jj d|r||j |j }q*t jtd|q*Wn|jj r|j|jj d|jjr|j}n|j}|S)NT)empty)Z ignore_caseF)Z with_nevraZ with_providesZwith_filenames)ZreponamezNo match for argument: %s)baseZsackqueryoptsrZfiltermdnfZsubjectZSubjectZget_best_queryZrepounionZlatestrinforupgradesZ available)r$qpkgZpkg_qrrrr(Ds$    zChangelogCommand.querycCs>tj}x0t|D]$}|j|jp$|j|jfgj|qW|S)N) collections OrderedDictsorted setdefaultZ source_namenameZevrappend)r$Zpackagesby_srpmr/rrrr6Zs$zChangelogCommand.by_srpmcsTjjrjj|Sjjr.|jdjjSjjrJfdd|jDS|jSdS)Ncs$g|]}|djjjkr|qS)Z timestamp)r)sinceZdate).0chlog)r$rr fsz6ChangelogCommand.filter_changelogs..)r)r-r'Zlatest_changelogscountr#r7)r$rr)r$rfilter_changelogs`s z"ChangelogCommand.filter_changelogscCs|jjr"tjtdj|jjnP|jjrLtjtdd|jjj|jjn&|jjrdtjtdntjtd|j |j }xb|D]Z}t tdjdj t dd ||Dx*|j||d D]}t |jj|qWqWdS) NzListing changelogs since {}zListing only latest changelogzListing {} latest changelogszBListing only new changelogs since installed version of the packagezListing all changelogszChangelogs for {}z, cSsh|] }t|qSr)str)r8r/rrr {sz'ChangelogCommand.run..r)r)r7rr,rrr;rr-r6r(printjoinr2r<r'Zformat_changelog)r$r6r4r9rrrrunks     zChangelogCommand.runN)r) __name__ __module__ __qualname__aliasesrZsummary staticmethodr r%r(r6r<rArrrrr(s  r)Z __future__rrr r0Zdateutil.parserrZdnfpluginscorerrrr*Zdnf.clirZpluginZregister_commandr!ZCommandrrrrrs  
__pycache__/repoclosure.cpython-36.opt-1.pyc000064400000010500151030231510014666 0ustar003 gt`@sVddlmZddlmZddlmZddlZGdddejZGdddej j Z dS) )absolute_import)unicode_literals)_Ncs eZdZdZfddZZS) RepoClosure repoclosurecs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoClosureCommand)selfbasecli) __class__!/usr/lib/python3.6/repoclosure.pyr!szRepoClosure.__init__)__name__ __module__ __qualname__namer __classcell__rr)r rrsrc@s>eZdZd ZedZddZddZd dd Ze d d Z dS)r rz:Display a list of unresolved dependencies for repositoriescCsd|jj}d|_d|_|jjr`xB|jjjD]2}|j |jjkrT|j |jj krT|j q*|j q*WdS)NT) r demandsZsack_activationZavailable_reposoptsrepor reposallidcheckdisableenable)r rrrrr configure,s zRepoClosureCommand.configurecCs|jjr|j|jj}n|j}xRt|jD]B}tdjt||jtdx||D]}tdj|qZWq.Wt |dkrt d}t j j |dS)Nzpackage: {} from {}z unresolved deps:z {}rz/Repoclosure ended with unresolved dependencies.)rarches_get_unresolvedsortedkeysprintformatstrreponamelenrdnf exceptionsError)r unresolvedpkgZdepmsgrrrrun7s zRepoClosureCommand.runNcsLi}t}|jjr|jjjjdd|jjjjdd}xv|jjjD]D}j |jjjj|j dj |j |jjjj|j dj }qHWn |jjjj |jjjj }|jj rN|jjjjdd}g}xT|jj D]H}tjj|} |j| j|jjdddd} | r|j | }q|j|qW|rJtjjtddj||}|jjrh|j|jjd|dk r~|j|d|jjjrjdd j|jxf|D]^}t||<xL|jD]B} t| } | jd s| jd rq|j | ||j | qWqWtfd d |Dfdd|j!D} dd| j!DS)NT)empty)r&F)Z with_nevraZ with_providesZwith_filenameszno package matched: %sz, )arch)Zlatest_per_archz solvable:zrpmlib(c3s|]}j|ds|VqdS))ZprovidesN)filter).0x) availablerr sz5RepoClosureCommand._get_unresolved..cs(i|] \}}tfdd|D|qS)c3s|]}|kr|VqdS)Nr)r2r3)unresolved_depsrrr5sz@RepoClosureCommand._get_unresolved...)set)r2kv)r6rr sz6RepoClosureCommand._get_unresolved..cSsi|]\}}|r||qSrr)r2r8r9rrrr:s)"r7rZnewestr ZsackZqueryr1rZ iter_enabledunionrZlatestr4pkglistr(ZsubjectZSubject intersectionZget_best_queryappendr)r*rjoinrZfiltermZconfZbestZapplyZrequiresr% startswithadditems)r r0r+ZdepsZto_checkrZ pkglist_qerrorsr,ZsubjZpkg_qZreqZreqnameZunresolved_transitionr)r4r6rr Es\ &                  
z"RepoClosureCommand._get_unresolvedcCs`|jdgddtdd|jdgdtdd|jd d d td d |jdgdtddddS)Nz--archr>rzBcheck packages of the given archs, can be specified multiple times)defaultactiondesthelpz--checkzSpecify repositories to check)rDrErGz-nz--newest store_truez+Check only the newest packages in the repos)rErGz--pkgz#Check closure for this package onlyr<)rDrErGrF) add_argumentr)parserrrr set_argparsers       z RepoClosureCommand.set_argparser)r)N) rrraliasesrZsummaryrr.r staticmethodrKrrrrr (s   Qr ) Z __future__rrZdnfpluginscorerZdnf.clir(ZPluginrr ZCommandr rrrrs    __pycache__/debug.cpython-36.pyc000064400000025101151030231510012456 0ustar003 gt`1@sddlmZddlmZddlmZddlmZmZddlZddl Zddl Z ddl Z ddl Z ddl Z ddlZddlZdZGdddejZGd d d ejjZGd d d ejjZd dZddZddZdS))absolute_import)unicode_literals)ucd)_loggerNzdnf-debug-dump version 1 cs eZdZdZfddZZS)DebugdebugcsDtt|j||||_||_|jdk r@|jjt|jjtdS)N)superr__init__basecliZregister_commandDebugDumpCommandDebugRestoreCommand)selfr r ) __class__/usr/lib/python3.6/debug.pyr )s   zDebug.__init__)__name__ __module__ __qualname__namer __classcell__rr)rrr%srcsteZdZdZedZfddZddZeddZ d d Z d d Z d dZ ddZ ddZddZddZZS)r debug-dumpz5dump information about installed rpm packages to filecstt|j|d|_dS)N)r r r dump_file)rr )rrrr 7szDebugDumpCommand.__init__cCsd|jj_d|jj_dS)NT)r demandssack_activationavailable_repos)rrrr configure;s zDebugDumpCommand.configurecCs.|jdddtdd|jddtdd dS) Nz --norepos store_trueFz/do not attempt to dump the repository contents.)actiondefaulthelpfilename?zoptional name of dump file)nargsr!) 
add_argumentr)parserrrr set_argparser?s  zDebugDumpCommand.set_argparsercCs|jj}|s6tjdtjtj}dtjd|f}tjj|}|j dr\t j |d|_ n t |d|_ |jt|j|j|j|j|jj |j|j jttd|dS)z{create debug txt file and compress it, if no filename specified use dnf_debug_dump-.txt.gz by defaultz %Y-%m-%d_%Tzdnf_debug_dump-%s-%s.txt.gzz.gzwzOutput written to: %sN)optsr"timeZstrftimeZ localtimeosunamepathabspathendswithgzipGzipFileropenwrite DEBUG_VERSIONdump_system_infodump_dnf_config_infodump_rpm_problems dump_packagesZnoreposdump_rpmdb_versionscloseprintr)rr"ZnowrrrrunHs      zDebugDumpCommand.runcCs4tjjr t|jtjr t|d}tjj|j|dS)Nutf8) dnfZpycompZPY3 isinstancerr1r2bytesZ write_to_file)rmsgrrrr4as zDebugDumpCommand.writecCsX|jdtj}|jd|d|df|jdtj|jdtjjdddS) Nz%%%%SYSTEM INFO z uname: %s, %s z rpm ver: %s z python ver: %s  )r4r,r-rpm __version__sysversionreplace)rr-rrrr6fs  z!DebugDumpCommand.dump_system_infocCs|jjj}djdd|jjjD}|jd|jd|d|jd|d|jd |d |jd tjj |jd ||jd dj|jjj dS)N,cSsg|] }|jqSr)r).0prrr psz9DebugDumpCommand.dump_dnf_config_info..z %%%%DNF INFO z arch: %s archz basearch: %s Zbasearchz releasever: %s Z releaseverz dnf ver: %s z enabled plugins: %s z global excludes: %s ) r confZ substitutionsjoinZ_pluginspluginsr4r?constVERSION excludepkgs)rvarrSrrrr7ns  z%DebugDumpCommand.dump_dnf_config_infocCsP|jdt|j\}}|jdjdd|D|jdjdd|DdS)Nz%%%%RPMDB PROBLEMS rFcSs$g|]\}}dt|t|fqS)zPackage %s requires %s )r)rMreqpkgrrrrO}sz6DebugDumpCommand.dump_rpm_problems..cSs$g|]\}}dt|t|fqS)zPackage %s conflicts with %s )r)rMrQrYrrrrOs)r4 rpm_problemsr rR)rZmissing conflictsrrrr8zs   z"DebugDumpCommand.dump_rpm_problemsc Cs\|jjj}|jdx&t|jD]}|jdt|q$W|sFdS|jd|j}xt|jjj dddD]}yd}|j dk r|j }n*|j dk r|j }nt |j dkr|j d}|jd|j|f|jd d j|jx,t|j|jd D]}|jdt|qWWqrtjjk rR}z|jd |t|fwrWYdd}~XqrXqrWdS) Nz %%%%RPMDB z %s z %%%%REPOS cSs|jS)N)id)xrrrsz0DebugDumpCommand.dump_packages..)keyrz %%%s - %s z excludes: %s rL)ZreponamezError accessing repo %s: %s )r sackqueryr4sorted installedpkgspec availableZreposZ iter_enabledZmetalinkZ 
mirrorlistlenZbaseurlr\rRrVfilterr? exceptionsErrorstr) rZ load_reposqrNreZrepoZurlZpoerrrr9s2      zDebugDumpCommand.dump_packagescCs(|jd|jjj}|jd|dS)Nz%%%%RPMDB VERSIONS z all: %s )r4r r`Z_rpmdb_version)rrJrrrr:s  z$DebugDumpCommand.dump_rpmdb_versions)r)rrraliasesrsummaryr r staticmethodr'r=r4r6r7r8r9r:rrr)rrr 2s    r c@sPeZdZdZedZddZeddZddZ d d Z d d Z ed dZ dS)r debug-restorez,restore packages recorded in debug-dump filecCs4d|jj_d|jj_d|jj_|jjs0d|jj_dS)NT)r rrrZ root_userr*outputZ resolving)rrrrrs    zDebugRestoreCommand.configurecCs~|jddtdd|jddtdd|jddtdd|jd d d td d |jddtdd|jddtdddS)Nz--outputrz,output commands that would be run to stdout.)rr!z--install-latestz0Install the latest version of recorded packages.z --ignore-archz_Ignore architecture and install missing packages matching the name, epoch, version and release.z--filter-typesz[install, remove, replace]zinstall, remove, replacezlimit to specified type)metavarr r!z--remove-installonlyzqAllow removing of install-only packages. Using this option may result in an attempt to remove the running kernel.r"r(zname of dump file)r$r!)r%r)r&rrrr's$     z!DebugRestoreCommand.set_argparsercCsV|jjr$t|jjjddj|j_|j|jjd}|j||j|j||jdS)z Execute the command action here.rL rN) r* filter_typessetrKsplitread_dump_filer"process_installed process_dump)r dump_pkgsrrrr=s zDebugRestoreCommand.runc Cs|jjjj}|jj|}x|D]}d}t|}|j|j|jfd}|dk r|j |j |j f} | |krpd|| <q||kr~d}qd|j krd} nd} x|j D] } | || <qWnd}|r"d|j kr"||ks|jr"|jrtd|q"|jj|q"WdS)NFskipTrKremovez remove %s)r r`rarcZ_get_installonly_queryrdgetrrPepochrJreleasertkeysZremove_installonlyrqr<Zpackage_remove) rrzr*rcZinstallonly_pkgsrYZ pkg_removespecdumped_versionsZevrrZd_evrrrrrxs.    
z%DebugRestoreCommand.process_installedc Csxt|jD]\}}|||f}xt|jD]\}}}||||f} | dkrRq0|jr^d} nd|} |jr| dkrd|| f} nt|| |||} | |jkr0|jrtd| | fq0y|jj | Wq0t j j k rt jtd| Yq0Xq0WqWdS)Nr{rF.installz%s%sz%s %szPackage %s is not available)rbrZ ignore_archZinstall_latest pkgtup2specrtrqr<r rr?rhZ MarkingErrorrerrorr) rrzr*narrlvrrrPpkg_specrrrrys&  z DebugRestoreCommand.process_dumpcCs|jdrtj|}nt|}t|jtkrFtjt d|t j j d}i}xp|D]h}t|}|rr|dkrTd}qT| s|ddkrP|j }tj|}d|j|j|jfi|j|j|jf<qTW|S) Nz.gzzBad dnf debug file: %sTz %%%%RPMDB Frrsr)r0r1r2r3rreadliner5rrrr?rhristriphawkeyZ split_nevra setdefaultrrPr~rJr)r"Zfobjr{ZpkgslinerZnevrarrrrws(    (z"DebugRestoreCommand.read_dump_fileN)rp) rrrrmrrnrror'r=rxryrwrrrrrs  #rcstjj|}|jjt}t}x@D]8|jfddjD|jfddjDq*Wfdd|D}fdd|D}||fS)Ncs2g|]*}t|dk rt|jd r|fqS)zsolvable:prereqmarkerzrpmlib()rj startswith)rMrX)rYrrrO:sz rpm_problems..csg|] }|fqSrr)rMrQ)rYrrrO=scs$g|]\}}j|ds||fqS))provides)rg)rMrXrY)allpkgsrrrO?scs$g|]\}}j|dr||fqS))r)rg)rMrQrY)rrrrOAs) r?r`Z _rpmdb_sackrarcruupdaterequiresr[)r Zrpmdbrr[Zmissing_requiresZexisting_conflictsr)rrYrrZ3s   rZcCst|j|j|j|j|jS)N)rrrPr~rJr)rYrrrrdFsrdcCs<|sdn d|jd}|dkr"dnd|}d|||||fS)NrFz.%srz%s:z %s-%s%s-%s%s)NrF)lstrip)rrPr~rJrrrlrrrrJsr)Z __future__rrZdnf.i18nrZdnfpluginscorerrr?Zdnf.clir1rr,rGrIr+r5ZPluginrr ZCommandr rrZrdrrrrrs&    w __pycache__/debuginfo-install.cpython-36.pyc000064400000013665151030231510015012 0ustar003 gt`L+@sNddlmZmZddlZddlmZGdddejZGdddejj Z dS))_loggerN)Packagecs,eZdZdZdZfddZddZZS)DebuginfoInstallz5DNF plugin supplying the 'debuginfo-install' command.zdebuginfo-installcs4tt|j||||_||_|dk r0|jtdS)zInitialize the plugin instance.N)superr__init__basecliZregister_commandDebuginfoInstallCommand)selfrr ) __class__'/usr/lib/python3.6/debuginfo-install.pyr s zDebuginfoInstall.__init__cCsf|j|jj}|jdo.|jddo.|jdd}|rbtjj|jj j dd}t |rb|jj j dS)Nmain autoupdatez *-debuginfo)Z name__glob)Z read_configrconfZ has_sectionZ has_optionZ getbooleandnfsackZ _rpmdb_sackqueryfiltermlenreposenable_debug_repos)r 
ZcprZdbginfor r rconfig(s   zDebuginfoInstall.config)__name__ __module__ __qualname____doc__namerr __classcell__r r )r rrs rcsheZdZdZdZedZfddZeddZ dd Z d d Z d d Z ddZ ddZddZZS)r z! DebuginfoInstall plugin for DNF debuginfo-installzinstall debuginfo packagescs4tt|j|t|_t|_t|_t|_dS)N)rr rsetavailable_debuginfo_missingavailable_debugsource_missinginstalled_debuginfo_missinginstalled_debugsource_missing)r r )r r rr:s z DebuginfoInstallCommand.__init__cCs|jddddS)Npackage+)nargs) add_argument)parserr r r set_argparserBsz%DebuginfoInstallCommand.set_argparsercCs0|jj}d|_d|_d|_d|_|jjjdS)NT) r demandsZ resolvingZ root_userZsack_activationZavailable_reposrrr)r r,r r r configureFs z!DebuginfoInstallCommand.configurecCsg}ttj}ttj}x|jjD]}tjj|j |j j dd}|d}|sxt j td|j jjj||j|q$|jj}|j|jjxdt|jD]T}|jtjr|d| |kr|j||jtjr|d| |kr|j|qWx|jD]} | d} | jri} x"| D]} | j| jgj| q(Wxj| jD]^} | d} |j| j | s|j| j!| s|j"j#t$| |j| j%| sP|j&j#t$| qPWq| j'jtjs| j'jtjr|j(| q|ddk rb|j)| j |ds2|j)| j!|ds2|j*j#dj+| j'| j,|j)| j%|ds|j-j#dj+| j'| j,q|j.| j | s|j.| j!| s|j*j#dj+| j'| j,|j.| j%| s|j-j#dj+| j'| j,qWq$W|j*rt j tdd j/t0|j*|j-rt j td d j/t0|j-|j"r8t j td d j/t0|j"|j&r\t j td d j/t0|j&|r|j j1j2rtj3j4td dj/|ddS)NF)Zwith_srcrzNo match for argument: %srZnevraz{}-{}zICould not find debuginfo package for the following available packages: %sz, zKCould not find debugsource package for the following available packages: %szICould not find debuginfo package for the following installed packages: %szKCould not find debugsource package for the following installed packages: %szUnable to find a match )Zpkg_spec)5rrZDEBUGINFO_SUFFIXZDEBUGSOURCE_SUFFIXZoptsr&rZsubjectZSubjectZget_best_solutionrrrinforoutputZtermZboldappendZ availableZ _name_dictupdateZ installedlistkeysendswithpopvaluesZ _from_system setdefaultarch_install_debug_from_system debug_nameZsource_debug_namer$addstrZdebugsource_namer%r_install_install_debugr"formatevrr#_install_debug_no_nevrajoinsortedrstrict exceptionsZPackagesNotAvailableError)r Z 
errors_specZdebuginfo_suffix_lenZdebugsource_suffix_lenZpkgspecZsolutionrZ package_dictrpkgsZ first_pkgZ arch_dictpkgZpackage_arch_listr r rrunNs           zDebuginfoInstallCommand.runcCs:|jjjj||j|j|j|jd}|r6|j|dSdS)N)repochversionreleaser9TF) rrrfilterrJrKrLr9r>)r r;rHrr r rr:s  z2DebuginfoInstallCommand._install_debug_from_systemcCsi}|jdk r|j|d<|jdk r,|j|d<|jdk r@|j|d<|jdk rT|j|d<|jjjjfd|i|}|r|j|dSdS)NZ epoch__globZ version__globZ release__globZ arch__globrTF) rJrKrLr9rrrrMr>)r r;Z base_nevrakwargsrr r rr?s         z&DebuginfoInstallCommand._install_debugcs8|jjjjfdd|Dd}|r4|j|dSdS)Ncsg|]}dj|j|jqS)z{}-{}.{})r@rAr9).0p)r;r r szCDebuginfoInstallCommand._install_debug_no_nevra..)Z nevra_strictTF)rrrrr>)r r;rGrr )r;rrBs   z/DebuginfoInstallCommand._install_debug_no_nevracCs:tjj|jj}|j|d|jjj||jjj ddS)N)rH)ZselectZoptional) rselectorZSelectorrrr!ZgoalZinstallrrE)r rGrRr r rr>s z DebuginfoInstallCommand._install)r )rrrraliasesrZsummaryr staticmethodr+r-rIr:r?rBr>rr r )r rr 4s  |  r ) ZdnfpluginscorerrrZ dnf.packagerZPluginrr ZCommandr r r r rs __pycache__/reposync.cpython-36.opt-1.pyc000064400000024276151030231510014205 0ustar003 f89@sddlmZddlmZddlZddlZddlZddlZddlmZm Z ddl m Z ddl Z ddl Z ddZGdd d e jjZe jjGd d d e jjZdS) )absolute_import)unicode_literalsN)_logger) OptionParsercCs(tjjtj}tjjtjj|||S)N)dnfZi18nZucdosgetcwdpathrealpathjoin)Z intermediatetargetcwdr/usr/lib/python3.6/reposync.py_pkgdir#srcs(eZdZfddZfddZZS)RPMPayloadLocationcs$tt|j||tjj||_dS)N)superr__init__rr dirname package_dir)selfpkgprogressZ pkg_location) __class__rrr)szRPMPayloadLocation.__init__cs*tt|j}tjj|j|j|d<|S)Ndest)rr_target_paramsrutil ensure_dirr)rtp)rrrr-s z!RPMPayloadLocation._target_params)__name__ __module__ __qualname__rr __classcell__rr)rrr(s rcseZdZdZedZfddZeddZddZ d d Z d d Z d dZ ddZ ddZddZddZddZddZddZddZZS) RepoSyncCommandreposyncz&download all packages from remote repocstt|j|dS)N)rr$r)rcli)rrrr9szRepoSyncCommand.__init__c Cs|jdddgtjdtdd|jddd td d |jd dd td d |jdddd tdd |jdddd tdd 
|jdtdd|jdddd tdd |jddd tdd |jdddtdd |jd!dd td"d |jd#dd td$d |jd%d&dd td'd dS)(Nz-az--archarchesz[arch]z$download only packages for this ARCH)rdefaultactionmetavarhelpz--deleteF store_truez5delete local packages no longer present in repository)r(r)r+z--download-metadatazdownload all the metadata.z-gz --gpgcheckzBRemove packages that fail GPG signature checking after downloadingz-mz--downloadcompsz&also download and uncompress comps.xmlz--metadata-pathzXwhere to store downloaded repository metadata. Defaults to the value of --download-path.)r+z-nz --newest-onlyz&download only newest packages per-repoz --norepopathz,Don't add the reponame to the download path.z-pz--download-pathz./z&where to store downloaded repositories)r(r+z --remote-timezCtry to set local timestamps of local files by the one on the serverz--sourcezdownload only source packagesz-uz--urlsz:Just list urls of what would be downloaded, don't download) add_argumentrZ_SplitCallbackr)parserrrr set_argparser<s2                      zRepoSyncCommand.set_argparserc Cs|jj}d|_d|_|jj}|jjr||jj xJ|jjD]>}y ||}Wn$t k rnt jj d|YnX|j q:W|jjr|jtt|jdkr|jjrt jj tdx |jD]}|jjd|_qWdS)NTzUnknown repo: '%s'.z1Can't use --norepopath with multiple repositoriesF)r&demandsZavailable_reposZsack_activationbasereposoptsrepoalldisableKeyErrorrZCliErrorenablesourceZenable_source_reposlenlist iter_enabled norepopathr_repoZexpireZdeltarpm)rr1r3Zrepoidr5rrr configure\s(     zRepoSyncCommand.configurecsd|jj_d}x|jjjD]}|jjr8|jjd|jj r|jj rxP|jj D]6\}}|j |}|rtt |qTtd|}tj|qTWn |j ||jjr|jj rt|jj fdddD}|rxB|D]}|j |}|rt |PqWtd}tj|n |j||j|} |jj r8|j| n|j| |jjrxt| D]l} |j| } tj| fd d | | _|jj| \} } | d krRtjtd jtjj | | tj!| d }qRW|jj"r|j#|| qW|st$j%j&tddS)NTz%Failed to get mirror for metadata: %scsg|]}|kr|qSrr).0md_type)mdlrr sz'RepoSyncCommand.run..groupgroup_gz group_gz_zckz(Failed to get mirror for the group file.cSs|S)Nr)s local_pathrrrsz%RepoSyncCommand.run..rzRemoving {}: {}FzGPG signature check failed.)rErFrG)'r2ZconfZ keepcacher3r=r4Z 
remote_timer?ZsetPreserveRemoteTimedownload_metadataZurlsZgetMetadataLocationsremote_locationprintrrwarningZ downloadcompsdictgetcomps get_pkglist print_urlsdownload_packagesZgpgcheckpkg_download_pathtypes MethodTypeZlocalPkgZpackage_signature_checkformatrr basenameunlinkdeletedelete_old_local_packagesr exceptionsError)rZ gpgcheck_okr5rBZ md_locationurlmsgZgroup_locationsZgroup_locationpkglistrrIresulterrorr)rCrrunws^                        zRepoSyncCommand.runcCs$t|jjp|jj|jjs|jndS)N)rr4ZdestdirZ download_pathr>id)rr5rrr repo_targetszRepoSyncCommand.repo_targetcCs&|jjrt|jj|jS|j|SdS)N)r4Z metadata_pathrrerf)rr5rrrmetadata_targetszRepoSyncCommand.metadata_targetcCsT|j|j}tjjtjj||j}|jtjj|dsPtj j t dj |||S)Nrdz6Download target '{}' is outside of download path '{}'.) rfr5rr r r location startswithrr\r]rrW)rrrfrTrrrrTs  z!RepoSyncCommand.pkg_download_pathc stfdd|D}xtjj|D]\}}}x||D]t}tjj||}|jdr8tjj|r8||kr8ytj|t j t d|Wq8t k rt j t d|Yq8Xq8Wq(WdS)Nc3s|]}j|VqdS)N)rT)rAr)rrr sz.z.rpmz [DELETED] %szfailed to delete file %s)setrwalkrfr r endswithisfilerYrinforOSErrorrb) rr5r`Zdownloaded_filesdirpathZdirnames filenamesfilenamer r)rrr[s  z)RepoSyncCommand.delete_old_local_packagescCsZ|jj}|rV|j|}tjj|tjj|d}tj j j ||dt j td|jdS)Nz comps.xml)rz!comps.xml for repository %s saved)r?Z getCompsFnrgrrrrr r ZyumZmiscZ decompressrrorre)rr5Zcomps_fnZ dest_pathrrrrrPs   zRepoSyncCommand.getcompscCs|j|}|jj|dS)NT)rgr?ZdownloadMetadata)rr5rfrrrrKs  z!RepoSyncCommand.download_metadatacCstjjs|jS|j|jjj}t}i}i}xp|D]h}|j}|j ||j |j ij |j gj |x.|D]&}|j |ij |j gj |j qvWq8W|j|j|ddj} t} x|jD]\} } t} | jt| jdddt}x0| jD]$}x|D]}|j |jqWqWx:|j|djD]&}dj|}| jt||| q>Wx0| D](}x | |D]}| j |jq|WqnWqW| j|j| d} | S)a\ return union of these queries: - the latest NEVRAs from non-modular packages - all packages from stream version with the latest package NEVRA (this should not be needed but the latest package NEVRAs might be part of an older module version) - all packages from the latest stream version )Z 
nevra_strict)Zpkg__neqT)reverserz3{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch})rr2Z WITH_MODULESZlatestZapplyZ_moduleContainerZgetModulePackagesrkZ getArtifactsupdate setdefaultZ getNameStreamZ getVersionNumappendfilteritemsaddsortedkeysvaluesrWmaxunion)rqueryZmodule_packagesZ all_artifactsZ module_dictZartifact_versionZmodule_packageZ artifactsZartifactZ latest_queryZlatest_stream_artifactsZ namestreamZ version_dictZversionsZstream_artifactsmodulesmoduleZ latest_pkgZnevraversionrrr _get_latestsB         zRepoSyncCommand._get_latestcCsd|jjjtjdjj|jd}|jj r2|j |}|jj rH|jddn|jj r`|j|jj d|S)N)flags)Zreponamesrc)Zarch) r2sackrhawkeyIGNORE_MODULAR_EXCLUDESZ availableZfiltermrer4Z newest_onlyrr:r')rr5rrrrrQs  zRepoSyncCommand.get_pkglistcsjj}|jjdkr tjjtjj|jj t j dj d}fdd|D}|j ||dddS)N)rrcsg|]}t|j|qSr)rrT)rAr)rrrrrD0sz5RepoSyncCommand.download_packages..F)r2outputrrcallbackZNullDownloadProgressdrpmZ DeltaInforrrrZ installedZ_download_remote_payloads)rr`r2rZpayloadsr)rrrrS)s  z!RepoSyncCommand.download_packagescCs@x:|D]2}|j}|r t|qtd|j}tj|qWdS)Nz$Failed to get mirror for package: %s)rLrMrnamerrN)rr`rr^r_rrrrR4s   zRepoSyncCommand.print_urls)r%)r r!r"aliasesrZsummaryr staticmethodr/r@rcrfrgrTr[rPrKrrQrSrRr#rr)rrr$4s   :  9  r$)Z __future__rrrrZshutilrUZdnfpluginscorerrZdnf.cli.option_parserrrZdnf.clirr5Z RPMPayloadrZpluginZregister_commandr&ZCommandr$rrrrs    __pycache__/repoclosure.cpython-36.pyc000064400000010500151030231510013727 0ustar003 gt`@sVddlmZddlmZddlmZddlZGdddejZGdddej j Z dS) )absolute_import)unicode_literals)_Ncs eZdZdZfddZZS) RepoClosure repoclosurecs,tt|j|||dkrdS|jtdS)N)superr__init__Zregister_commandRepoClosureCommand)selfbasecli) __class__!/usr/lib/python3.6/repoclosure.pyr!szRepoClosure.__init__)__name__ __module__ __qualname__namer __classcell__rr)r rrsrc@s>eZdZd ZedZddZddZd dd Ze d d Z dS)r rz:Display a list of unresolved dependencies for repositoriescCsd|jj}d|_d|_|jjr`xB|jjjD]2}|j |jjkrT|j |jj krT|j q*|j q*WdS)NT) r 
demandsZsack_activationZavailable_reposoptsrepor reposallidcheckdisableenable)r rrrrr configure,s zRepoClosureCommand.configurecCs|jjr|j|jj}n|j}xRt|jD]B}tdjt||jtdx||D]}tdj|qZWq.Wt |dkrt d}t j j |dS)Nzpackage: {} from {}z unresolved deps:z {}rz/Repoclosure ended with unresolved dependencies.)rarches_get_unresolvedsortedkeysprintformatstrreponamelenrdnf exceptionsError)r unresolvedpkgZdepmsgrrrrun7s zRepoClosureCommand.runNcsLi}t}|jjr|jjjjdd|jjjjdd}xv|jjjD]D}j |jjjj|j dj |j |jjjj|j dj }qHWn |jjjj |jjjj }|jj rN|jjjjdd}g}xT|jj D]H}tjj|} |j| j|jjdddd} | r|j | }q|j|qW|rJtjjtddj||}|jjrh|j|jjd|dk r~|j|d|jjjrjdd j|jxf|D]^}t||<xL|jD]B} t| } | jd s| jd rq|j | ||j | qWqWtfd d |Dfdd|j!D} dd| j!DS)NT)empty)r&F)Z with_nevraZ with_providesZwith_filenameszno package matched: %sz, )arch)Zlatest_per_archz solvable:zrpmlib(c3s|]}j|ds|VqdS))ZprovidesN)filter).0x) availablerr sz5RepoClosureCommand._get_unresolved..cs(i|] \}}tfdd|D|qS)c3s|]}|kr|VqdS)Nr)r2r3)unresolved_depsrrr5sz@RepoClosureCommand._get_unresolved...)set)r2kv)r6rr sz6RepoClosureCommand._get_unresolved..cSsi|]\}}|r||qSrr)r2r8r9rrrr:s)"r7rZnewestr ZsackZqueryr1rZ iter_enabledunionrZlatestr4pkglistr(ZsubjectZSubject intersectionZget_best_queryappendr)r*rjoinrZfiltermZconfZbestZapplyZrequiresr% startswithadditems)r r0r+ZdepsZto_checkrZ pkglist_qerrorsr,ZsubjZpkg_qZreqZreqnameZunresolved_transitionr)r4r6rr Es\ &                  z"RepoClosureCommand._get_unresolvedcCs`|jdgddtdd|jdgdtdd|jd d d td d |jdgdtddddS)Nz--archr>rzBcheck packages of the given archs, can be specified multiple times)defaultactiondesthelpz--checkzSpecify repositories to check)rDrErGz-nz--newest store_truez+Check only the newest packages in the repos)rErGz--pkgz#Check closure for this package onlyr<)rDrErGrF) add_argumentr)parserrrr set_argparsers       z RepoClosureCommand.set_argparser)r)N) rrraliasesrZsummaryrr.r staticmethodrKrrrrr (s   Qr ) Z __future__rrZdnfpluginscorerZdnf.clir(ZPluginrr ZCommandr rrrrs    
__pycache__/copr.cpython-36.pyc000064400000050311151030231510012334 0ustar003 fZv@sddlmZddlZddlZddlZddlZddlZddlZddlZddl Z ddl Z ddlZddl m Z m Z ddlZddlmZddlmZddlZy$ddlmZmZmZmZddZWnLek rd d Zydd lmZWnek rd dZYnXYnXd Zee de dgZee de ddgZerdddl m!Z!m"Z"m#Z#ddl$m%Z%m&Z&m'Z'n(ddl!m!Z!m"Z"m#Z#ddl(m%Z%m&Z&m'Z'ej)j*Gdddej+j,Z-ej)j*Gddde-Z.dS))print_functionN)_logger)PY3)ucd)nameversioncodenameos_release_attrcCstttfS)N)rrr r r /usr/lib/python3.6/copr.pylinux_distribution.sr cCsdS)Nr )rr r r r 1sr )r cCsrtd`}i}xF|D]>}y$|jjd\}}|jd||<Wqtk rPYqXqW|d|ddfSQRXdS)Nz/etc/os-release="NAMEZ VERSION_ID)openrstripsplitstrip ValueError)Zos_release_fileZos_release_datalineZos_release_keyZos_release_valuer r r r 7s   copryesynonr) ConfigParser NoOptionErrorNoSectionError)urlopen HTTPErrorURLErrorc@s eZdZdZdZdZdZdZdZedeZ d8Z e d Z d Z e d Zed d ZddZddZddZddZddZddZddZddZddZd d!Zd"d#Zed$d%Zd&d'Zd(d)Zd*d+Z d,d-Z!d.d/Z"d0d1Z#ed2d3Z$ed4d5Z%ed6d7Z&dS)9 CoprCommandz Copr plugin for DNF Nzcopr.fedorainfracloud.orgZfedoraZhttpsiz://rz Interact with Copr repositories.Ta enable name/project [chroot] disable name/project remove name/project list --installed/enabled/disabled list --available-by-user=NAME search project Examples: copr enable rhscl/perl516 epel-6-x86_64 copr enable ignatenkobrain/ocltoys copr disable rhscl/perl516 copr remove rhscl/perl516 copr list --enabled copr list --available-by-user=ignatenkobrain copr search tests c Cs|jddddddddgd |j}|jd d td d |jdd tdd |jdd tdd |jddtdd|jdtdd|jddddS)N subcommandhelpenabledisableremovelistsearch)nargschoicesz --installed store_truez.List all installed Copr repositories (default))actionr&z --enabledzList enabled Copr repositoriesz --disabledzList disabled Copr repositoriesz--available-by-userrz-List available Copr repositories by user NAME)metavarr&z--hubz(Specify an instance of Copr to work with)r&arg*)r,) add_argumentZadd_mutually_exclusive_groupr)parserZ list_optionr r r set_argparserps    zCoprCommand.set_argparsercCs|jjjjdkrdSd}t}g}|jjjd}tjj |t 
d}tjj |r|j ||j ||jddr|jddr|jdd}|jdd}||g|_n ddg|_xHtjtjj |t dD],}|jdrtjj |t d|} |j | qWg} t|jjr|jjdjd } t| d krV|jjrVtjtd td tjjtd nL|jj rt| d kr|j|_|j|_n t| d kr| d}n|jj}|rH|rHd|_|j t |dd|j!||dd} | rH|j!||d|j"} |j!||d|j#} | |_| d| |_t$| |j#krH|jd| 7_|jd| 7_|jsd|krr||_|j"d||_n|jddd|_||_dS)Nrrz.confmain distribution releaseverFz.d/zError: z^specify Copr hub either with `--hub` or using `copr_hub/copr_username/copr_projectname` formatzmultiple hubs specifiedT)reversehostnameprotocolportz://:r%)%cliZcommandoptsrbaseconfZpluginconfpathospathjoin PLUGIN_CONFisfileappendreadZ has_optionget chroot_configlistdirendswithlenr1rhubrcriticalrdnfCliErrordefault_hostname copr_hostname default_urlcopr_urlsorted_read_config_itemdefault_protocol default_portint)selfZcopr_hubZcopr_plugin_configZ config_filesZ config_pathZdefault_config_filer7r8filenameZ config_fileprojectr<r=r>r r r configuresl                 zCoprCommand.configurec Cs*y |j||Sttfk r$|SXdS)N)rKrr)r]configrPZsectiondefaultr r r rYs zCoprCommand._read_config_itemcCstjjdj|jdS)Nz{0} )sysstderrwriteformatr)r]textr r r _user_warning_before_promptsz'CoprCommand._user_warning_before_promptc Cs|jjd}|dkr&|jjj|dS|dkrl|jjrH|j|jjdS|j|jj j d|jj |jj dSy|jj d}WnLttfk rtjtdtd|jjj|tjjtdYnXy\|jj d}t|jj dkrtjjtd|jd |_t|jd kr$tjjtd Wn*tk rP|j}|jd |_YnX|d krj|j|dS|jd }t|dkrtjtdtdtjjtdnexactly two additional parameters to copr command are requiredr%zToo many arguments.-r:zOBad format of optional chroot. The format is distribution-version-architecture.r+r9zEuse format `copr_username/copr_projectname` to reference copr projectzbad copr project formatz{0}/_copr:{1}:{2}:{3}.repor'a Enabling a Copr repository. Please note that this repository is not part of the main distribution, and quality may vary. The Fedora Project does not exercise any power over the contents of this repository beyond the rules outlined in the Copr FAQ at , and packages are not held to any quality or security level. 
Please do not file bug reports about these packages in Fedora Bugzilla. In case of problems, contact the owner of this repository. z!Do you really want to enable {0}?z Repository successfully enabled.r(z!Repository successfully disabled.r)z Repository successfully removed.zUnknown subcommand {}.)rir:)(rAr$r@Z optparserZ print_helpZavailable_by_user_list_user_projects_list_installed_repositoriesrBrCZreposdirenabledZdisabledr1r IndexErrorrrQrrRrSrO exceptionsErrorr chroot_parts _guess_chroot_searchrf get_reposdirrU_sanitize_username _need_rootrF _ask_user_download_repoinfo_runtime_deps_warning _disable_repo _remove_repo) r]r$ project_namechrootr_ copr_usernamecopr_projectname repo_filenamerymsgr r r runs                     zCoprCommand.runcCs|jjdd}tjd|j|}|j|jko8tjd|}tjd|}|jjr`| rh| rhdSn|shdStjd|rxdStjd|rdS|j } | r|s| r|rdSd} tjd |r|jd d \} } } }| d| d|}ntjd|r2|jd d}|j d d djd dd} | d|dd|d}n.|jd d}|j d|dd|d}d} | sn|d7}| r||d7}t || S)Nr9r%z_copr:Z_copr_z_copr:|^_copr_zcopr:.*:.*:.*:mlz coprdep:.*Fzcopr:r?r:rjrirTz (disabled)z *) repofilerrematchrUrWrVrArPrmrsplitrTprint)r]repo_idrepo enabled_only disabled_only file_nameZ match_newZ match_oldZ match_anyrmold_reporrUZ copr_ownerZcopr_dirrZ copr_namer r r _list_repo_file8sB       zCoprCommand._list_repo_filecCsFd}x,|jjjD]\}}|j||||rd}qW|rBttddS)NFTz* These coprs have repo file with an old format that contains no information about Copr hub - the default one was assumed. 
Re-enable the project to fix this.)rBrepositemsrrr)r]Z directoryrrrrrr r r rlis z(CoprCommand._list_installed_repositoriesc Csdj|}|j|}|jj|dd}ytj|j}Wn*tk r`tj j t dj|YnX|j |t dj|}|j |xL|dD]@}dj||d}|d pt d } |jjjt|| }t|qWdS) Nz!/api_3/project/list?ownername={0}zw+)modez+Can't parse repositories for username '{}'.zList of {} coprsrz {0}/{1} : r descriptionzNo description given)rfrWrBr jsonloadsrJrrRrorpr_check_json_output_print_match_sectionoutput fmtKeyValFillrr) r]Z user_nameapi_pathurlres json_parse section_textitemrdescr r r rkss"    zCoprCommand._list_user_projectsc Csdj|}|j|}|jj|dd}ytj|j}Wn*tk r`tj j t dj|YnX|j |t dj|}|j |xJ|dD]>}dj|d}|d pt d } |jjjt|| }t|qWdS) Nz/api_3/project/search?query={}zw+)rzCan't parse search for '{}'.z Matched: {}rz{0} : Z full_namerzNo description given.)rfrWrBr rrrJrrRrorprrrrrrr) r]Zqueryrrrrrrrrr r r rss     zCoprCommand._searchcCs|jjj|}t|dS)N)rBrZ fmtSectionr)r]rgZ formattedr r r rsz CoprCommand._print_match_sectioncCsj|jstjjdd|_tjjdj|j|jjrf|jjj sb|jj j dj|dj|d rfdSdS)N Fz{0} z {} [y/N]: z {} [Y/n]: )rZdefaultyes_msgT) first_warningrcrdrerfrrBZ _promptWantedrCZassumenorZ userconfirm)r]ryrr r r _ask_user_no_raises  zCoprCommand._ask_user_no_raisecCs |j||stjjtddS)NzSafe and good answer. 
Exiting.)rrRrorpr)r]ryrr r r rws zCoprCommand._ask_usercCs tjdkrtjjtddS)Nrz/This command has to be run under the root user.)rDgeteuidrRrorpr)clsr r r rvs zCoprCommand._need_rootcs|jdks&ddks&ddkr,t|jjjd}tfdddDrd krbd |}n&d td krxd |}ndjd|}ndkrtj d}dkrdj|}ndjd|}nPdkrtj d}dkrdj|}ndjd|}nddj ddd}|S)z2 Guess which chroot is equivalent to this machine NrFr%Zbasearchcsg|] }|kqSr r ).0r)distr r sz-CoprCommand._guess_chroot..Fedora Fedora LinuxZRawhidezfedora-rawhide-ZrawhideZredhat_support_product_versionzfedora-{0}-{1}ZMageiaz%{distro_arch}ZCauldronzmageia-cauldron-{}zmageia-{0}-{1}ZopenSUSEz%{_target_cpu}Z Tumbleweedzopensuse-tumbleweed-{}zopensuse-leap-{0}-{1}zepel-%s-x86_64.)rr) rLr rBrC substitutionsanyr rfrpmZ expandMacror)r]Zdistarchr~r )rr rrs,        zCoprCommand._guess_chrootcCsdj|jdd}|jd}dj|||}y*t|j|}tjj|rRtj|Wn^t k rl}z|j dkrt dj|j||j t |}t jj|t d}|jjd} | r>tj| jd} tj| } |t d jdj|j|7}| jd r0|t d d jd d| d D7}|t dj|7}t jj|n|t dj|7}t jj|WYdd}~XnJtk r}z,t dj|j||jj}t jj|WYdd}~XnX|j} | jd} tjd| rtjj|jjjd| ddd}|j|j krR|j!dddj!|j"dj!dddj!ddj!dd} tjj| rRtj| t#|d.}|j$| x|j%D]} |j$| qrWWdQRXtj&|t'j(t'j)Bt'j*Bt'j+BdS) Nrjr%z%/coprs/{0}/repo/{1}/dnf.repo?arch={2}iz Request to {0} failed: {1} - {2}z+It wasn't possible to enable this project. 
zCopr-Error-Datazutf-8z1Repository '{0}' does not exist in project '{1}'.zavailable chrootsz Available repositories: z, css|]}dj|VqdS)z'{}'N)rf)rxr r r sz-CoprCommand._download_repo..z If you want to enable a non-default repository, use the following command: 'dnf copr enable {0} ' But note that the installed repo file will likely need a manual modification.zProject {0} does not exist.zFailed to connect to {0}: {1}z\[copr:rriz.repoz_copr:_coprrr?Zgroup_@wbrr),rFrqrfr rWrDrEexistsr)r!coderstrrRrorpZheadersrKbase64Z b64decodedecoderrr"reasonstrerrorreadlinerrrBrCrtrVreplacerUrre readlineschmodstatS_IRUSRS_IWUSRS_IRGRPS_IROTH)r]r}rZ short_chrootZarchrZresponseeZ error_msgZ error_dataZerror_data_decodedZ first_linerZold_repo_filenamefr r r rxsX           $   zCoprCommand._download_repocs|jjdd|jj|j|j||g}x(jjD]}|jdrJq:|j|q:W|s`dSt d}t j d|j dj fdd |D}|j|t d sx,|D]$}|jjjj||jjjd d iqWdS) a, In addition to the main copr repo (that has repo ID prefixed with `copr:`), the repofile might contain additional repositories that serve as runtime dependencies. This method informs the user about the additional repos and provides an option to disable them. T)rzcopr:NaMaintainer of the enabled Copr repository decided to make it dependent on other repositories. Such repositories are usually necessary for successful installation of RPMs from the main Copr repository (they provide runtime dependencies). Be aware that the note about quality and bug-reporting above applies here too, Fedora Project doesn't control the content. Please review the list: {0} These repositories have been enabled automatically.r%z cs*g|]"}djt|jj|ddqS)z){num:2}. 
[{repoid}] baseurl={baseurl}baseurl)Znumrepoidr)rfnextcfgZgetValue)rr)counterrr r r9sz5CoprCommand._runtime_deps_warning..z!Do you want to keep them enabled?rm0)rBresetZread_all_repos_get_copr_reporursections startswithrIr itertoolscountrfrFrrCwrite_raw_configfilerr)r]rrZ runtime_depsrryZdepr )rrr rzs*      z!CoprCommand._runtime_deps_warningc Csdj|jjddd|j||}||jjkrdj|j||}}||jjkrd|jj|jkr|jj|jjdd }y.|jdddjddd}||jkrdSWqtk rYqXndS|jj|S) Nzcopr:{0}:{1}:{2}r?r%rz{0}-{1}rr9rir) rfrUrrurBrrrrn)r]rrrrrUr r r rFs     zCoprCommand._get_copr_repocCst|j||}|s,tjjtdj|j||ytj|j Wn2t k rn}ztjjt |WYdd}~XnXdS)Nz&Failed to remove copr repo {0}/{1}/{2}) rrRrorprrfrUrDr)rOSErrorr)r]rrrrr r r r|\s zCoprCommand._remove_repocCsd|j||}|dkr,tjjtdj||x2|jjD]$}|jj j |j ||jj j ddiq8WdS)Nz!Failed to disable copr repo {}/{}rmr) rrRrorprrfrrrBrCrrr)r]rrrrr r r r{hs  zCoprCommand._disable_repoc Cs<ytj|j}Wn$tk r6tjjtddSX|S)z Wrapper around response from server check data and print nice error in case of some error (and return None) otherwise return json object. 
zUnknown response from server.N)rrrJrrRr@rSr)rrrr r r _get_datats zCoprCommand._get_datacCs"d|krtjjdj|ddS)Nerrorz{})rRrorprf)rZjson_objr r r rszCoprCommand._check_json_outputcCs&|ddkrdj|ddS|SdS)Nrrzgroup_{}r%)rf)rrr r r rus zCoprCommand._sanitize_username)r)'__name__ __module__ __qualname____doc__rLrTZ default_hubrZr[rValiasesrsummaryrusage staticmethodr5r`rYrhrrrlrkrsrrrw classmethodrvrrrxrzrr|r{rrrur r r r r#PsD  L_1   %82   r#c@sDeZdZdZdZedZdZddZddZ e d d Z d d Z d S)PlaygroundCommandz Playground plugin for DNF playgroundz$Interact with Playground repository.z [enable|disable|upgrade]c Cs0|j|jtdtddj|j}|jj|dd}|j|}|j|ddkrft j j tdx|d D]}d j|d |d }d j|jj j |jdd}yj||dkrwpdj|j||}|jj|dd}|j|}|j|od|ko|ddkr |j||Wqpt jjk r&YqpXqpWdS)Nz!Enabling a Playground repository.zDo you want to continue?z{0}/api/playground/list/zw+)rrokzUnknown response from server.rz{0}/{1}ZusernameZcoprnamez{}/_playground_{}.repor9rjZchrootsz{0}/api/coprs/{1}/detail/{2}/)rvrwrrfrWrBr rcloserRr@rSrCrtrrxrorp) r]r~Zapi_urlrrrr}rZoutput2r r r _cmd_enables8         zPlaygroundCommand._cmd_enablecCs6|jx(tjdj|jjjD]}|j|q WdS)Nz{}/_playground_*.repo)rvglobrfrBrCrtr|)r]rr r r _cmd_disableszPlaygroundCommand._cmd_disablecCs|jdddddgddS)Nr$r%r'r(upgrade)r,r-)r3)r4r r r r5szPlaygroundCommand.set_argparsercCstjjd|jjd}|j}|dkrB|j|tjt dn`|dkrb|j tjt dn@|dkr|j |j|tjt dntjjt d j |dS) Nz%Playground is temporarily unsupportedrr'z-Playground repositories successfully enabled.r(z.Playground repositories successfully disabled.rz-Playground repositories successfully updated.zUnknown subcommand {}.) 
rRrorprAr$rrrrryrrrf)r]r$r~r r r rs    zPlaygroundCommand.runN)r) rrrrrrrrrrrr5rr r r r rs  r)/Z __future__rrrrrDrZshutilrrcrZdnfpluginscorerrrRZ dnf.pycomprZdnf.i18nrrZdistrorrr r r ImportErrorplatformrGsetZYESZNOZ configparserrrrZurllib.requestr r!r"Zurllib2ZpluginZregister_commandr@ZCommandr#rr r r r sP      B__pycache__/system_upgrade.cpython-36.pyc000064400000054753151030231510014442 0ustar003 fh@sdZddlmZmZmZmZddlZddlZddlZddl Z ddl Z ddl Z ddl m Z ddlmZmZddlZddlZddlmZddlmZddlZddlmZmZddlZed e jd Ze jd Ze jd Ze jd Z eZ!dZ"edZ#edZ$edZ%dZ&ddZ'ddZ(gfddZ)d7ddZ*ddZ+Gddde,Z-Gdd d e,Z.e.Z/Gd!d"d"ej0j1Z2d#d$Z3d%d&Z4d'd(Z5d)d*Z6d+d,dd-d.gZ7Gd/d0d0ej8Z9Gd1d2d2ej:j;ZdS)8zGsystem_upgrade.py - DNF plugin to handle major-version system upgrades.)callPopen check_outputCalledProcessErrorN)journal)_logger)CliError)ucd)serialize_transactionTransactionReplayzthe color of the skyZ 9348174c5cc74001a71ef26bd79d302eZ fef1cc509d5047268b83a3a553f54b43Z 3e0a5636d16b4ca4bbe5321d06c6aa62Z 8cec00a1566f4d3594f116450395f06cz/usr/bin/plymouthzfprrrr=s   z State._readc CsFtjjtjj|jt|jd}tj |j |dddWdQRXdS)NwT)indent sort_keys) r$r% ensure_dirrrdirnamer;rr@dumpr<)r>Zoutfrrrr5sz State.writecCs&tjj|jrtj|j|jdS)N)rrexistsr;r&r=)r>rrrclears z State.clearcCs|S)Nr)r>rrr __enter__szState.__enter__cCs|dkr|jdS)N)r5)r>exc_type exc_value tracebackrrr__exit__szState.__exit__cs"fdd}fdd}t||S)Ncs||j<dS)N)r<)r>value)optionrrsetpropszState._prop..setpropcs |jjS)N)r<get)r>)rTrrgetpropszState._prop..getprop)property)rTrUrWr)rTr_props  z State._prop state_versiondownload_statusdestdirtarget_releaseversystem_releasevergpgcheckgpgcheck_reposrepo_gpgcheck_reposupgrade_statusupgrade_command distro_syncenable_disable_reposmodule_platform_idN)__name__ __module__ __qualname__r?r=r5rMrNrRrYrZr[r\r]r^r_r`rarbrcrdrerfrrrrr:s(  r:c@s@eZdZdZddZddZddZdd Zd d Zd d Z dS)PlymouthOutputzA plymouth output helper class. Filters duplicate calls, and stops calling the plymouth binary if we fail to contact it. 
cCsd|_t|_d|_dS)NT)alivedict _last_args _last_msg)r>rrrr?szPlymouthOutput.__init__c Gsj||jj|k}|jr| s$|dkrdytt|f|dk|_Wntk rXd|_YnX||j|<|jS)Nz--pingrF)rmrVrkrPLYMOUTHr')r>cmdargsZdupe_cmdrrr _plymouths  zPlymouthOutput._plymouthcCs |jdS)Nz--ping)rr)r>rrrpingszPlymouthOutput.pingcCs4|jr |j|kr |jdd|j||_|jdd|S)Nz hide-messagez--textzdisplay-message)rnrr)r>msgrrrmessageszPlymouthOutput.messagec CsRd}y$ttdg}tjdt|r&d}Wnttfk r@YnX|jdd|S)NZupdatesz--helpz--system-upgradezsystem-upgradez change-modez--)rroresearchr rr'rr)r>modesrrrset_modes zPlymouthOutput.set_modecCs|jddt|S)Nz system-updatez --progress)rrstr)r>ZpercentrrrprogressszPlymouthOutput.progressN) rgrhri__doc__r?rrrsrurzr|rrrrrjs  rjc@s$eZdZddZddZddZdS)PlymouthTransactionProgresscCs|j||||dS)N)_update_plymouth)r>packageactionZti_doneZti_totalZts_doneZts_totalrrrr|sz$PlymouthTransactionProgress.progresscCsd|dkr dS|tjjkr0tjtd||ntjdtd||tj|j||||dS)NgV@Zg$@)r$callbackZ PKG_VERIFYPlymouthr|intru _fmt_event)r>rrcurrenttotalrrrrs  z,PlymouthTransactionProgress._update_plymouthcCs tjjj||}d||||fS)Nz[%d/%d] %s %s...)r$ transactionZACTIONSrV)r>rrrrrrrrsz&PlymouthTransactionProgress._fmt_eventN)rgrhrir|rrrrrrr~sr~ccsJtj}|j|jddd}x(|D] }|d}||kr8q"|}|Vq"WdS)zVFind all boots with this message id. Returns the entries of all found boots. 
r) MESSAGE_IDZ_UIDN_BOOT_ID)rReaderZ add_matchhex) message_idjZoldbootr)Zbootrrr find_bootss rc Cstttdd }xJtttD]:\}}tdj|d|d|d|jdd|jddqW|d krpttd dS) Nz3The following boots appear to contain upgrade logs:ru){} / {.hex}: {:%Y-%m-%d %H:%M:%S} {}→{}rZ__REALTIME_TIMESTAMPSYSTEM_RELEASEVERz??TARGET_RELEASEVERz-- no logs were found --r)r7r enumeraterID_TO_IDENTIFY_BOOTSformatrV)nr)rrr list_logs s  rc CsZtt|}y(|dkrt|dkr*|d8}||dStk rTttdYnXdS)Nrrrz!Cannot find logs with this index.)listr IndexErrorr r)rrZbootsrrr pick_boot.s  rcCsDtt|}tdd|jg}|j|j}|dkr@tjjt ddS)NZ journalctlz--bootrz%Unable to match systemd journal entry) rrrrwait returncoder$ exceptionsErrorr)rZboot_idZprocessZrcrrrshow_log=s  rZdownloadZcleanupgradelogcs eZdZdZfddZZS)SystemUpgradePluginzsystem-upgradecs8tt|j|||r4|jt|jt|jtdS)N)superrr?Zregister_commandSystemUpgradeCommandOfflineUpgradeCommandOfflineDistrosyncCommand)r>basecli) __class__rrr?Ns   zSystemUpgradePlugin.__init__)rgrhrinamer? __classcell__rr)rrrKsrcs(eZdZdEZedZdZfddZeddZ d d Z d d Z d dZ ddZ ddZddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd1d2Zd3d4Zd5d6Z d7d8Z!d9d:Z"d;d<Z#d=d>Z$d?d@Z%dAdBZ&dCdDZ'Z(S)Frsystem-upgradefedupz+Prepare system for upgrade to a new releasezvar/lib/dnf/system-upgradecsjtt|j|tjj|jjj|j |_ tjj|j d|_ tjj|jjjd|_ t tjj|j d|_dS)Nzsystem-upgrade-transaction.jsonz system-updatezsystem-upgrade-state.json)rrr?rrr#rr1r-DATADIRdatadirtransaction_file magic_symlinkr:state)r>r)rrrr?\s zSystemUpgradeCommand.__init__cCsJ|jdddtdd|jddtdd jtd |jd ttd d dS)Nz--no-downgraderdZ store_falsez=keep installed packages if the new release's version is older)destrhelptidrz[%s]|)nargschoicesmetavarz--numberzwhich logs to show)typer) add_argumentrCMDSr#r)parserrrr set_argparserds   z"SystemUpgradeCommand.set_argparsercCs(tj||tj|jj|jjtjjddS)zLog directly to the journal.)rZPRIORITYrrZ DNF_VERSIONN) rsendZ LOG_NOTICErr^r]r$constVERSION)r>rurrrr log_statusns zSystemUpgradeCommand.log_statuscCs|jd|jddS)NZcheck pre_configure) 
_call_sub)r>rrrrws z"SystemUpgradeCommand.pre_configurecCs|jddS)N configure)r)r>rrrr{szSystemUpgradeCommand.configurecCs|jddS)Nrun)r)r>rrrr~szSystemUpgradeCommand.runcCs|jddS)Nr)r)r>rrrrun_transactionsz$SystemUpgradeCommand.run_transactioncCs|jddS)NZresolved)r)r>rrr run_resolvedsz!SystemUpgradeCommand.run_resolvedcCs.t||d|jjdd}t|r*|dS)Nrr)getattroptsrcallable)r>rZsubfuncrrrrszSystemUpgradeCommand._call_subcCs(|jjtkr$tdj|d}t|dS)NzFIncompatible version of data. Rerun 'dnf {command} download [OPTIONS]')command)rrZ STATE_VERSIONrrr )r>rrtrrr_check_state_versions z)SystemUpgradeCommand._check_state_versioncCs*|j|jj_|jjr|jjnd|jj_dS)N)rrr1cachedirrr\)r>rrr _set_cachedirs z"SystemUpgradeCommand._set_cachedircCsttjjtjjg}ttjj}i}i}xl|jjjD]^}|j |krp|j }|j |j |j jij t|i|j <q6|j |kr6|j |j t|j i|j <q6W||fS)z forward = {repoid:{pkg_nevra: {tsi.action: tsi.reason}} reverse = {pkg_nevra: {tsi.action: tsi.reason}} :return: forward, reverse )setr$rZBACKWARD_ACTIONSlibdnfZ!TransactionItemAction_REINSTALLEDZFORWARD_ACTIONSrrrpkgreason setdefaultrepoidr{)r>Zbackward_actionZforward_actionsZforwardreverseZtsirrrr%_get_forward_reverse_pkg_reason_pairss  & z:SystemUpgradeCommand._get_forward_reverse_pkg_reason_pairscCsb|j|jj_|jjr|jjnd|jj_d|jjkrJ|jj rJtt dnd|jjkr^d|j_dS)Nzoffline-distrosynczFCommand 'offline-distrosync' cannot be used with --no-downgrade optionzoffline-upgradeF) rrr1rrr\rrdr r)r>rrrpre_configure_downloads   z+SystemUpgradeCommand.pre_configure_downloadcCs |jdS)N)r)r>rrrpre_configure_rebootsz)SystemUpgradeCommand.pre_configure_rebootcCs.|j|jjr|jj|j_|jj|jj_dS)N) rrrerrepos_edr]rr1r.)r>rrrpre_configure_upgrades z*SystemUpgradeCommand.pre_configure_upgradecCs |jdS)N)r)r>rrrpre_configure_cleansz(SystemUpgradeCommand.pre_configure_cleancCsd|jjksd|jjkrtjtdt}|rLtd}tj|jt||j j rtd}|j j j s|j j jdj|dj|d rtjtd tjd t|j j |jjd nd |jjkr|jj|jd |jj_d |jj_d |jj_d |jj_d |jj_|j j jdg7_dS)Nzsystem-upgraderz\WARNING: this operation is not supported on the RHEL distribution. 
Proceed at your own risk.z-Additional information for System Upgrade: {}zyBefore you continue ensure that your system is fully upgraded by running "dnf --refresh upgrade". Do you want to continuez {} [y/N]: z {} [Y/n]: )rtZdefaultyes_msgzOperation aborted.r)r2zoffline-upgradeTZtest)rrrrCrr rrr rZ _promptWantedr1ZassumenooutputZ userconfirmerrorsysexitr3r.rZ _populate_update_security_filterdemands root_user resolvingavailable_repossack_activationZfreshest_metadataZtsflags)r>Zhelp_urlrtrrrconfigure_downloads*        z'SystemUpgradeCommand.configure_downloadcCsd|jj_dS)NT)rrr)r>rrrconfigure_rebootsz%SystemUpgradeCommand.configure_rebootcCsd|jj_d|jj_d|jj_d|jj_|jj|j_|jj dk rN|jj |j j _ |jj dk rx$|j j jD]}|j|jj k|_ qhW|jjdk rx$|j j jD]}|j|jjk|_qW|jj|j j _d|jj_d|j j _t|jj_d|j j _d|j j _dS)NTF)rrrrrrrrdrr_rr1r`reposvaluesrra repo_gpgcheckrfZ cacheonlyZ assumeyesr~Ztransaction_displayZclean_requirements_on_removeZinstall_weak_deps)r>rrrrconfigure_upgrades&            z&SystemUpgradeCommand.configure_upgradecCsd|jj_dS)NT)rrr)r>rrrconfigure_cleansz$SystemUpgradeCommand.configure_cleancCsdS)Nr)r>rrr configure_logsz"SystemUpgradeCommand.configure_logcCs~|jjdksttd|j|jj|jj|jjkrRtdj|jjd}t|t j j |j rlttdt jj|jdS)Ncompletezsystem is not ready for upgradezZthe transaction was not prepared for '{command}'. Rerun 'dnf {command} download [OPTIONS]')rzupgrade is already scheduled)rr[r rrrrrcrrrlexistsrr$r%rIr)r>rtrrr check_reboot s    z!SystemUpgradeCommand.check_rebootcCstjj|js$tjtdtdtj|j|j krLtjtdtdt j j j |j|jj}|sp|jj}|j||jjdkstdj|d}t|dS)Nz-trigger file does not exist. exiting quietly.rz1another upgrade tool is running. 
exiting quietly.readyz/use 'dnf {command} reboot' to begin the upgrade)r)rrrrrrr SystemExitreadlinkrr$ZyumZmiscZunlink_frrcrrrrbrr )r>rrtrrr check_upgrades  z"SystemUpgradeCommand.check_upgradec Cs,tj|j|j|j }d|_WdQRXdS)Nr)rsymlinkrrrrb)r>rrrr run_prepare,sz SystemUpgradeCommand.run_preparecCs6|j|jjddksdS|jtdttdS)NrrzRebooting to perform upgrade.)rrrrrREBOOT_REQUESTED_IDr)r>rrr run_reboot3s  zSystemUpgradeCommand.run_rebootc sjjrjjn jjjjdkrjjfddjjjD}|r\jj|fddjjj D}|rjj|j $}d|_ jj j |_jj j|_WdQRXdS)Noffline-upgradeoffline-distrosynccs$g|]}jjjj|jr|jqSr)rhistorygrouprVr).0g)r>rr Gsz5SystemUpgradeCommand.run_download..cs$g|]}jjjj|jr|jqSr)rrenvrVr)rr)r>rrrJsZ downloading)rr)rrdrZ upgrade_allrZ read_compscompsgroupsZenv_group_upgradeZ environmentsrr[r1r.r]r\)r>Zinstalled_groupsZinstalled_environmentsrr)r>r run_download=s       z!SystemUpgradeCommand.run_downloadc Csd}|j}d|_|j}WdQRX|dkr4td}n|dkrFtd}ntd}|j|ttjtjdtj |t t |j |j |_|jjdS) NZ incompletezoffline-upgradez1Starting offline upgrade. This will take a while.zoffline-distrosyncz4Starting offline distrosync. This will take a while.z0Starting system upgrade. 
This will take a while.r)rrbrcrrUPGRADE_STARTED_IDrrzr|rur9r rrreplayr)r>rrrtrrr run_upgradeSs      z SystemUpgradeCommand.run_upgradec Csdtjtdt|jjjtjj |jjjj g|j $}d|_ d|_ d|_d|_d|_WdQRXdS)NzCleaning up downloaded data...)rrrr*rr1rr$Z persistorZTempfilePersistorZdb_pathrr[rZrbrcr\)r>rrrr run_cleanms zSystemUpgradeCommand.run_cleancCs |jjrt|jjntdS)N)rZnumberrr)r>rrrrun_logzszSystemUpgradeCommand.run_logcCs|jjdS)z5Adjust transaction reasons according to stored valuesN)r Zpost_transaction)r>rrrresolved_upgradesz%SystemUpgradeCommand.resolved_upgradecCs|jjj}|js&tjtddSt|}yLt|j d"}t j ||ddd|j dWdQRXt tdj|j Wn<tk r}z tjjtdjt|WYdd}~XnXtjj|jjj}|j}d |_t|_|jj|_|jjj|_d d |jjj D|_!d d |jjj D|_"||_#|jjj$|_%|jjj&|_&|jj'|_(|jjj)|_)|jj*|_+WdQRXt,j|jj*d }tj||j-tdt.dS)NzKThe system-upgrade transaction is empty, your system is already up-to-date.rErFT)rGrH zTransaction saved to {}.zError storing transaction: {}rcSsg|]}|jr|jqSr)r_r)rrrrrrsz=SystemUpgradeCommand.transaction_download..cSsg|]}|jr|jqSr)rr)rrrrrrs)rzDownload finished.)/rrZ get_currentZpackagesrrrr rrr@rKr5r7rr'r$rr r{r+r,r1r-rr[rrZrrdr_rrr`rar^r.r]rfrrer\rrcDOWNLOAD_FINISHED_MSGrDOWNLOAD_FINISHED_ID)r>rdatafr8Z system_verrrtrrrtransaction_downloads: ,       z)SystemUpgradeCommand.transaction_downloadcCs@tjtd|jtdt|j|jjddkrrrrtransaction_upgrades  z(SystemUpgradeCommand.transaction_upgrade)rr))rgrhrialiasesrsummaryrr? staticmethodrrrrrrrrrrrrrrrrrrrrrrrrrr r r rrrrrr)rrrVsF        (rc@seZdZdZedZdS)roffline-upgradez%Prepare offline upgrade of the systemN)r)rgrhrirrrrrrrrsrc@seZdZdZedZdS)roffline-distrosyncz(Prepare offline distrosync of the systemN)r)rgrhrirrrrrrrrsr)N)?r} subprocessrrrrr@rZos.pathrvrZuuidZsystemdrZdnfpluginscorerrr$Zdnf.clir Zdnf.i18nr Zdnf.transactionZdnf.transaction_srr r Z libdnf.confrZUUIDrrr rrror/rr0rrr r*r3r9objectr:rjrrZTransactionProgressr~rrrrrZPluginrrZCommandrrrrrrrsd          @.  
e__pycache__/universal_hooks.cpython-36.opt-1.pyc000064400000013564151030231510015554 0ustar003 "h@sddlZddlZddlZddlZddlmZddlZddlZddlZddlZddl m Z ej e Z e jeje jejejGddde ZGdddejdZGd d d eZeZdd d ZGdddejdZGdddeZddZddZddZdS)N)path)PlugincsPeZdZdZfddZddZddZdd Zd d Zd d Z ddZ Z S)UniversalHooksPluginzuniversal-hookscstj||d|_dS)Nz/etc/dnf/universal-hooks)super__init__ hook_root)selfbaseZcli) __class__%/usr/lib/python3.6/universal_hooks.pyr,szUniversalHooksPlugin.__init__cCsttj|j|jjtdS)N)_run_dirrjoinr pre_config__name__LOG)rr r r r0szUniversalHooksPlugin.pre_configcCsttj|j|jjtdS)N)r rrrconfigrr)rr r r r3szUniversalHooksPlugin.configcCsttj|j|jjtdS)N)r rrrresolvedrr)rr r r r6szUniversalHooksPlugin.resolvedcCsttj|j|jjtdS)N)r rrrsackrr)rr r r r9szUniversalHooksPlugin.sackcCs8|jj}t|jt|t|jjtt j |j|tdS)N) pre_transactionr _run_pkg_dirsrrDnfTransactionInfor transactionr rr)rnamer r r r<sz$UniversalHooksPlugin.pre_transactioncCs8|jj}t|jt|t|jjttj |j|tdS)N) rrrrrrr r rr)rrr r r rAsz UniversalHooksPlugin.transaction) r __module__ __qualname__rrrrrrrr __classcell__r r )r r r)s rc@sDeZdZejddZejddZejddZejddZd S) FileSystemcCsdS)Nr )rpathnamer r r globHszFileSystem.globcCsdS)Nr )rrr r r isdirLszFileSystem.isdircCsdS)Nr )rrmoder r r accessPszFileSystem.accesscCsdS)Nr )rr!encodingr r r NamedTemporaryFileTszFileSystem.NamedTemporaryFileN) rrrabcabstractmethodrr r"r$r r r r rGsr) metaclassc@s,eZdZddZddZddZddZd S) RealFileSystemcCs tj|S)N)r)rrr r r rZszRealFileSystem.globcCs tj|S)N)rr )rrr r r r ]szRealFileSystem.isdircCs tj||S)N)osr")rrr!r r r r"`szRealFileSystem.accesscCstj||dS)N)r!r#)tempfiler$)rr!r#r r r r$csz!RealFileSystem.NamedTemporaryFileN)rrrrr r"r$r r r r r(Ysr(cCstj|sdSxxttj|dD]b}tj|r2q"tj|tjrx|d|}tj|dd}d|j kr|j d||j q"|j d|q"WdS)Nz/* T)shellrz!!! %s did not exit cleanly: %dz!!! 
%s is not executable) fsr sortedrr"r)X_OK subprocessZrun returncodeerror)Zhook_dirlogargsZscriptZcmdlineZ completedr r r r js   r c@seZdZejddZdS)TransactionInfocCsdS)Nr )rr r r getMembers|szTransactionInfo.getMembersN)rrrr%r&r7r r r r r6{sr6c@s"eZdZddddZddZdS)rN)returncCs ||_dS)N)r)rrr r r rszDnfTransactionInfo.__init__cCs|jS)N)r)rr r r r7szDnfTransactionInfo.getMembers)rrrrr7r r r r rsrcCstj|d|}t|}i}tjddd}i}|j} xtt| dddD]h} | j} | |kr`qLd|| <|j | d t tj|d | ||x&|j D]\} } | j | rd|| <qWqLW|j x&|D]} t tj|| |d |jqWWd QRXd S) zu :param str base_dir: :param logging.Logger log: :param str slot: :param TransactionInfo tinfo: Z multi_pkgswzutf-8)r!r#cSs|jS)N)r)mr r r sz_run_pkg_dirs..)key Zpkgsz --pkg_list=N)rr_make_dir_matchersr.r$r7r/setrwriter itemssearchflush)Zbase_dirr4ZslotZtinfoZ wildcard_path dir_matchersZwildcard_to_runZ temp_pkg_fileZ members_seenmembersmemberZpkgZ wildcard_dirZmatcherr r r rs&   rcCsFi}xs,     & __pycache__/changelog.cpython-36.pyc000064400000010117151030231510013320 0ustar003 gt`g@s|ddlmZddlmZddlZddlZddlZddlmZm Z m Z ddl Z ddl Z ddZ e jjGddde jjZdS) )absolute_import)unicode_literalsN)_P_loggerc CsDytjj|ddStttfk r>tjtdj |YnXdS)NT)ZfuzzyzNot a valid date: "{0}".) dateutilparserparse ValueError TypeError OverflowErrorargparseZArgumentTypeErrorrformat)valr/usr/lib/python3.6/changelog.py validate_date!src@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS)ChangelogCommand changelogzShow changelog data of packagescCsd|j}|jdddttdd|jddttdd|jdd d td d |jd dtdddS)Nz--sinceZDATEzZshow changelog entries since DATE. 
To avoid ambiguosity, YYYY-MM-DD format is recommended.)metavardefaulttypehelpz--countz2show given number of changelog entries per package)rrrz --upgradesF store_truezmshow only new changelog entries for packages, that provide an upgrade for some of already installed packages.)ractionrpackage*ZPACKAGE)nargsr)Zadd_mutually_exclusive_group add_argumentrrint)rZ filter_grouprrr set_argparser-s   zChangelogCommand.set_argparsercCs|jj}d|_d|_d|_dS)NT)clidemandsZavailable_reposZsack_activation changelogs)selfr"rrr configure>szChangelogCommand.configurecCs|jjj}|jjr|jddx|jjD]d}tjj|ddj |jjdddd}|jj rh|j|jj d|r||j |j }q*t jtd|q*Wn|jj r|j|jj d|jjr|j}n|j}|S)NT)empty)Z ignore_caseF)Z with_nevraZ with_providesZwith_filenames)ZreponamezNo match for argument: %s)baseZsackqueryoptsrZfiltermdnfZsubjectZSubjectZget_best_queryZrepounionZlatestrinforupgradesZ available)r$qpkgZpkg_qrrrr(Ds$    zChangelogCommand.querycCs>tj}x0t|D]$}|j|jp$|j|jfgj|qW|S)N) collections OrderedDictsorted setdefaultZ source_namenameZevrappend)r$Zpackagesby_srpmr/rrrr6Zs$zChangelogCommand.by_srpmcsTjjrjj|Sjjr.|jdjjSjjrJfdd|jDS|jSdS)Ncs$g|]}|djjjkr|qS)Z timestamp)r)sinceZdate).0chlog)r$rr fsz6ChangelogCommand.filter_changelogs..)r)r-r'Zlatest_changelogscountr#r7)r$rr)r$rfilter_changelogs`s z"ChangelogCommand.filter_changelogscCs|jjr"tjtdj|jjnP|jjrLtjtdd|jjj|jjn&|jjrdtjtdntjtd|j |j }xb|D]Z}t tdjdj t dd ||Dx*|j||d D]}t |jj|qWqWdS) NzListing changelogs since {}zListing only latest changelogzListing {} latest changelogszBListing only new changelogs since installed version of the packagezListing all changelogszChangelogs for {}z, cSsh|] }t|qSr)str)r8r/rrr {sz'ChangelogCommand.run..r)r)r7rr,rrr;rr-r6r(printjoinr2r<r'Zformat_changelog)r$r6r4r9rrrrunks     zChangelogCommand.runN)r) __name__ __module__ __qualname__aliasesrZsummary staticmethodr r%r(r6r<rArrrrr(s  r)Z __future__rrr r0Zdateutil.parserrZdnfpluginscorerrrr*Zdnf.clirZpluginZregister_commandr!ZCommandrrrrrs  debug.py000064400000030425151030231510006177 0ustar00# 
# Copyright (C) 2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import ucd from dnfpluginscore import _, logger import dnf import dnf.cli import gzip import hawkey import os import rpm import sys import time DEBUG_VERSION = "dnf-debug-dump version 1\n" class Debug(dnf.Plugin): name = 'debug' def __init__(self, base, cli): super(Debug, self).__init__(base, cli) self.base = base self.cli = cli if self.cli is not None: self.cli.register_command(DebugDumpCommand) self.cli.register_command(DebugRestoreCommand) class DebugDumpCommand(dnf.cli.Command): aliases = ("debug-dump",) summary = _("dump information about installed rpm packages to file") def __init__(self, cli): super(DebugDumpCommand, self).__init__(cli) self.dump_file = None def configure(self): self.cli.demands.sack_activation = True self.cli.demands.available_repos = True @staticmethod def set_argparser(parser): parser.add_argument( "--norepos", action="store_true", default=False, help=_("do not attempt to dump the repository contents.")) 
parser.add_argument( "filename", nargs="?", help=_("optional name of dump file")) def run(self): """create debug txt file and compress it, if no filename specified use dnf_debug_dump-.txt.gz by default""" filename = self.opts.filename if not filename: now = time.strftime("%Y-%m-%d_%T", time.localtime(time.time())) filename = "dnf_debug_dump-%s-%s.txt.gz" % (os.uname()[1], now) filename = os.path.abspath(filename) if filename.endswith(".gz"): self.dump_file = gzip.GzipFile(filename, "w") else: self.dump_file = open(filename, "w") self.write(DEBUG_VERSION) self.dump_system_info() self.dump_dnf_config_info() self.dump_rpm_problems() self.dump_packages(not self.opts.norepos) self.dump_rpmdb_versions() self.dump_file.close() print(_("Output written to: %s") % filename) def write(self, msg): if dnf.pycomp.PY3 and isinstance(self.dump_file, gzip.GzipFile): msg = bytes(msg, "utf8") dnf.pycomp.write_to_file(self.dump_file, msg) def dump_system_info(self): self.write("%%%%SYSTEM INFO\n") uname = os.uname() self.write(" uname: %s, %s\n" % (uname[2], uname[4])) self.write(" rpm ver: %s\n" % rpm.__version__) self.write(" python ver: %s\n" % sys.version.replace("\n", "")) return def dump_dnf_config_info(self): var = self.base.conf.substitutions plugins = ",".join([p.name for p in self.base._plugins.plugins]) self.write("%%%%DNF INFO\n") self.write(" arch: %s\n" % var["arch"]) self.write(" basearch: %s\n" % var["basearch"]) self.write(" releasever: %s\n" % var["releasever"]) self.write(" dnf ver: %s\n" % dnf.const.VERSION) self.write(" enabled plugins: %s\n" % plugins) self.write(" global excludes: %s\n" % ",".join(self.base.conf.excludepkgs)) return def dump_rpm_problems(self): self.write("%%%%RPMDB PROBLEMS\n") (missing, conflicts) = rpm_problems(self.base) self.write("".join(["Package %s requires %s\n" % (ucd(pkg), ucd(req)) for (req, pkg) in missing])) self.write("".join(["Package %s conflicts with %s\n" % (ucd(pkg), ucd(conf)) for (conf, pkg) in conflicts])) def 
dump_packages(self, load_repos): q = self.base.sack.query() # packages from rpmdb self.write("%%%%RPMDB\n") for p in sorted(q.installed()): self.write(" %s\n" % pkgspec(p)) if not load_repos: return self.write("%%%%REPOS\n") available = q.available() for repo in sorted(self.base.repos.iter_enabled(), key=lambda x: x.id): try: url = None if repo.metalink is not None: url = repo.metalink elif repo.mirrorlist is not None: url = repo.mirrorlist elif len(repo.baseurl) > 0: url = repo.baseurl[0] self.write("%%%s - %s\n" % (repo.id, url)) self.write(" excludes: %s\n" % ",".join(repo.excludepkgs)) for po in sorted(available.filter(reponame=repo.id)): self.write(" %s\n" % pkgspec(po)) except dnf.exceptions.Error as e: self.write("Error accessing repo %s: %s\n" % (repo, str(e))) continue return def dump_rpmdb_versions(self): self.write("%%%%RPMDB VERSIONS\n") version = self.base.sack._rpmdb_version() self.write(" all: %s\n" % version) return class DebugRestoreCommand(dnf.cli.Command): aliases = ("debug-restore",) summary = _("restore packages recorded in debug-dump file") def configure(self): self.cli.demands.sack_activation = True self.cli.demands.available_repos = True self.cli.demands.root_user = True if not self.opts.output: self.cli.demands.resolving = True @staticmethod def set_argparser(parser): parser.add_argument( "--output", action="store_true", help=_("output commands that would be run to stdout.")) parser.add_argument( "--install-latest", action="store_true", help=_("Install the latest version of recorded packages.")) parser.add_argument( "--ignore-arch", action="store_true", help=_("Ignore architecture and install missing packages matching " "the name, epoch, version and release.")) parser.add_argument( "--filter-types", metavar="[install, remove, replace]", default="install, remove, replace", help=_("limit to specified type")) parser.add_argument( "--remove-installonly", action="store_true", help=_('Allow removing of install-only packages. 
Using this option may ' 'result in an attempt to remove the running kernel.')) parser.add_argument( "filename", nargs=1, help=_("name of dump file")) def run(self): """Execute the command action here.""" if self.opts.filter_types: self.opts.filter_types = set( self.opts.filter_types.replace(",", " ").split()) dump_pkgs = self.read_dump_file(self.opts.filename[0]) self.process_installed(dump_pkgs, self.opts) self.process_dump(dump_pkgs, self.opts) def process_installed(self, dump_pkgs, opts): installed = self.base.sack.query().installed() installonly_pkgs = self.base._get_installonly_query(installed) for pkg in installed: pkg_remove = False spec = pkgspec(pkg) dumped_versions = dump_pkgs.get((pkg.name, pkg.arch), None) if dumped_versions is not None: evr = (pkg.epoch, pkg.version, pkg.release) if evr in dumped_versions: # the correct version is already installed dumped_versions[evr] = 'skip' else: # other version is currently installed if pkg in installonly_pkgs: # package is install-only, should be removed pkg_remove = True else: # package should be upgraded / downgraded if "replace" in opts.filter_types: action = 'replace' else: action = 'skip' for d_evr in dumped_versions.keys(): dumped_versions[d_evr] = action else: # package should not be installed pkg_remove = True if pkg_remove and "remove" in opts.filter_types: if pkg not in installonly_pkgs or opts.remove_installonly: if opts.output: print("remove %s" % spec) else: self.base.package_remove(pkg) def process_dump(self, dump_pkgs, opts): for (n, a) in sorted(dump_pkgs.keys()): dumped_versions = dump_pkgs[(n, a)] for (e, v, r) in sorted(dumped_versions.keys()): action = dumped_versions[(e, v, r)] if action == 'skip': continue if opts.ignore_arch: arch = "" else: arch = "." 
+ a if opts.install_latest and action == "install": pkg_spec = "%s%s" % (n, arch) else: pkg_spec = pkgtup2spec(n, arch, e, v, r) if action in opts.filter_types: if opts.output: print("%s %s" % (action, pkg_spec)) else: try: self.base.install(pkg_spec) except dnf.exceptions.MarkingError: logger.error(_("Package %s is not available"), pkg_spec) @staticmethod def read_dump_file(filename): if filename.endswith(".gz"): fobj = gzip.GzipFile(filename) else: fobj = open(filename) if ucd(fobj.readline()) != DEBUG_VERSION: logger.error(_("Bad dnf debug file: %s"), filename) raise dnf.exceptions.Error skip = True pkgs = {} for line in fobj: line = ucd(line) if skip: if line == "%%%%RPMDB\n": skip = False continue if not line or line[0] != " ": break pkg_spec = line.strip() nevra = hawkey.split_nevra(pkg_spec) # {(name, arch): {(epoch, version, release): action}} pkgs.setdefault((nevra.name, nevra.arch), {})[ (nevra.epoch, nevra.version, nevra.release)] = "install" return pkgs def rpm_problems(base): rpmdb = dnf.sack._rpmdb_sack(base) allpkgs = rpmdb.query().installed() requires = set() conflicts = set() for pkg in allpkgs: requires.update([(req, pkg) for req in pkg.requires if not str(req) == "solvable:prereqmarker" and not str(req).startswith("rpmlib(")]) conflicts.update([(conf, pkg) for conf in pkg.conflicts]) missing_requires = [(req, pkg) for (req, pkg) in requires if not allpkgs.filter(provides=req)] existing_conflicts = [(conf, pkg) for (conf, pkg) in conflicts if allpkgs.filter(provides=conf)] return missing_requires, existing_conflicts def pkgspec(pkg): return pkgtup2spec(pkg.name, pkg.arch, pkg.epoch, pkg.version, pkg.release) def pkgtup2spec(name, arch, epoch, version, release): a = "" if not arch else ".%s" % arch.lstrip('.') e = "" if epoch in (None, "") else "%s:" % epoch return "%s-%s%s-%s%s" % (name, e, version, release, a) repodiff.py000064400000026323151030231510006711 0ustar00# repodiff.py # DNF plugin adding a command to show differencies between two sets 
# of repositories. # # Copyright (C) 2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import dnf.cli from dnf.cli.option_parser import OptionParser import hawkey from dnfpluginscore import _ class RepoDiff(dnf.Plugin): name = "repodiff" def __init__(self, base, cli): super(RepoDiff, self).__init__(base, cli) if cli is None: return cli.register_command(RepoDiffCommand) class RepoDiffCommand(dnf.cli.Command): aliases = ("repodiff",) summary = _("List differences between two sets of repositories") @staticmethod def set_argparser(parser): # I'd like to use --old and --new options like Yum did. # But ability to disable abbreviated long options is added # only in Python >= 3.5 # So in command arguments we are not able to use arguments, # which are prefixes of main arguments (i.w. --new would be # treated as --newpackage). This is because we run .parse_args # two times - for main and then for command arguments. 
# https://stackoverflow.com/questions/33900846 parser.add_argument("--repo-old", "-o", default=[], action="append", dest="old", help=_("Specify old repository, can be used multiple times")) parser.add_argument("--repo-new", "-n", default=[], action="append", dest="new", help=_("Specify new repository, can be used multiple times")) parser.add_argument("--arch", "--archlist", "-a", default=[], action=OptionParser._SplitCallback, dest="arches", help=_("Specify architectures to compare, can be used " "multiple times. By default, only source rpms are " "compared.")) parser.add_argument("--size", "-s", action="store_true", help=_("Output additional data about the size of the changes.")) parser.add_argument("--compare-arch", action="store_true", help=_("Compare packages also by arch. By default " "packages are compared just by name.")) parser.add_argument("--simple", action="store_true", help=_("Output a simple one line message for modified packages.")) parser.add_argument("--downgrade", action="store_true", help=_("Split the data for modified packages between " "upgraded and downgraded packages.")) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.changelogs = True self.base.conf.disable_excludes = ["all"] # TODO yum was able to handle mirrorlist in --new/--old arguments # Can be resolved by improving --repofrompath option if not self.opts.new or not self.opts.old: msg = _("Both old and new repositories must be set.") raise dnf.exceptions.Error(msg) for repo in self.base.repos.all(): if repo.id in self.opts.new + self.opts.old: repo.enable() else: repo.disable() if not self.opts.arches: self.opts.arches = ['src'] def _pkgkey(self, pkg): if self.opts.compare_arch: return (pkg.name, pkg.arch) return pkg.name def _repodiff(self, old, new): '''compares packagesets old and new, returns dictionary with packages: added: only in new set removed: only in old set upgraded: in both old and new, new has bigger evr 
downgraded: in both old and new, new has lower evr obsoletes: dictionary of which old package is obsoleted by which new ''' old_d = dict([(self._pkgkey(p), p) for p in old]) old_keys = set(old_d.keys()) new_d = dict([(self._pkgkey(p), p) for p in new]) new_keys = set(new_d.keys()) # mapping obsoleted_package_from_old: obsoleted_by_package_from_new obsoletes = dict() for obsoleter in new.filter(obsoletes=old): for obsoleted in old.filter(provides=obsoleter.obsoletes): obsoletes[self._pkgkey(obsoleted)] = obsoleter evr_cmp = self.base.sack.evr_cmp repodiff = dict( added=[new_d[k] for k in new_keys - old_keys], removed=[old_d[k] for k in old_keys - new_keys], obsoletes=obsoletes, upgraded=[], downgraded=[]) for k in old_keys.intersection(new_keys): pkg_old = old_d[k] pkg_new = new_d[k] if pkg_old.evr == pkg_new.evr: continue if evr_cmp(pkg_old.evr, pkg_new.evr) > 0: repodiff['downgraded'].append((pkg_old, pkg_new)) else: repodiff['upgraded'].append((pkg_old, pkg_new)) return repodiff def _report(self, repodiff): def pkgstr(pkg): if self.opts.compare_arch: return str(pkg) return "%s-%s" % (pkg.name, pkg.evr) def sizestr(num): msg = str(num) if num > 0: msg += " ({})".format(dnf.cli.format.format_number(num).strip()) elif num < 0: msg += " (-{})".format(dnf.cli.format.format_number(-num).strip()) return msg def report_modified(pkg_old, pkg_new): msgs = [] if self.opts.simple: msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new))) else: msgs.append('') msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new))) msgs.append('-' * len(msgs[-1])) if pkg_old.changelogs: old_chlog = pkg_old.changelogs[0] else: old_chlog = None for chlog in pkg_new.changelogs: if old_chlog: if chlog['timestamp'] < old_chlog['timestamp']: break elif (chlog['timestamp'] == old_chlog['timestamp'] and chlog['author'] == old_chlog['author'] and chlog['text'] == old_chlog['text']): break msgs.append('* %s %s\n%s' % ( chlog['timestamp'].strftime("%a %b %d %Y"), dnf.i18n.ucd(chlog['author']), 
def _report(self, repodiff):
    """Print the repodiff result (as built by _repodiff) to stdout.

    Output layout is driven by the command options: --simple collapses
    modified packages to one line, --downgrade splits modified packages
    into upgraded/downgraded sections, --size appends byte deltas.
    """

    def pkgstr(pkg):
        # full NEVRA when comparing per-arch, name-evr otherwise
        if self.opts.compare_arch:
            return str(pkg)
        return "%s-%s" % (pkg.name, pkg.evr)

    def sizestr(num):
        # raw byte count plus a human-readable form in parentheses
        msg = str(num)
        if num > 0:
            msg += " ({})".format(dnf.cli.format.format_number(num).strip())
        elif num < 0:
            msg += " (-{})".format(dnf.cli.format.format_number(-num).strip())
        return msg

    def report_modified(pkg_old, pkg_new):
        msgs = []
        if self.opts.simple:
            msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new)))
        else:
            msgs.append('')
            msgs.append("%s -> %s" % (pkgstr(pkg_old), pkgstr(pkg_new)))
            msgs.append('-' * len(msgs[-1]))
            if pkg_old.changelogs:
                old_chlog = pkg_old.changelogs[0]
            else:
                old_chlog = None
            # print new changelog entries down to (but excluding) the
            # newest entry already present in the old package
            for chlog in pkg_new.changelogs:
                if old_chlog:
                    if chlog['timestamp'] < old_chlog['timestamp']:
                        break
                    elif (chlog['timestamp'] == old_chlog['timestamp'] and
                          chlog['author'] == old_chlog['author'] and
                          chlog['text'] == old_chlog['text']):
                        break
                msgs.append('* %s %s\n%s' % (
                    chlog['timestamp'].strftime("%a %b %d %Y"),
                    dnf.i18n.ucd(chlog['author']),
                    dnf.i18n.ucd(chlog['text'])))
        if self.opts.size:
            msgs.append(_("Size change: {} bytes").format(
                pkg_new.size - pkg_old.size))
        print('\n'.join(msgs))

    # accumulate byte totals per category for the --size summary
    sizes = dict(added=0, removed=0, upgraded=0, downgraded=0)
    for pkg in sorted(repodiff['added']):
        print(_("Added package : {}").format(pkgstr(pkg)))
        sizes['added'] += pkg.size
    for pkg in sorted(repodiff['removed']):
        print(_("Removed package: {}").format(pkgstr(pkg)))
        obsoletedby = repodiff['obsoletes'].get(self._pkgkey(pkg))
        if obsoletedby:
            print(_("Obsoleted by : {}").format(pkgstr(obsoletedby)))
        sizes['removed'] += pkg.size
    if self.opts.downgrade:
        if repodiff['upgraded']:
            print(_("\nUpgraded packages"))
            for (pkg_old, pkg_new) in sorted(repodiff['upgraded']):
                sizes['upgraded'] += (pkg_new.size - pkg_old.size)
                report_modified(pkg_old, pkg_new)
        if repodiff['downgraded']:
            print(_("\nDowngraded packages"))
            for (pkg_old, pkg_new) in sorted(repodiff['downgraded']):
                sizes['downgraded'] += (pkg_new.size - pkg_old.size)
                report_modified(pkg_old, pkg_new)
    else:
        # without --downgrade both directions are reported together
        modified = repodiff['upgraded'] + repodiff['downgraded']
        if modified:
            print(_("\nModified packages"))
            for (pkg_old, pkg_new) in sorted(modified):
                sizes['upgraded'] += (pkg_new.size - pkg_old.size)
                report_modified(pkg_old, pkg_new)
    print(_("\nSummary"))
    print(_("Added packages: {}").format(len(repodiff['added'])))
    print(_("Removed packages: {}").format(len(repodiff['removed'])))
    if self.opts.downgrade:
        print(_("Upgraded packages: {}").format(len(repodiff['upgraded'])))
        print(_("Downgraded packages: {}").format(len(repodiff['downgraded'])))
    else:
        print(_("Modified packages: {}").format(
            len(repodiff['upgraded']) + len(repodiff['downgraded'])))
    if self.opts.size:
        print(_("Size of added packages: {}").format(sizestr(sizes['added'])))
        print(_("Size of removed packages: {}").format(sizestr(sizes['removed'])))
        if not self.opts.downgrade:
            print(_("Size of modified packages: {}").format(
                sizestr(sizes['upgraded'] + sizes['downgraded'])))
        else:
            print(_("Size of upgraded packages: {}").format(
                sizestr(sizes['upgraded'])))
            print(_("Size of downgraded packages: {}").format(
                sizestr(sizes['downgraded'])))
        print(_("Size change: {}").format(
            sizestr(sizes['added'] + sizes['upgraded'] + sizes['downgraded'] -
                    sizes['removed'])))
# # the mechanism of scanning smaps for opened files and matching them back to # packages is heavily inspired by the original needs-restarting.py: # http://yum.baseurl.org/gitweb?p=yum-utils.git;a=blob;f=needs-restarting.py from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from dnfpluginscore import logger, _ import dnf import dnf.cli import dbus import functools import os import re import stat import time # For which package updates we should recommend a reboot # Mostly taken from https://access.redhat.com/solutions/27943 NEED_REBOOT = ['kernel', 'kernel-rt', 'glibc', 'linux-firmware', 'systemd', 'dbus', 'dbus-broker', 'dbus-daemon', 'microcode_ctl'] NEED_REBOOT_DEPENDS_ON_DBUS = ['zlib'] def get_options_from_dir(filepath, base): """ Provide filepath as string if single dir or list of strings Return set of package names contained in files under filepath """ if not os.path.exists(filepath): return set() options = set() for file in os.listdir(filepath): if os.path.isdir(file) or not file.endswith('.conf'): continue with open(os.path.join(filepath, file)) as fp: for line in fp: options.add((line.rstrip(), file)) packages = set() for pkg in base.sack.query().installed().filter(name={x[0] for x in options}): packages.add(pkg.name) for name, file in {x for x in options if x[0] not in packages}: logger.warning( _('No installed package found for package name "{pkg}" ' 'specified in needs-restarting file "{file}".'.format(pkg=name, file=file))) return packages def list_opened_files(uid): for (pid, smaps) in list_smaps(): try: if uid is not None and uid != owner_uid(smaps): continue with open(smaps, 'r', errors='replace') as smaps_file: lines = smaps_file.readlines() except EnvironmentError: logger.warning("Failed to read PID %d's smaps.", pid) continue for line in lines: ofile = smap2opened_file(pid, line) if ofile is not None: yield ofile def list_smaps(): for dir_ in 
os.listdir('/proc'): try: pid = int(dir_) except ValueError: continue smaps = '/proc/%d/smaps' % pid yield (pid, smaps) def memoize(func): sentinel = object() cache = {} def wrapper(param): val = cache.get(param, sentinel) if val is not sentinel: return val val = func(param) cache[param] = val return val return wrapper def owner_uid(fname): return os.stat(fname)[stat.ST_UID] def owning_package(sack, fname): matches = sack.query().filter(file=fname).run() if matches: return matches[0] return None def print_cmd(pid): cmdline = '/proc/%d/cmdline' % pid with open(cmdline) as cmdline_file: command = dnf.i18n.ucd(cmdline_file.read()) command = ' '.join(command.split('\000')) print('%d : %s' % (pid, command)) def get_service_dbus(pid): bus = dbus.SystemBus() systemd_manager_object = bus.get_object( 'org.freedesktop.systemd1', '/org/freedesktop/systemd1' ) systemd_manager_interface = dbus.Interface( systemd_manager_object, 'org.freedesktop.systemd1.Manager' ) service_proxy = None try: service_proxy = bus.get_object( 'org.freedesktop.systemd1', systemd_manager_interface.GetUnitByPID(pid) ) except dbus.DBusException as e: # There is no unit for the pid. Usually error is 'NoUnitForPid'. # Considering what we do at the bottom (just return if not service) # Then there's really no reason to exit here on that exception. # Log what's happened then move on. 
msg = str(e) logger.warning("Failed to get systemd unit for PID {}: {}".format(pid, msg)) return service_properties = dbus.Interface( service_proxy, dbus_interface="org.freedesktop.DBus.Properties") name = service_properties.Get( "org.freedesktop.systemd1.Unit", 'Id' ) if name.endswith(".service"): return name return def smap2opened_file(pid, line): slash = line.find('/') if slash < 0: return None if line.find('00:') >= 0: # not a regular file return None fn = line[slash:].strip() suffix_index = fn.rfind(' (deleted)') if suffix_index < 0: return OpenedFile(pid, fn, False) else: return OpenedFile(pid, fn[:suffix_index], True) class OpenedFile(object): RE_TRANSACTION_FILE = re.compile('^(.+);[0-9A-Fa-f]{8,}$') def __init__(self, pid, name, deleted): self.deleted = deleted self.name = name self.pid = pid @property def presumed_name(self): """Calculate the name of the file pre-transaction. In case of a file that got deleted during the transactionm, possibly just because of an upgrade to a newer version of the same file, RPM renames the old file to the same name with a hexadecimal suffix just before delting it. """ if self.deleted: match = self.RE_TRANSACTION_FILE.match(self.name) if match: return match.group(1) return self.name class ProcessStart(object): def __init__(self): self.boot_time = self.get_boot_time() self.sc_clk_tck = self.get_sc_clk_tck() @staticmethod def get_boot_time(): """ We have two sources from which to derive the boot time. These values vary depending on containerization, existence of a Real Time Clock, etc. For our purposes we want the latest derived value. - st_mtime of /proc/1 Reflects the time the first process was run after booting This works for all known cases except machines without a RTC - they awake at the start of the epoch. - /proc/uptime Seconds field of /proc/uptime subtracted from the current time Works for machines without RTC iff the current time is reasonably correct. 
def run(self):
    """Report processes/services/reboot needs after package updates.

    Three modes, selected by options:
      * --reboothint: list updated core packages and signal "reboot
        needed" via a nonzero exit code (raised dnf error).
      * --services: print systemd service names owning stale processes.
      * default: print PID and command line of every stale process.
    A process is "stale" when it maps a file owned by a package
    installed after the process started.
    """
    process_start = ProcessStart()
    owning_pkg_fn = functools.partial(owning_package, self.base.sack)
    # file -> package lookups repeat heavily across processes; cache them
    owning_pkg_fn = memoize(owning_pkg_fn)

    # extend the reboot list with names from drop-in config files
    opt = get_options_from_dir(os.path.join(
        self.base.conf.installroot,
        "etc/dnf/plugins/needs-restarting.d/"),
        self.base)
    NEED_REBOOT.extend(opt)
    if self.opts.reboothint:
        need_reboot = set()
        need_reboot_depends_on_dbus = set()
        installed = self.base.sack.query().installed()
        # core packages updated since boot
        for pkg in installed.filter(name=NEED_REBOOT):
            if pkg.installtime > process_start.boot_time:
                need_reboot.add(pkg.name)
        # packages that only require a reboot when a dbus variant is
        # installed (see NEED_REBOOT_DEPENDS_ON_DBUS)
        dbus_installed = installed.filter(name=['dbus', 'dbus-daemon', 'dbus-broker'])
        if len(dbus_installed) != 0:
            for pkg in installed.filter(name=NEED_REBOOT_DEPENDS_ON_DBUS):
                if pkg.installtime > process_start.boot_time:
                    need_reboot_depends_on_dbus.add(pkg.name)
        if need_reboot or need_reboot_depends_on_dbus:
            print(_('Core libraries or services have been updated '
                    'since boot-up:'))
            for name in sorted(need_reboot):
                print(' * %s' % name)
            for name in sorted(need_reboot_depends_on_dbus):
                print(' * %s (dependency of dbus. Recommending reboot of dbus)' % name)
            print()
            print(_('Reboot is required to fully utilize these updates.'))
            print(_('More information:'),
                  'https://access.redhat.com/solutions/27943')
            raise dnf.exceptions.Error()  # Sets exit code 1
        else:
            print(_('No core libraries or services have been updated '
                    'since boot-up.'))
            print(_('Reboot should not be necessary.'))
            return None
    stale_pids = set()
    # --useronly restricts the smaps scan to the invoking user's processes
    uid = os.geteuid() if self.opts.useronly else None
    for ofile in list_opened_files(uid):
        pkg = owning_pkg_fn(ofile.presumed_name)
        if pkg is None:
            continue
        # package was (re)installed after this process started
        if pkg.installtime > process_start(ofile.pid):
            stale_pids.add(ofile.pid)
    if self.opts.services:
        names = set([get_service_dbus(pid) for pid in sorted(stale_pids)])
        for name in names:
            if name is not None:
                print(name)
        return 0
    for pid in sorted(stale_pids):
        print_cmd(pid)
You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnfpluginscore import _, logger, P_ import dnf import dnf.cli import dnf.pycomp import dnf.util import fnmatch import hashlib import os import re import shutil @dnf.plugin.register_command class ConfigManagerCommand(dnf.cli.Command): aliases = ['config-manager'] summary = _('manage {prog} configuration options and repositories').format( prog=dnf.util.MAIN_PROG) @staticmethod def set_argparser(parser): parser.add_argument( 'crepo', nargs='*', metavar='repo', help=_('repo to modify')) parser.add_argument( '--save', default=False, action='store_true', help=_('save the current options (useful with --setopt)')) parser.add_argument( '--add-repo', default=[], action='append', metavar='URL', help=_('add (and enable) the repo from the specified file or url')) parser.add_argument( '--dump', default=False, action='store_true', help=_('print current configuration values to stdout')) parser.add_argument( '--dump-variables', default=False, action='store_true', help=_('print variable values to stdout')) enable_group = parser.add_mutually_exclusive_group() enable_group.add_argument("--set-enabled", default=False, dest="set_enabled", action="store_true", help=_("enable repos (automatically saves)")) enable_group.add_argument("--set-disabled", default=False, dest="set_disabled", action="store_true", help=_("disable repos (automatically saves)")) def configure(self): # setup sack and populate it with enabled repos demands = self.cli.demands demands.available_repos = True # if no 
def modify_repo(self):
    """Process --set-enabled, --set-disabled and --setopt options.

    Matches the requested repo ids/globs against configured repos,
    applies enable/disable and per-repo setopts, optionally writing the
    changes back to the repo files (--save) and/or dumping the resulting
    configuration (--dump).  Raises dnf.exceptions.Error when a
    requested repo id matches nothing.
    """
    matching_repos = []  # list of matched repositories
    not_matching_repos_id = set()  # IDs of not matched repositories

    def match_repos(key, add_matching_repos):
        # Record repos matching ``key``; remember unmatched keys so a
        # single aggregated error can be raised later.
        matching = self.base.repos.get_matching(key)
        if not matching:
            # Bug fix: record the parameter ``key``, not the caller's
            # loop variable ``name`` captured by closure — the old code
            # only worked because every call site happened to pass the
            # variable named ``name``.
            not_matching_repos_id.add(key)
        elif add_matching_repos:
            matching_repos.extend(matching)

    if self.opts.crepo:
        for name in self.opts.crepo:
            match_repos(name, True)
        if hasattr(self.opts, 'repo_setopts'):
            # repos named only via --setopt must exist, but are not
            # implicitly added to the modification list
            for name in self.opts.repo_setopts.keys():
                match_repos(name, False)
    else:
        if hasattr(self.opts, 'repo_setopts'):
            for name in self.opts.repo_setopts.keys():
                match_repos(name, True)

    if not_matching_repos_id:
        raise dnf.exceptions.Error(_("No matching repo to modify: %s.")
                                   % ', '.join(not_matching_repos_id))
    sbc = self.base.conf
    modify = {}
    if hasattr(self.opts, 'main_setopts') and self.opts.main_setopts:
        modify = self.opts.main_setopts
    if self.opts.dump_variables:
        for name, val in self.base.conf.substitutions.items():
            print("%s = %s" % (name, val))
    if not self.opts.crepo or 'main' in self.opts.crepo:
        if self.opts.save and modify:
            # modify [main] in global configuration file
            self.base.conf.write_raw_configfile(self.base.conf.config_file_path,
                                                'main', sbc.substitutions, modify)
        if self.opts.dump:
            print(self.base.output.fmtSection('main'))
            print(self.base.conf.dump())
    if not matching_repos:
        return
    # enabling/disabling is only meaningful when persisted
    if self.opts.set_enabled or self.opts.set_disabled:
        self.opts.save = True
    for repo in sorted(matching_repos):
        repo_modify = {}
        if self.opts.set_enabled:
            repo_modify['enabled'] = "1"
        elif self.opts.set_disabled:
            repo_modify['enabled'] = "0"
        if hasattr(self.opts, 'repo_setopts'):
            # --setopt keys may be globs matching several repo ids
            for repoid, setopts in self.opts.repo_setopts.items():
                if fnmatch.fnmatch(repo.id, repoid):
                    repo_modify.update(setopts)
        if self.opts.save and repo_modify:
            self.base.conf.write_raw_configfile(repo.repofile, repo.id,
                                                sbc.substitutions, repo_modify)
        if self.opts.dump:
            print(self.base.output.fmtSection('repo: ' + repo.id))
            print(repo.dump())
os.path.join(myrepodir, "%s.repo" % repoid) content = "[%s]\nname=%s\nbaseurl=%s\nenabled=1\n" % \ (repoid, reponame, url) if not save_to_file(destname, content): continue if errors_count: raise dnf.exceptions.Error(P_("Configuration of repo failed", "Configuration of repos failed", errors_count)) def save_to_file(filename, content): try: with open(filename, 'w+') as fd: dnf.pycomp.write_to_file(fd, content) os.chmod(filename, 0o644) except (IOError, OSError) as e: logger.error(_('Could not save repo to repofile %s: %s'), filename, e) return False return True # Regular expressions to sanitise cache filenames RE_SCHEME = re.compile(r'^\w+:/*(\w+:|www\.)?') RE_SLASH = re.compile(r'[?/:&#|~\*\[\]\(\)\'\\]+') RE_BEGIN = re.compile(r'^[,.]*') RE_FINAL = re.compile(r'[,.]*$') def sanitize_url_to_fs(url): """Return a filename suitable for the filesystem and for repo id Strips dangerous and common characters to create a filename we can use to store the cache in. """ try: if RE_SCHEME.match(url): if dnf.pycomp.PY3: url = url.encode('idna').decode('utf-8') else: if isinstance(url, str): url = url.decode('utf-8').encode('idna') else: url = url.encode('idna') if isinstance(url, unicode): url = url.encode('utf-8') except (UnicodeDecodeError, UnicodeEncodeError, UnicodeError, TypeError): pass url = RE_SCHEME.sub("", url) url = RE_SLASH.sub("_", url) url = RE_BEGIN.sub("", url) url = RE_FINAL.sub("", url) # limit length of url if len(url) > 250: parts = url[:185].split('_') lastindex = 185-len(parts[-1]) csum = hashlib.sha256() csum.update(url[lastindex:].encode('utf-8')) url = url[:lastindex] + '_' + csum.hexdigest() # remove all not allowed characters allowed_regex = "[^abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.:-]" return re.sub(allowed_regex, '', url)