From: "Fredrik Gustafsson" <fredrik.gustafsson@axis.com>
To: <openembedded-core@lists.openembedded.org>
Cc: <tools-cfpbuild-internal@axis.com>,
	Fredrik Gustafsson <fredrigu@axis.com>
Subject: [PATCH v4 12/12] Move deb package manager to its own dir
Date: Fri, 3 Jul 2020 13:44:02 +0200
Message-ID: <20200703114402.8850-13-fredrigu@axis.com>
In-Reply-To: <20200703114402.8850-1-fredrigu@axis.com>

This is part of a refactoring that splits the package manager code so
that other layers can provide additional package managers.
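
For illustration only (and not part of this change), a caller that
previously imported DpkgPM from oe.package_manager would now use the
new module path. The sketch below mirrors the DpkgRootfs hunk in this
patch; the write_index()/update()/install() calls and the package name
are hypothetical usage, not code added by this series:

    # Sketch of using the relocated module from BitBake metadata,
    # where 'd' is the usual datastore; the package name below is a
    # placeholder.
    from oe.package_managers.deb.package_manager import DpkgPM

    pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'),
                d.getVar('PACKAGE_ARCHS'),
                d.getVar('DPKG_ARCH'))
    pm.write_index()
    pm.update()
    pm.install(['example-package'])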

Signed-off-by: Fredrik Gustafsson <fredrigu@axis.com>
---
 meta/lib/oe/package_manager.py                | 512 ------------------
 .../package_managers/deb/package_manager.py   | 491 +++++++++++++++++
 meta/lib/oe/package_managers/deb/rootfs.py    |   2 +-
 meta/lib/oe/rootfs.py                         |   1 +
 4 files changed, 493 insertions(+), 513 deletions(-)
 create mode 100644 meta/lib/oe/package_managers/deb/package_manager.py

diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
index 65b6f55e9b..538da3a14c 100644
--- a/meta/lib/oe/package_manager.py
+++ b/meta/lib/oe/package_manager.py
@@ -149,79 +149,6 @@ class Indexer(object, metaclass=ABCMeta):
     def write_index(self):
         pass
 
-class DpkgIndexer(Indexer):
-    def _create_configs(self):
-        bb.utils.mkdirhier(self.apt_conf_dir)
-        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
-        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
-        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))
-
-        with open(os.path.join(self.apt_conf_dir, "preferences"),
-                "w") as prefs_file:
-            pass
-        with open(os.path.join(self.apt_conf_dir, "sources.list"),
-                "w+") as sources_file:
-            pass
-
-        with open(self.apt_conf_file, "w") as apt_conf:
-            with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
-                "apt", "apt.conf.sample")) as apt_conf_sample:
-                for line in apt_conf_sample.read().split("\n"):
-                    line = re.sub(r"#ROOTFS#", "/dev/null", line)
-                    line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
-                    apt_conf.write(line + "\n")
-
-    def write_index(self):
-        self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
-                "apt-ftparchive")
-        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
-        self._create_configs()
-
-        os.environ['APT_CONFIG'] = self.apt_conf_file
-
-        pkg_archs = self.d.getVar('PACKAGE_ARCHS')
-        if pkg_archs is not None:
-            arch_list = pkg_archs.split()
-        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
-        if sdk_pkg_archs is not None:
-            for a in sdk_pkg_archs.split():
-                if a not in pkg_archs:
-                    arch_list.append(a)
-
-        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
-        arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
-
-        apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
-        gzip = bb.utils.which(os.getenv('PATH'), "gzip")
-
-        index_cmds = []
-        deb_dirs_found = False
-        for arch in arch_list:
-            arch_dir = os.path.join(self.deploy_dir, arch)
-            if not os.path.isdir(arch_dir):
-                continue
-
-            cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)
-
-            cmd += "%s -fcn Packages > Packages.gz;" % gzip
-
-            with open(os.path.join(arch_dir, "Release"), "w+") as release:
-                release.write("Label: %s\n" % arch)
-
-            cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
-
-            index_cmds.append(cmd)
-
-            deb_dirs_found = True
-
-        if not deb_dirs_found:
-            bb.note("There are no packages in %s" % self.deploy_dir)
-            return
-
-        oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
-        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
-            raise NotImplementedError('Package feed signing not implementd for dpkg')
-
 class PkgsList(object, metaclass=ABCMeta):
     def __init__(self, d, rootfs_dir):
         self.d = d
@@ -231,51 +158,6 @@ class PkgsList(object, metaclass=ABCMeta):
     def list_pkgs(self):
         pass
 
-class OpkgPkgsList(PkgsList):
-    def __init__(self, d, rootfs_dir, config_file):
-        super(OpkgPkgsList, self).__init__(d, rootfs_dir)
-
-        self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
-        self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
-        self.opkg_args += self.d.getVar("OPKG_ARGS")
-
-    def list_pkgs(self, format=None):
-        cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args)
-
-        # opkg returns success even when it printed some
-        # "Collected errors:" report to stderr. Mixing stderr into
-        # stdout then leads to random failures later on when
-        # parsing the output. To avoid this we need to collect both
-        # output streams separately and check for empty stderr.
-        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-        cmd_output, cmd_stderr = p.communicate()
-        cmd_output = cmd_output.decode("utf-8")
-        cmd_stderr = cmd_stderr.decode("utf-8")
-        if p.returncode or cmd_stderr:
-            bb.fatal("Cannot get the installed packages list. Command '%s' "
-                     "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr))
-
-        return opkg_query(cmd_output)
-
-
-class DpkgPkgsList(PkgsList):
-
-    def list_pkgs(self):
-        cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
-               "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
-               "-W"]
-
-        cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")
-
-        try:
-            cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
-        except subprocess.CalledProcessError as e:
-            bb.fatal("Cannot get the installed packages list. Command '%s' "
-                     "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
-
-        return opkg_query(cmd_output)
-
-
 class PackageManager(object, metaclass=ABCMeta):
     """
     This is an abstract class. Do not instantiate this directly.
@@ -641,400 +523,6 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
                     else:
                         raise
 
-class OpkgDpkgPM(PackageManager):
-    def __init__(self, d, target_rootfs):
-        """
-        This is an abstract class. Do not instantiate this directly.
-        """
-        super(OpkgDpkgPM, self).__init__(d, target_rootfs)
-
-    def package_info(self, pkg, cmd):
-        """
-        Returns a dictionary with the package info.
-
-        This method extracts the common parts for Opkg and Dpkg
-        """
-
-        try:
-            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
-        except subprocess.CalledProcessError as e:
-            bb.fatal("Unable to list available packages. Command '%s' "
-                     "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
-        return opkg_query(output)
-
-    def extract(self, pkg, pkg_info):
-        """
-        Returns the path to a tmpdir where resides the contents of a package.
-
-        Deleting the tmpdir is responsability of the caller.
-
-        This method extracts the common parts for Opkg and Dpkg
-        """
-
-        ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
-        tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
-        pkg_path = pkg_info[pkg]["filepath"]
-
-        if not os.path.isfile(pkg_path):
-            bb.fatal("Unable to extract package for '%s'."
-                     "File %s doesn't exists" % (pkg, pkg_path))
-
-        tmp_dir = tempfile.mkdtemp()
-        current_dir = os.getcwd()
-        os.chdir(tmp_dir)
-        data_tar = 'data.tar.xz'
-
-        try:
-            cmd = [ar_cmd, 'x', pkg_path]
-            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
-            cmd = [tar_cmd, 'xf', data_tar]
-            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            bb.utils.remove(tmp_dir, recurse=True)
-            bb.fatal("Unable to extract %s package. Command '%s' "
-                     "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
-        except OSError as e:
-            bb.utils.remove(tmp_dir, recurse=True)
-            bb.fatal("Unable to extract %s package. Command '%s' "
-                     "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
-
-        bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
-        bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
-        bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
-        os.chdir(current_dir)
-
-        return tmp_dir
-
-    def _handle_intercept_failure(self, registered_pkgs):
-        self.mark_packages("unpacked", registered_pkgs.split())
-
-class DpkgPM(OpkgDpkgPM):
-    def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
-        super(DpkgPM, self).__init__(d, target_rootfs)
-        self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), deb_repo_workdir)
-
-        create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_DEB"), "package_write_deb", filterbydependencies)
-
-        if apt_conf_dir is None:
-            self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
-        else:
-            self.apt_conf_dir = apt_conf_dir
-        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
-        self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
-        self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")
-
-        self.apt_args = d.getVar("APT_ARGS")
-
-        self.all_arch_list = archs.split()
-        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
-        self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
-
-        self._create_configs(archs, base_archs)
-
-        self.indexer = DpkgIndexer(self.d, self.deploy_dir)
-
-    def mark_packages(self, status_tag, packages=None):
-        """
-        This function will change a package's status in /var/lib/dpkg/status file.
-        If 'packages' is None then the new_status will be applied to all
-        packages
-        """
-        status_file = self.target_rootfs + "/var/lib/dpkg/status"
-
-        with open(status_file, "r") as sf:
-            with open(status_file + ".tmp", "w+") as tmp_sf:
-                if packages is None:
-                    tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
-                                        r"Package: \1\n\2Status: \3%s" % status_tag,
-                                        sf.read()))
-                else:
-                    if type(packages).__name__ != "list":
-                        raise TypeError("'packages' should be a list object")
-
-                    status = sf.read()
-                    for pkg in packages:
-                        status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
-                                        r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
-                                        status)
-
-                    tmp_sf.write(status)
-
-        os.rename(status_file + ".tmp", status_file)
-
-    def run_pre_post_installs(self, package_name=None):
-        """
-        Run the pre/post installs for package "package_name". If package_name is
-        None, then run all pre/post install scriptlets.
-        """
-        info_dir = self.target_rootfs + "/var/lib/dpkg/info"
-        ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"])
-        control_scripts = [
-                ControlScript(".preinst", "Preinstall", "install"),
-                ControlScript(".postinst", "Postinstall", "configure")]
-        status_file = self.target_rootfs + "/var/lib/dpkg/status"
-        installed_pkgs = []
-
-        with open(status_file, "r") as status:
-            for line in status.read().split('\n'):
-                m = re.match(r"^Package: (.*)", line)
-                if m is not None:
-                    installed_pkgs.append(m.group(1))
-
-        if package_name is not None and not package_name in installed_pkgs:
-            return
-
-        os.environ['D'] = self.target_rootfs
-        os.environ['OFFLINE_ROOT'] = self.target_rootfs
-        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
-        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
-        os.environ['INTERCEPT_DIR'] = self.intercepts_dir
-        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
-
-        for pkg_name in installed_pkgs:
-            for control_script in control_scripts:
-                p_full = os.path.join(info_dir, pkg_name + control_script.suffix)
-                if os.path.exists(p_full):
-                    try:
-                        bb.note("Executing %s for package: %s ..." %
-                                 (control_script.name.lower(), pkg_name))
-                        output = subprocess.check_output([p_full, control_script.argument],
-                                stderr=subprocess.STDOUT).decode("utf-8")
-                        bb.note(output)
-                    except subprocess.CalledProcessError as e:
-                        bb.warn("%s for package %s failed with %d:\n%s" %
-                                (control_script.name, pkg_name, e.returncode,
-                                    e.output.decode("utf-8")))
-                        failed_postinsts_abort([pkg_name], self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))
-
-    def update(self):
-        os.environ['APT_CONFIG'] = self.apt_conf_file
-
-        self.deploy_dir_lock()
-
-        cmd = "%s update" % self.apt_get_cmd
-
-        try:
-            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            bb.fatal("Unable to update the package index files. Command '%s' "
-                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
-
-        self.deploy_dir_unlock()
-
-    def install(self, pkgs, attempt_only=False):
-        if attempt_only and len(pkgs) == 0:
-            return
-
-        os.environ['APT_CONFIG'] = self.apt_conf_file
-
-        cmd = "%s %s install --force-yes --allow-unauthenticated --no-remove %s" % \
-              (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
-
-        try:
-            bb.note("Installing the following packages: %s" % ' '.join(pkgs))
-            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. "
-                                              "Command '%s' returned %d:\n%s" %
-                                              (cmd, e.returncode, e.output.decode("utf-8")))
-
-        # rename *.dpkg-new files/dirs
-        for root, dirs, files in os.walk(self.target_rootfs):
-            for dir in dirs:
-                new_dir = re.sub(r"\.dpkg-new", "", dir)
-                if dir != new_dir:
-                    os.rename(os.path.join(root, dir),
-                              os.path.join(root, new_dir))
-
-            for file in files:
-                new_file = re.sub(r"\.dpkg-new", "", file)
-                if file != new_file:
-                    os.rename(os.path.join(root, file),
-                              os.path.join(root, new_file))
-
-
-    def remove(self, pkgs, with_dependencies=True):
-        if not pkgs:
-            return
-
-        if with_dependencies:
-            os.environ['APT_CONFIG'] = self.apt_conf_file
-            cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
-        else:
-            cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
-                  " -P --force-depends %s" % \
-                  (bb.utils.which(os.getenv('PATH'), "dpkg"),
-                   self.target_rootfs, self.target_rootfs, ' '.join(pkgs))
-
-        try:
-            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            bb.fatal("Unable to remove packages. Command '%s' "
-                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
-
-    def write_index(self):
-        self.deploy_dir_lock()
-
-        result = self.indexer.write_index()
-
-        self.deploy_dir_unlock()
-
-        if result is not None:
-            bb.fatal(result)
-
-    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
-        if feed_uris == "":
-            return
-
-        sources_conf = os.path.join("%s/etc/apt/sources.list"
-                                    % self.target_rootfs)
-        arch_list = []
-
-        if feed_archs is None:
-            for arch in self.all_arch_list:
-                if not os.path.exists(os.path.join(self.deploy_dir, arch)):
-                    continue
-                arch_list.append(arch)
-        else:
-            arch_list = feed_archs.split()
-
-        feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
-
-        with open(sources_conf, "w+") as sources_file:
-            for uri in feed_uris:
-                if arch_list:
-                    for arch in arch_list:
-                        bb.note('Adding dpkg channel at (%s)' % uri)
-                        sources_file.write("deb %s/%s ./\n" %
-                                           (uri, arch))
-                else:
-                    bb.note('Adding dpkg channel at (%s)' % uri)
-                    sources_file.write("deb %s ./\n" % uri)
-
-    def _create_configs(self, archs, base_archs):
-        base_archs = re.sub(r"_", r"-", base_archs)
-
-        if os.path.exists(self.apt_conf_dir):
-            bb.utils.remove(self.apt_conf_dir, True)
-
-        bb.utils.mkdirhier(self.apt_conf_dir)
-        bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
-        bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
-        bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/")
-
-        arch_list = []
-        for arch in self.all_arch_list:
-            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
-                continue
-            arch_list.append(arch)
-
-        with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
-            priority = 801
-            for arch in arch_list:
-                prefs_file.write(
-                    "Package: *\n"
-                    "Pin: release l=%s\n"
-                    "Pin-Priority: %d\n\n" % (arch, priority))
-
-                priority += 5
-
-            pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
-            for pkg in pkg_exclude.split():
-                prefs_file.write(
-                    "Package: %s\n"
-                    "Pin: release *\n"
-                    "Pin-Priority: -1\n\n" % pkg)
-
-        arch_list.reverse()
-
-        with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
-            for arch in arch_list:
-                sources_file.write("deb file:%s/ ./\n" %
-                                   os.path.join(self.deploy_dir, arch))
-
-        base_arch_list = base_archs.split()
-        multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
-        for variant in multilib_variants.split():
-            localdata = bb.data.createCopy(self.d)
-            variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False)
-            orig_arch = localdata.getVar("DPKG_ARCH")
-            localdata.setVar("DEFAULTTUNE", variant_tune)
-            variant_arch = localdata.getVar("DPKG_ARCH")
-            if variant_arch not in base_arch_list:
-                base_arch_list.append(variant_arch)
-
-        with open(self.apt_conf_file, "w+") as apt_conf:
-            with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
-                for line in apt_conf_sample.read().split("\n"):
-                    match_arch = re.match(r"  Architecture \".*\";$", line)
-                    architectures = ""
-                    if match_arch:
-                        for base_arch in base_arch_list:
-                            architectures += "\"%s\";" % base_arch
-                        apt_conf.write("  Architectures {%s};\n" % architectures);
-                        apt_conf.write("  Architecture \"%s\";\n" % base_archs)
-                    else:
-                        line = re.sub(r"#ROOTFS#", self.target_rootfs, line)
-                        line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
-                        apt_conf.write(line + "\n")
-
-        target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
-        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
-
-        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))
-
-        if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
-            open(os.path.join(target_dpkg_dir, "status"), "w+").close()
-        if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
-            open(os.path.join(target_dpkg_dir, "available"), "w+").close()
-
-    def remove_packaging_data(self):
-        bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True)
-        bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
-
-    def fix_broken_dependencies(self):
-        os.environ['APT_CONFIG'] = self.apt_conf_file
-
-        cmd = "%s %s --allow-unauthenticated -f install" % (self.apt_get_cmd, self.apt_args)
-
-        try:
-            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
-        except subprocess.CalledProcessError as e:
-            bb.fatal("Cannot fix broken dependencies. Command '%s' "
-                     "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
-
-    def list_installed(self):
-        return DpkgPkgsList(self.d, self.target_rootfs).list_pkgs()
-
-    def package_info(self, pkg):
-        """
-        Returns a dictionary with the package info.
-        """
-        cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
-        pkg_info = super(DpkgPM, self).package_info(pkg, cmd)
-
-        pkg_arch = pkg_info[pkg]["pkgarch"]
-        pkg_filename = pkg_info[pkg]["filename"]
-        pkg_info[pkg]["filepath"] = \
-                os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
-
-        return pkg_info
-
-    def extract(self, pkg):
-        """
-        Returns the path to a tmpdir where resides the contents of a package.
-
-        Deleting the tmpdir is responsability of the caller.
-        """
-        pkg_info = self.package_info(pkg)
-        if not pkg_info:
-            bb.fatal("Unable to get information for package '%s' while "
-                     "trying to extract the package."  % pkg)
-
-        tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
-        bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
-
-        return tmp_dir
 
 def generate_index_files(d):
     classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split()
diff --git a/meta/lib/oe/package_managers/deb/package_manager.py b/meta/lib/oe/package_managers/deb/package_manager.py
new file mode 100644
index 0000000000..6e5cb54f4d
--- /dev/null
+++ b/meta/lib/oe/package_managers/deb/package_manager.py
@@ -0,0 +1,491 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from oe.package_manager import *
+
+class DpkgIndexer(Indexer):
+    def _create_configs(self):
+        bb.utils.mkdirhier(self.apt_conf_dir)
+        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
+        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
+        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))
+
+        with open(os.path.join(self.apt_conf_dir, "preferences"),
+                "w") as prefs_file:
+            pass
+        with open(os.path.join(self.apt_conf_dir, "sources.list"),
+                "w+") as sources_file:
+            pass
+
+        with open(self.apt_conf_file, "w") as apt_conf:
+            with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
+                "apt", "apt.conf.sample")) as apt_conf_sample:
+                for line in apt_conf_sample.read().split("\n"):
+                    line = re.sub(r"#ROOTFS#", "/dev/null", line)
+                    line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
+                    apt_conf.write(line + "\n")
+
+    def write_index(self):
+        self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
+                "apt-ftparchive")
+        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
+        self._create_configs()
+
+        os.environ['APT_CONFIG'] = self.apt_conf_file
+
+        pkg_archs = self.d.getVar('PACKAGE_ARCHS')
+        if pkg_archs is not None:
+            arch_list = pkg_archs.split()
+        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
+        if sdk_pkg_archs is not None:
+            for a in sdk_pkg_archs.split():
+                if a not in pkg_archs:
+                    arch_list.append(a)
+
+        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
+        arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
+
+        apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
+        gzip = bb.utils.which(os.getenv('PATH'), "gzip")
+
+        index_cmds = []
+        deb_dirs_found = False
+        for arch in arch_list:
+            arch_dir = os.path.join(self.deploy_dir, arch)
+            if not os.path.isdir(arch_dir):
+                continue
+
+            cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)
+
+            cmd += "%s -fcn Packages > Packages.gz;" % gzip
+
+            with open(os.path.join(arch_dir, "Release"), "w+") as release:
+                release.write("Label: %s\n" % arch)
+
+            cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
+
+            index_cmds.append(cmd)
+
+            deb_dirs_found = True
+
+        if not deb_dirs_found:
+            bb.note("There are no packages in %s" % self.deploy_dir)
+            return
+
+        oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+            raise NotImplementedError('Package feed signing not implementd for dpkg')
+
+class DpkgPkgsList(PkgsList):
+
+    def list_pkgs(self):
+        cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
+               "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
+               "-W"]
+
+        cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")
+
+        try:
+            cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Cannot get the installed packages list. Command '%s' "
+                     "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
+
+        return opkg_query(cmd_output)
+
+class OpkgDpkgPM(PackageManager):
+    def __init__(self, d, target_rootfs):
+        """
+        This is an abstract class. Do not instantiate this directly.
+        """
+        super(OpkgDpkgPM, self).__init__(d, target_rootfs)
+
+    def package_info(self, pkg, cmd):
+        """
+        Returns a dictionary with the package info.
+
+        This method extracts the common parts for Opkg and Dpkg
+        """
+
+        try:
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Unable to list available packages. Command '%s' "
+                     "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
+        return opkg_query(output)
+
+    def extract(self, pkg, pkg_info):
+        """
+        Returns the path to a tmpdir where resides the contents of a package.
+
+        Deleting the tmpdir is responsability of the caller.
+
+        This method extracts the common parts for Opkg and Dpkg
+        """
+
+        ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
+        tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
+        pkg_path = pkg_info[pkg]["filepath"]
+
+        if not os.path.isfile(pkg_path):
+            bb.fatal("Unable to extract package for '%s'."
+                     "File %s doesn't exists" % (pkg, pkg_path))
+
+        tmp_dir = tempfile.mkdtemp()
+        current_dir = os.getcwd()
+        os.chdir(tmp_dir)
+        data_tar = 'data.tar.xz'
+
+        try:
+            cmd = [ar_cmd, 'x', pkg_path]
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            cmd = [tar_cmd, 'xf', data_tar]
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            bb.utils.remove(tmp_dir, recurse=True)
+            bb.fatal("Unable to extract %s package. Command '%s' "
+                     "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
+        except OSError as e:
+            bb.utils.remove(tmp_dir, recurse=True)
+            bb.fatal("Unable to extract %s package. Command '%s' "
+                     "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
+
+        bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
+        bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
+        bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
+        os.chdir(current_dir)
+
+        return tmp_dir
+
+    def _handle_intercept_failure(self, registered_pkgs):
+        self.mark_packages("unpacked", registered_pkgs.split())
+
+class DpkgPM(OpkgDpkgPM):
+    def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
+        super(DpkgPM, self).__init__(d, target_rootfs)
+        self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), deb_repo_workdir)
+
+        create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_DEB"), "package_write_deb", filterbydependencies)
+
+        if apt_conf_dir is None:
+            self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
+        else:
+            self.apt_conf_dir = apt_conf_dir
+        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
+        self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
+        self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")
+
+        self.apt_args = d.getVar("APT_ARGS")
+
+        self.all_arch_list = archs.split()
+        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
+        self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
+
+        self._create_configs(archs, base_archs)
+
+        self.indexer = DpkgIndexer(self.d, self.deploy_dir)
+
+    def mark_packages(self, status_tag, packages=None):
+        """
+        This function will change a package's status in /var/lib/dpkg/status file.
+        If 'packages' is None then the new_status will be applied to all
+        packages
+        """
+        status_file = self.target_rootfs + "/var/lib/dpkg/status"
+
+        with open(status_file, "r") as sf:
+            with open(status_file + ".tmp", "w+") as tmp_sf:
+                if packages is None:
+                    tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
+                                        r"Package: \1\n\2Status: \3%s" % status_tag,
+                                        sf.read()))
+                else:
+                    if type(packages).__name__ != "list":
+                        raise TypeError("'packages' should be a list object")
+
+                    status = sf.read()
+                    for pkg in packages:
+                        status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
+                                        r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
+                                        status)
+
+                    tmp_sf.write(status)
+
+        os.rename(status_file + ".tmp", status_file)
+
+    def run_pre_post_installs(self, package_name=None):
+        """
+        Run the pre/post installs for package "package_name". If package_name is
+        None, then run all pre/post install scriptlets.
+        """
+        info_dir = self.target_rootfs + "/var/lib/dpkg/info"
+        ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"])
+        control_scripts = [
+                ControlScript(".preinst", "Preinstall", "install"),
+                ControlScript(".postinst", "Postinstall", "configure")]
+        status_file = self.target_rootfs + "/var/lib/dpkg/status"
+        installed_pkgs = []
+
+        with open(status_file, "r") as status:
+            for line in status.read().split('\n'):
+                m = re.match(r"^Package: (.*)", line)
+                if m is not None:
+                    installed_pkgs.append(m.group(1))
+
+        if package_name is not None and not package_name in installed_pkgs:
+            return
+
+        os.environ['D'] = self.target_rootfs
+        os.environ['OFFLINE_ROOT'] = self.target_rootfs
+        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
+        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
+        os.environ['INTERCEPT_DIR'] = self.intercepts_dir
+        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
+
+        for pkg_name in installed_pkgs:
+            for control_script in control_scripts:
+                p_full = os.path.join(info_dir, pkg_name + control_script.suffix)
+                if os.path.exists(p_full):
+                    try:
+                        bb.note("Executing %s for package: %s ..." %
+                                 (control_script.name.lower(), pkg_name))
+                        output = subprocess.check_output([p_full, control_script.argument],
+                                stderr=subprocess.STDOUT).decode("utf-8")
+                        bb.note(output)
+                    except subprocess.CalledProcessError as e:
+                        bb.warn("%s for package %s failed with %d:\n%s" %
+                                (control_script.name, pkg_name, e.returncode,
+                                    e.output.decode("utf-8")))
+                        failed_postinsts_abort([pkg_name], self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))
+
+    def update(self):
+        os.environ['APT_CONFIG'] = self.apt_conf_file
+
+        self.deploy_dir_lock()
+
+        cmd = "%s update" % self.apt_get_cmd
+
+        try:
+            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Unable to update the package index files. Command '%s' "
+                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
+
+        self.deploy_dir_unlock()
+
+    def install(self, pkgs, attempt_only=False):
+        if attempt_only and len(pkgs) == 0:
+            return
+
+        os.environ['APT_CONFIG'] = self.apt_conf_file
+
+        cmd = "%s %s install --force-yes --allow-unauthenticated --no-remove %s" % \
+              (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
+
+        try:
+            bb.note("Installing the following packages: %s" % ' '.join(pkgs))
+            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. "
+                                              "Command '%s' returned %d:\n%s" %
+                                              (cmd, e.returncode, e.output.decode("utf-8")))
+
+        # rename *.dpkg-new files/dirs
+        for root, dirs, files in os.walk(self.target_rootfs):
+            for dir in dirs:
+                new_dir = re.sub(r"\.dpkg-new", "", dir)
+                if dir != new_dir:
+                    os.rename(os.path.join(root, dir),
+                              os.path.join(root, new_dir))
+
+            for file in files:
+                new_file = re.sub(r"\.dpkg-new", "", file)
+                if file != new_file:
+                    os.rename(os.path.join(root, file),
+                              os.path.join(root, new_file))
+
+
+    def remove(self, pkgs, with_dependencies=True):
+        if not pkgs:
+            return
+
+        if with_dependencies:
+            os.environ['APT_CONFIG'] = self.apt_conf_file
+            cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
+        else:
+            cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
+                  " -P --force-depends %s" % \
+                  (bb.utils.which(os.getenv('PATH'), "dpkg"),
+                   self.target_rootfs, self.target_rootfs, ' '.join(pkgs))
+
+        try:
+            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Unable to remove packages. Command '%s' "
+                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
+
+    def write_index(self):
+        self.deploy_dir_lock()
+
+        result = self.indexer.write_index()
+
+        self.deploy_dir_unlock()
+
+        if result is not None:
+            bb.fatal(result)
+
+    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
+        if feed_uris == "":
+            return
+
+        sources_conf = os.path.join("%s/etc/apt/sources.list"
+                                    % self.target_rootfs)
+        arch_list = []
+
+        if feed_archs is None:
+            for arch in self.all_arch_list:
+                if not os.path.exists(os.path.join(self.deploy_dir, arch)):
+                    continue
+                arch_list.append(arch)
+        else:
+            arch_list = feed_archs.split()
+
+        feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
+
+        with open(sources_conf, "w+") as sources_file:
+            for uri in feed_uris:
+                if arch_list:
+                    for arch in arch_list:
+                        bb.note('Adding dpkg channel at (%s)' % uri)
+                        sources_file.write("deb %s/%s ./\n" %
+                                           (uri, arch))
+                else:
+                    bb.note('Adding dpkg channel at (%s)' % uri)
+                    sources_file.write("deb %s ./\n" % uri)
+
+    def _create_configs(self, archs, base_archs):
+        base_archs = re.sub(r"_", r"-", base_archs)
+
+        if os.path.exists(self.apt_conf_dir):
+            bb.utils.remove(self.apt_conf_dir, True)
+
+        bb.utils.mkdirhier(self.apt_conf_dir)
+        bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
+        bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
+        bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/")
+
+        arch_list = []
+        for arch in self.all_arch_list:
+            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
+                continue
+            arch_list.append(arch)
+
+        with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
+            priority = 801
+            for arch in arch_list:
+                prefs_file.write(
+                    "Package: *\n"
+                    "Pin: release l=%s\n"
+                    "Pin-Priority: %d\n\n" % (arch, priority))
+
+                priority += 5
+
+            pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
+            for pkg in pkg_exclude.split():
+                prefs_file.write(
+                    "Package: %s\n"
+                    "Pin: release *\n"
+                    "Pin-Priority: -1\n\n" % pkg)
+
+        arch_list.reverse()
+
+        with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
+            for arch in arch_list:
+                sources_file.write("deb file:%s/ ./\n" %
+                                   os.path.join(self.deploy_dir, arch))
+
+        base_arch_list = base_archs.split()
+        multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
+        for variant in multilib_variants.split():
+            localdata = bb.data.createCopy(self.d)
+            variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False)
+            orig_arch = localdata.getVar("DPKG_ARCH")
+            localdata.setVar("DEFAULTTUNE", variant_tune)
+            variant_arch = localdata.getVar("DPKG_ARCH")
+            if variant_arch not in base_arch_list:
+                base_arch_list.append(variant_arch)
+
+        with open(self.apt_conf_file, "w+") as apt_conf:
+            with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
+                for line in apt_conf_sample.read().split("\n"):
+                    match_arch = re.match(r"  Architecture \".*\";$", line)
+                    architectures = ""
+                    if match_arch:
+                        for base_arch in base_arch_list:
+                            architectures += "\"%s\";" % base_arch
+                        apt_conf.write("  Architectures {%s};\n" % architectures);
+                        apt_conf.write("  Architecture \"%s\";\n" % base_archs)
+                    else:
+                        line = re.sub(r"#ROOTFS#", self.target_rootfs, line)
+                        line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
+                        apt_conf.write(line + "\n")
+
+        target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
+        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
+
+        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))
+
+        if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
+            open(os.path.join(target_dpkg_dir, "status"), "w+").close()
+        if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
+            open(os.path.join(target_dpkg_dir, "available"), "w+").close()
+
+    def remove_packaging_data(self):
+        bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True)
+        bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
+
+    def fix_broken_dependencies(self):
+        os.environ['APT_CONFIG'] = self.apt_conf_file
+
+        cmd = "%s %s --allow-unauthenticated -f install" % (self.apt_get_cmd, self.apt_args)
+
+        try:
+            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Cannot fix broken dependencies. Command '%s' "
+                     "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
+
+    def list_installed(self):
+        return DpkgPkgsList(self.d, self.target_rootfs).list_pkgs()
+
+    def package_info(self, pkg):
+        """
+        Returns a dictionary with the package info.
+        """
+        cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
+        pkg_info = super(DpkgPM, self).package_info(pkg, cmd)
+
+        pkg_arch = pkg_info[pkg]["pkgarch"]
+        pkg_filename = pkg_info[pkg]["filename"]
+        pkg_info[pkg]["filepath"] = \
+                os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
+
+        return pkg_info
+
+    def extract(self, pkg):
+        """
+        Returns the path to a tmpdir where resides the contents of a package.
+
+        Deleting the tmpdir is responsability of the caller.
+        """
+        pkg_info = self.package_info(pkg)
+        if not pkg_info:
+            bb.fatal("Unable to get information for package '%s' while "
+                     "trying to extract the package."  % pkg)
+
+        tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
+        bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
+
+        return tmp_dir
+
diff --git a/meta/lib/oe/package_managers/deb/rootfs.py b/meta/lib/oe/package_managers/deb/rootfs.py
index 544558ba7d..d2a392abb3 100644
--- a/meta/lib/oe/package_managers/deb/rootfs.py
+++ b/meta/lib/oe/package_managers/deb/rootfs.py
@@ -3,7 +3,6 @@
 #
 
 from oe.rootfs import Rootfs
-from oe.package_manager import DpkgPM
 from oe.manifest import Manifest
 from oe.utils import execute_pre_post_process
 import re
@@ -131,6 +130,7 @@ class DpkgRootfs(DpkgOpkgRootfs):
         bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
         from oe.package_managers.deb.manifest import DpkgManifest
         self.manifest = DpkgManifest(d, manifest_dir)
+        from oe.package_managers.deb.package_manager import DpkgPM
         self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'),
                          d.getVar('PACKAGE_ARCHS'),
                          d.getVar('DPKG_ARCH'))
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index d7c84f4e9c..a14e2b34b5 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -391,6 +391,7 @@ def image_list_installed_packages(d, rootfs_dir=None):
     img_type = d.getVar('IMAGE_PKGTYPE')
     from oe.package_managers.rpm.package_manager import RpmPkgsList
     from oe.package_managers.ipk.package_manager import OpkgPkgsList
+    from oe.package_managers.deb.package_manager import DpkgPkgsList
     if img_type == "rpm":
         return RpmPkgsList(d, rootfs_dir).list_pkgs()
     elif img_type == "ipk":
-- 
2.20.1


Thread overview: 19+ messages
2020-07-03 11:43 [PATCH v4] Add package managers as a plugin Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 01/12] Move rpm manifest to its own subdir Fredrik Gustafsson
2020-07-09 10:42   ` Fredrik Gustafsson
2020-07-09 16:19     ` Paul Barker
2020-07-14  7:02       ` Fredrik Gustafsson
2020-07-18 15:29         ` Paul Barker
2020-07-22 13:33           ` Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 02/12] Move ipk " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 03/12] Move deb " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 04/12] Move rpm rootfs to its own dir Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 05/12] Move ipk " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 06/12] Move deb " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 07/12] Move rpm sdk " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 08/12] Move ipk " Fredrik Gustafsson
2020-07-03 11:43 ` [PATCH v4 09/12] Move deb " Fredrik Gustafsson
2020-07-03 11:44 ` [PATCH v4 10/12] Move rpm package manager " Fredrik Gustafsson
2020-07-03 11:44 ` [PATCH v4 11/12] Move ipk " Fredrik Gustafsson
2020-07-03 11:44 ` [PATCH v4 12/12] Move deb package manager to its own dir Fredrik Gustafsson [this message]
2020-07-03 12:03 ` ✗ patchtest: failure for "[v4] Move rpm manifest to its ..." and 11 more Patchwork
