From: "Fabian Groffen" <grobian@gentoo.org>
To: gentoo-commits@lists.gentoo.org
Subject: [gentoo-commits] proj/portage:prefix commit in: /
Date: Fri, 27 May 2011 17:41:47 +0000 (UTC)
Message-ID: <abd5adceac4b5f95de66be7516d4f29f24f00d02.grobian@gentoo>
commit: abd5adceac4b5f95de66be7516d4f29f24f00d02
Author: Fabian Groffen <grobian <AT> gentoo <DOT> org>
AuthorDate: Fri May 27 17:39:37 2011 +0000
Commit: Fabian Groffen <grobian <AT> gentoo <DOT> org>
CommitDate: Fri May 27 17:39:37 2011 +0000
URL: http://git.overlays.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=abd5adce
Merge remote-tracking branch 'overlays-gentoo-org/master' into prefix

Ported changes to LinkageMapELF to the other LinkageMaps

Conflicts:
    bin/etc-update
    bin/glsa-check
    bin/regenworld
    pym/portage/dbapi/vartree.py
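The port follows the upstream LinkageMapELF change in which _LibGraphNode is
constructed from an existing _ObjectKey instead of stat()ing the path a second
time. A condensed sketch of the calling convention that shows up in the
vartree.py hunks below (the standalone helper and its signature are
illustrative; the method names are the ones from the diff):

    def path_to_node(linkmap, path, path_node_map, lib_graph):
        # Map a filesystem path to a library graph node, reusing cached nodes.
        node = path_node_map.get(path)
        if node is None:
            # New form: reuse the stat result captured by linkmap._obj_key()
            # rather than the old _LibGraphNode(path, root) constructor,
            # which had to stat() the path again.
            node = linkmap._LibGraphNode(linkmap._obj_key(path))
            alt_path_node = lib_graph.get(node)
            if alt_path_node is not None:
                node = alt_path_node
            node.alt_paths.add(path)
            path_node_map[path] = node
        return node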
bin/ebuild.sh | 7 +
bin/etc-update | 8 +-
bin/glsa-check | 4 +-
bin/regenworld | 6 +-
bin/repoman | 2 +-
cnf/make.conf.sparc.diff | 2 +-
doc/package/ebuild/eapi/4.docbook | 4 +-
make.conf.txt | 719 --------------------
man/ebuild.5 | 4 +-
man/emerge.1 | 15 +-
man/make.conf.5 | 5 +-
pym/_emerge/AsynchronousLock.py | 49 ++-
pym/_emerge/AsynchronousTask.py | 5 +-
pym/_emerge/Binpkg.py | 3 +-
pym/_emerge/BinpkgFetcher.py | 7 +-
pym/_emerge/Blocker.py | 12 +-
pym/_emerge/BlockerDB.py | 9 +-
pym/_emerge/DepPriority.py | 2 +-
pym/_emerge/DepPrioritySatisfiedRange.py | 31 +-
pym/_emerge/EbuildBuild.py | 3 +-
pym/_emerge/EbuildBuildDir.py | 29 +-
pym/_emerge/EbuildMerge.py | 23 +-
pym/_emerge/EbuildPhase.py | 34 +-
pym/_emerge/FakeVartree.py | 21 +-
pym/_emerge/Package.py | 59 ++-
pym/_emerge/PackageUninstall.py | 102 +++-
pym/_emerge/Scheduler.py | 24 +-
pym/_emerge/Task.py | 31 +-
pym/_emerge/actions.py | 30 +-
pym/_emerge/depgraph.py | 617 ++++++++++++++----
pym/_emerge/help.py | 16 +-
pym/_emerge/main.py | 17 +-
pym/_emerge/resolver/backtracking.py | 7 +-
pym/_emerge/resolver/output_helpers.py | 4 +-
pym/_emerge/unmerge.py | 74 ++-
pym/portage/const.py | 2 +-
pym/portage/cvstree.py | 6 +-
pym/portage/dbapi/_MergeProcess.py | 43 +-
pym/portage/dbapi/vartree.py | 315 ++++++---
pym/portage/mail.py | 11 +-
pym/portage/output.py | 4 +-
pym/portage/package/ebuild/doebuild.py | 121 ++--
pym/portage/package/ebuild/getmaskingstatus.py | 7 +-
pym/portage/tests/ebuild/test_config.py | 4 +-
pym/portage/tests/locks/test_asynchronous_lock.py | 95 +++-
pym/portage/tests/resolver/ResolverPlayground.py | 99 +++-
pym/portage/tests/resolver/test_autounmask.py | 51 ++-
.../tests/resolver/test_circular_dependencies.py | 3 +-
pym/portage/tests/resolver/test_depth.py | 8 +-
pym/portage/tests/resolver/test_merge_order.py | 386 +++++++++++
pym/portage/tests/resolver/test_multirepo.py | 3 +
.../tests/resolver/test_old_dep_chain_display.py | 2 +
pym/portage/tests/resolver/test_simple.py | 2 +-
pym/portage/tests/resolver/test_slot_collisions.py | 3 +-
pym/portage/update.py | 4 +-
pym/portage/util/__init__.py | 29 +-
pym/portage/util/_dyn_libs/LinkageMapELF.py | 13 +-
pym/portage/util/_dyn_libs/LinkageMapMachO.py | 13 +-
pym/portage/util/_dyn_libs/LinkageMapPeCoff.py | 11 +-
pym/portage/util/_dyn_libs/LinkageMapXCoff.py | 11 +-
pym/portage/util/digraph.py | 10 +-
pym/portage/util/movefile.py | 5 +-
pym/portage/xml/metadata.py | 4 +-
63 files changed, 1948 insertions(+), 1302 deletions(-)
diff --cc bin/etc-update
index 5fbd345,2369f04..2054389
--- a/bin/etc-update
+++ b/bin/etc-update
@@@ -1,5 -1,5 +1,5 @@@
-#!/bin/bash
+#!@PORTAGE_BASH@
- # Copyright 1999-2007 Gentoo Foundation
+ # Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Author Brandon Low <lostlogic@gentoo.org>
diff --cc bin/glsa-check
index 64209ab,2f2d555..4f50a1f
--- a/bin/glsa-check
+++ b/bin/glsa-check
@@@ -1,5 -1,5 +1,5 @@@
-#!/usr/bin/python
+#!@PREFIX_PORTAGE_PYTHON@
- # Copyright 2008-2009 Gentoo Foundation
+ # Copyright 2008-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
diff --cc bin/regenworld
index e0e9774,6b5af4c..9e0e291
--- a/bin/regenworld
+++ b/bin/regenworld
@@@ -1,5 -1,5 +1,5 @@@
-#!/usr/bin/python
+#!@PREFIX_PORTAGE_PYTHON@
- # Copyright 1999-2010 Gentoo Foundation
+ # Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
diff --cc pym/portage/const.py
index 6057520,e91c009..00a53e4
--- a/pym/portage/const.py
+++ b/pym/portage/const.py
@@@ -132,10 -88,9 +132,10 @@@ EBUILD_PHASES = ("pretend",
SUPPORTED_FEATURES = frozenset([
"assume-digests", "binpkg-logs", "buildpkg", "buildsyspkg", "candy",
"ccache", "chflags", "collision-protect", "compress-build-logs",
- "digest", "distcc", "distlocks", "ebuild-locks", "fakeroot",
+ "digest", "distcc", "distcc-pump", "distlocks", "ebuild-locks", "fakeroot",
"fail-clean", "fixpackages", "force-mirror", "getbinpkg",
"installsources", "keeptemp", "keepwork", "fixlafiles", "lmirror",
+ "macossandbox", "macosprefixsandbox", "macosusersandbox",
"metadata-transfer", "mirror", "multilib-strict", "news",
"noauto", "noclean", "nodoc", "noinfo", "noman",
"nostrip", "notitles", "parallel-fetch", "parallel-install",
diff --cc pym/portage/dbapi/vartree.py
index 581300f,e742358..e0f0856
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@@ -2347,7 -2386,7 +2407,7 @@@ class dblink(object)
def path_to_node(path):
node = path_node_map.get(path)
if node is None:
- node = linkmap._LibGraphNode(path, root)
- node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
++ node = linkmap._LibGraphNode(linkmap._obj_key(path))
alt_path_node = lib_graph.get(node)
if alt_path_node is not None:
node = alt_path_node
@@@ -2512,15 -2552,7 +2573,15 @@@
def path_to_node(path):
node = path_node_map.get(path)
if node is None:
- node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
+ chost = self.settings.get('CHOST')
+ if chost.find('darwin') >= 0:
- node = LinkageMapMachO._LibGraphNode(path, root)
++ node = LinkageMapMachO._LibGraphNode(linkmap._obj_key(path))
+ elif chost.find('interix') >= 0 or chost.find('winnt') >= 0:
- node = LinkageMapPeCoff._LibGraphNode(path, root)
++ node = LinkageMapPeCoff._LibGraphNode(linkmap._obj_key(path))
+ elif chost.find('aix') >= 0:
- node = LinkageMapXCoff._LibGraphNode(path, root)
++ node = LinkageMapXCoff._LibGraphNode(linkmap._obj_key(path))
+ else:
- node = LinkageMap._LibGraphNode(path, root)
++ node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
alt_path_node = lib_graph.get(node)
if alt_path_node is not None:
node = alt_path_node
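For reference, the CHOST tests in the hunk above amount to a platform dispatch
over the available LinkageMap implementations. A condensed sketch (the class
names and module paths follow the diff; the helper function and the plain
LinkageMap alias for the ELF case are assumptions made for illustration):

    from portage.util._dyn_libs.LinkageMapELF import LinkageMapELF as LinkageMap
    from portage.util._dyn_libs.LinkageMapMachO import LinkageMapMachO
    from portage.util._dyn_libs.LinkageMapPeCoff import LinkageMapPeCoff
    from portage.util._dyn_libs.LinkageMapXCoff import LinkageMapXCoff

    def _linkage_map_class(chost):
        # Pick the LinkageMap implementation matching the CHOST triplet.
        if 'darwin' in chost:
            return LinkageMapMachO    # Mach-O objects (Darwin/macOS prefix)
        if 'interix' in chost or 'winnt' in chost:
            return LinkageMapPeCoff   # PE/COFF objects
        if 'aix' in chost:
            return LinkageMapXCoff    # XCOFF objects
        return LinkageMap             # default: ELF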
diff --cc pym/portage/util/_dyn_libs/LinkageMapMachO.py
index cbdf6c2,fef75b6..7ed004a
--- a/pym/portage/util/_dyn_libs/LinkageMapMachO.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapMachO.py
@@@ -59,7 -60,7 +59,7 @@@ class LinkageMapMachO(object)
"""Helper class used as _obj_properties keys for objects."""
- __slots__ = ("__weakref__", "_key")
- __slots__ = ("_key",)
++ __slots__ = ("_key")
def __init__(self, obj, root):
"""
diff --cc pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
index c90947e,0000000..25e8a45
mode 100644,000000..100644
--- a/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
@@@ -1,267 -1,0 +1,274 @@@
+# Copyright 1998-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import errno
+import logging
+import subprocess
+
+import portage
+from portage import _encodings
+from portage import _os_merge
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage.cache.mappings import slot_dict_class
+from portage.exception import CommandNotFound
+from portage.localization import _
+from portage.util import getlibpaths
+from portage.util import grabfile
+from portage.util import normalize_path
+from portage.util import writemsg_level
+from portage.const import EPREFIX
+from portage.util._dyn_libs.LinkageMapELF import LinkageMapELF
+
+class LinkageMapPeCoff(LinkageMapELF):
+
+ """Models dynamic linker dependencies."""
+
+ # NEEDED.PECOFF.1 has effectively the _same_ format as NEEDED.ELF.2,
+ # but we keep up the relation "scanelf" -> "NEEDED.ELF", "readpecoff" ->
+ # "NEEDED.PECOFF", "scanmacho" -> "NEEDED.MACHO", etc. others will follow.
+ _needed_aux_key = "NEEDED.PECOFF.1"
+
+ class _ObjectKey(LinkageMapELF._ObjectKey):
+
+ """Helper class used as _obj_properties keys for objects."""
+
+ def _generate_object_key(self, obj, root):
+ """
+ Generate object key for a given object. This is different from the
+ Linux implementation, since some systems (e.g. interix) don't have
+ "inodes", thus the inode field is always zero, or a random value,
+ making it inappropriate for identifying a file... :)
+
+ @param object: path to a file
+ @type object: string (example: '/usr/bin/bar')
+ @rtype: 2-tuple of types (bool, string)
+ @return:
+ 2-tuple of a boolean indicating existence, and the absolute path
+ """
+
+ os = _os_merge
+
+ try:
+ _unicode_encode(obj,
+ encoding=_encodings['merge'], errors='strict')
+ except UnicodeEncodeError:
+ # The package appears to have been merged with a
+ # different value of sys.getfilesystemencoding(),
+ # so fall back to utf_8 if appropriate.
+ try:
+ _unicode_encode(obj,
+ encoding=_encodings['fs'], errors='strict')
+ except UnicodeEncodeError:
+ pass
+ else:
+ os = portage.os
+
+ abs_path = os.path.join(root, obj.lstrip(os.sep))
+ try:
+ object_stat = os.stat(abs_path)
+ except OSError:
+ return (False, os.path.realpath(abs_path))
+ # On Interix, the inode field may always be zero, since the
+ # filesystem (NTFS) has no inodes ...
+ return (True, os.path.realpath(abs_path))
+
+ def file_exists(self):
+ """
+ Determine if the file for this key exists on the filesystem.
+
+ @rtype: Boolean
+ @return:
+ 1. True if the file exists.
+ 2. False if the file does not exist or is a broken symlink.
+
+ """
+ return self._key[0]
+
+ class _LibGraphNode(_ObjectKey):
+ __slots__ = ("alt_paths",)
+
- def __init__(self, obj, root):
- LinkageMapPeCoff._ObjectKey.__init__(self, obj, root)
++ def __init__(self, key):
++ """
++ Create a _LibGraphNode from an existing _ObjectKey.
++ This re-uses the _key attribute in order to avoid repeating
++ any previous stat calls, which helps to avoid potential race
++ conditions due to inconsistent stat results when the
++ file system is being modified concurrently.
++ """
++ self._key = key._key
+ self.alt_paths = set()
+
+ def __str__(self):
+ return str(sorted(self.alt_paths))
+
+ def rebuild(self, exclude_pkgs=None, include_file=None,
+ preserve_paths=None):
+ """
+ Raises CommandNotFound if there are preserved libs
+ and the readpecoff binary is not available.
+
+ @param exclude_pkgs: A set of packages that should be excluded from
+ the LinkageMap, since they are being unmerged and their NEEDED
+ entries are therefore irrelevant and would only serve to corrupt
+ the LinkageMap.
+ @type exclude_pkgs: set
+ @param include_file: The path of a file containing NEEDED entries for
+ a package which does not exist in the vardbapi yet because it is
+ currently being merged.
+ @type include_file: String
+ @param preserve_paths: Libraries preserved by a package instance that
+ is currently being merged. They need to be explicitly passed to the
+ LinkageMap, since they are not registered in the
+ PreservedLibsRegistry yet.
+ @type preserve_paths: set
+ """
+
+ os = _os_merge
+ root = self._root
+ root_len = len(root) - 1
+ self._clear_cache()
+ self._defpath.update(getlibpaths(self._root))
+ libs = self._libs
+ obj_properties = self._obj_properties
+
+ lines = []
+
+ # Data from include_file is processed first so that it
+ # overrides any data from previously installed files.
+ if include_file is not None:
+ for line in grabfile(include_file):
+ lines.append((include_file, line))
+
+ aux_keys = [self._needed_aux_key]
+ can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
+ if can_lock:
+ self._dbapi.lock()
+ try:
+ for cpv in self._dbapi.cpv_all():
+ if exclude_pkgs is not None and cpv in exclude_pkgs:
+ continue
+ needed_file = self._dbapi.getpath(cpv,
+ filename=self._needed_aux_key)
+ for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
+ lines.append((needed_file, line))
+ finally:
+ if can_lock:
+ self._dbapi.unlock()
+
+ # have to call readpecoff for preserved libs here as they aren't
+ # registered in NEEDED.PECOFF.1 files
+ plibs = set()
+ if preserve_paths is not None:
+ plibs.update(preserve_paths)
+ if self._dbapi._plib_registry and \
+ self._dbapi._plib_registry.hasEntries():
+ for cpv, items in \
+ self._dbapi._plib_registry.getPreservedLibs().items():
+ if exclude_pkgs is not None and cpv in exclude_pkgs:
+ # These preserved libs will either be unmerged,
+ # rendering them irrelevant, or they will be
+ # preserved in the replacement package and are
+ # already represented via the preserve_paths
+ # parameter.
+ continue
+ plibs.update(items)
+ if plibs:
+ args = ["readpecoff", self._dbapi.settings.get('CHOST')]
+ args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
+ for x in plibs)
+ try:
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ except EnvironmentError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ raise CommandNotFound(args[0])
+ else:
+ for l in proc.stdout:
+ try:
+ l = _unicode_decode(l,
+ encoding=_encodings['content'], errors='strict')
+ except UnicodeDecodeError:
+ l = _unicode_decode(l,
+ encoding=_encodings['content'], errors='replace')
+ writemsg_level(_("\nError decoding characters " \
+ "returned from readpecoff: %s\n\n") % (l,),
+ level=logging.ERROR, noiselevel=-1)
+ l = l[3:].rstrip("\n")
+ if not l:
+ continue
+ fields = l.split(";")
+ if len(fields) < 5:
+ writemsg_level(_("\nWrong number of fields " \
+ "returned from readpecoff: %s\n\n") % (l,),
+ level=logging.ERROR, noiselevel=-1)
+ continue
+ fields[1] = fields[1][root_len:]
+ plibs.discard(fields[1])
+ lines.append(("readpecoff", ";".join(fields)))
+ proc.wait()
+
+ if plibs:
+ # Preserved libraries that did not appear in the scanelf output.
+ # This is known to happen with statically linked libraries.
+ # Generate dummy lines for these, so we can assume that every
+ # preserved library has an entry in self._obj_properties. This
+ # is important in order to prevent findConsumers from raising
+ # an unwanted KeyError.
+ for x in plibs:
+ lines.append(("plibs", ";".join(['', x, '', '', ''])))
+
+ for location, l in lines:
+ l = l.rstrip("\n")
+ if not l:
+ continue
+ fields = l.split(";")
+ if len(fields) < 5:
+ writemsg_level(_("\nWrong number of fields " \
+ "in %s: %s\n\n") % (location, l),
+ level=logging.ERROR, noiselevel=-1)
+ continue
+ arch = fields[0]
+ obj = fields[1]
+ soname = fields[2]
+ path = set([normalize_path(x) \
+ for x in filter(None, fields[3].replace(
+ "${ORIGIN}", os.path.dirname(obj)).replace(
+ "$ORIGIN", os.path.dirname(obj)).split(":"))])
+ needed = [x for x in fields[4].split(",") if x]
+
+ obj_key = self._obj_key(obj)
+ indexed = True
+ myprops = obj_properties.get(obj_key)
+ if myprops is None:
+ indexed = False
+ myprops = (arch, needed, path, soname, set())
+ obj_properties[obj_key] = myprops
+ # All object paths are added into the obj_properties tuple.
+ myprops[4].add(obj)
+
+ # Don't index the same file more than once since only one
+ # set of data can be correct and therefore mixing data
+ # may corrupt the index (include_file overrides previously
+ # installed).
+ if indexed:
+ continue
+
+ arch_map = libs.get(arch)
+ if arch_map is None:
+ arch_map = {}
+ libs[arch] = arch_map
+ if soname:
+ soname_map = arch_map.get(soname)
+ if soname_map is None:
+ soname_map = self._soname_map_class(
+ providers=set(), consumers=set())
+ arch_map[soname] = soname_map
+ soname_map.providers.add(obj_key)
+ for needed_soname in needed:
+ soname_map = arch_map.get(needed_soname)
+ if soname_map is None:
+ soname_map = self._soname_map_class(
+ providers=set(), consumers=set())
+ arch_map[needed_soname] = soname_map
+ soname_map.consumers.add(obj_key)
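As the rebuild() loop above shows, NEEDED.PECOFF.1 entries carry the same five
semicolon-separated fields as NEEDED.ELF.2: arch, object path, soname,
colon-separated runpath, and comma-separated needed list. A small standalone
parser written against that layout, purely for illustration (the function does
not exist in portage and the sample entry is made up):

    import os

    def parse_needed_line(line):
        # arch;obj;soname;runpath(:-separated);needed(,-separated)
        fields = line.rstrip("\n").split(";")
        if len(fields) < 5:
            raise ValueError("wrong number of fields: %r" % line)
        arch, obj, soname = fields[0], fields[1], fields[2]
        origin = os.path.dirname(obj)
        runpath = set(
            p.replace("${ORIGIN}", origin).replace("$ORIGIN", origin)
            for p in fields[3].split(":") if p
        )
        needed = [x for x in fields[4].split(",") if x]
        return arch, obj, soname, runpath, needed

    # Hypothetical entry:
    #   parse_needed_line("x86_64;/usr/bin/foo.exe;;/usr/lib;libbar.dll,libbaz.dll")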
diff --cc pym/portage/util/_dyn_libs/LinkageMapXCoff.py
index 0e930fe,0000000..782cc54
mode 100644,000000..100644
--- a/pym/portage/util/_dyn_libs/LinkageMapXCoff.py
+++ b/pym/portage/util/_dyn_libs/LinkageMapXCoff.py
@@@ -1,319 -1,0 +1,326 @@@
+# Copyright 1998-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import errno
+import logging
+import subprocess
+
+import portage
+from portage import _encodings
+from portage import _os_merge
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage.cache.mappings import slot_dict_class
+from portage.exception import CommandNotFound
+from portage.localization import _
+from portage.util import getlibpaths
+from portage.util import grabfile
+from portage.util import normalize_path
+from portage.util import writemsg_level
+from portage.const import EPREFIX, BASH_BINARY
+from portage.util._dyn_libs.LinkageMapELF import LinkageMapELF
+
+class LinkageMapXCoff(LinkageMapELF):
+
+ """Models dynamic linker dependencies."""
+
+ _needed_aux_key = "NEEDED.XCOFF.1"
+
+ class _ObjectKey(LinkageMapELF._ObjectKey):
+
+ def __init__(self, obj, root):
+ LinkageMapELF._ObjectKey.__init__(self, obj, root)
+
+ def _generate_object_key(self, obj, root):
+ """
+ Generate object key for a given object.
+
+ @param object: path to a file
+ @type object: string (example: '/usr/bin/bar')
+ @rtype: 2-tuple of types (long, int) if object exists. string if
+ object does not exist.
+ @return:
+ 1. 2-tuple of object's inode and device from a stat call, if object
+ exists.
+ 2. realpath of object if object does not exist.
+
+ """
+
+ os = _os_merge
+
+ try:
+ _unicode_encode(obj,
+ encoding=_encodings['merge'], errors='strict')
+ except UnicodeEncodeError:
+ # The package appears to have been merged with a
+ # different value of sys.getfilesystemencoding(),
+ # so fall back to utf_8 if appropriate.
+ try:
+ _unicode_encode(obj,
+ encoding=_encodings['fs'], errors='strict')
+ except UnicodeEncodeError:
+ pass
+ else:
+ os = portage.os
+
+ abs_path = os.path.join(root, obj.lstrip(os.sep))
+ try:
+ object_stat = os.stat(abs_path)
+ except OSError:
+ # Use the realpath as the key if the file does not exist on the
+ # filesystem.
+ return os.path.realpath(abs_path)
+ # Return a tuple of the device and inode, as well as the basename,
+ # because with hardlinks the device and inode alone might be identical.
+ return (object_stat.st_dev, object_stat.st_ino, os.path.basename(abs_path.rstrip(os.sep)))
+
+ def file_exists(self):
+ """
+ Determine if the file for this key exists on the filesystem.
+
+ @rtype: Boolean
+ @return:
+ 1. True if the file exists.
+ 2. False if the file does not exist or is a broken symlink.
+
+ """
+ return isinstance(self._key, tuple)
+
+ class _LibGraphNode(_ObjectKey):
+ __slots__ = ("alt_paths",)
+
- def __init__(self, obj, root):
- LinkageMapXCoff._ObjectKey.__init__(self, obj, root)
++ def __init__(self, key):
++ """
++ Create a _LibGraphNode from an existing _ObjectKey.
++ This re-uses the _key attribute in order to avoid repeating
++ any previous stat calls, which helps to avoid potential race
++ conditions due to inconsistent stat results when the
++ file system is being modified concurrently.
++ """
++ self._key = key._key
+ self.alt_paths = set()
+
+ def __str__(self):
+ return str(sorted(self.alt_paths))
+
+ def rebuild(self, exclude_pkgs=None, include_file=None,
+ preserve_paths=None):
+ """
+ Raises CommandNotFound if there are preserved libs
+ and the scanelf binary is not available.
+
+ @param exclude_pkgs: A set of packages that should be excluded from
+ the LinkageMap, since they are being unmerged and their NEEDED
+ entries are therefore irrelevant and would only serve to corrupt
+ the LinkageMap.
+ @type exclude_pkgs: set
+ @param include_file: The path of a file containing NEEDED entries for
+ a package which does not exist in the vardbapi yet because it is
+ currently being merged.
+ @type include_file: String
+ @param preserve_paths: Libraries preserved by a package instance that
+ is currently being merged. They need to be explicitly passed to the
+ LinkageMap, since they are not registered in the
+ PreservedLibsRegistry yet.
+ @type preserve_paths: set
+ """
+
+ os = _os_merge
+ root = self._root
+ root_len = len(root) - 1
+ self._clear_cache()
+ self._defpath.update(getlibpaths(self._root))
+ libs = self._libs
+ obj_properties = self._obj_properties
+
+ lines = []
+
+ # Data from include_file is processed first so that it
+ # overrides any data from previously installed files.
+ if include_file is not None:
+ for line in grabfile(include_file):
+ lines.append((include_file, line))
+
+ aux_keys = [self._needed_aux_key]
+ can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
+ if can_lock:
+ self._dbapi.lock()
+ try:
+ for cpv in self._dbapi.cpv_all():
+ if exclude_pkgs is not None and cpv in exclude_pkgs:
+ continue
+ needed_file = self._dbapi.getpath(cpv,
+ filename=self._needed_aux_key)
+ for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
+ lines.append((needed_file, line))
+ finally:
+ if can_lock:
+ self._dbapi.unlock()
+
+ # have to call scanelf for preserved libs here as they aren't
+ # registered in NEEDED.XCOFF.1 files
+ plibs = set()
+ if preserve_paths is not None:
+ plibs.update(preserve_paths)
+ if self._dbapi._plib_registry and \
+ self._dbapi._plib_registry.hasEntries():
+ for cpv, items in \
+ self._dbapi._plib_registry.getPreservedLibs().items():
+ if exclude_pkgs is not None and cpv in exclude_pkgs:
+ # These preserved libs will either be unmerged,
+ # rendering them irrelevant, or they will be
+ # preserved in the replacement package and are
+ # already represented via the preserve_paths
+ # parameter.
+ continue
+ plibs.update(items)
+ if plibs:
+ for x in plibs:
+ args = [BASH_BINARY, "-c", ':'
+ + '; member="' + x + '"'
+ + '; archive=${member}'
+ + '; if [[ ${member##*/} == .*"["*"]" ]]'
+ + '; then member=${member%/.*}/${member##*/.}'
+ + '; archive=${member%[*}'
+ + '; fi'
+ + '; member=${member#${archive}}'
+ + '; [[ -r ${archive} ]] || chmod a+r "${archive}"'
+ + '; eval $(aixdll-query "${archive}${member}" FILE MEMBER FLAGS FORMAT RUNPATH DEPLIBS)'
+ + '; [[ -n ${member} ]] && needed=${FILE##*/} || needed='
+ + '; for deplib in ${DEPLIBS}'
+ + '; do eval deplib=${deplib}'
+ + '; if [[ ${deplib} != "." && ${deplib} != ".." ]]'
+ + '; then needed="${needed}${needed:+,}${deplib}"'
+ + '; fi'
+ + '; done'
+ + '; [[ -n ${MEMBER} ]] && MEMBER="[${MEMBER}]"'
+ + '; [[ " ${FLAGS} " == *" SHROBJ "* ]] && soname=${FILE##*/}${MEMBER} || soname='
+ + '; echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}}${MEMBER};${soname};${RUNPATH};${needed}"'
+ + '; [[ -z ${member} && -n ${MEMBER} ]] && echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}};${FILE##*/};;"'
+ ]
+ try:
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ except EnvironmentError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ raise CommandNotFound(args[0])
+ else:
+ for l in proc.stdout:
+ try:
+ l = _unicode_decode(l,
+ encoding=_encodings['content'], errors='strict')
+ except UnicodeDecodeError:
+ l = _unicode_decode(l,
+ encoding=_encodings['content'], errors='replace')
+ writemsg_level(_("\nError decoding characters " \
+ "returned from aixdll-query: %s\n\n") % (l,),
+ level=logging.ERROR, noiselevel=-1)
+ l = l.rstrip("\n")
+ if not l:
+ continue
+ fields = l.split(";")
+ if len(fields) < 5:
+ writemsg_level(_("\nWrong number of fields " \
+ "returned from aixdll-query: %s\n\n") % (l,),
+ level=logging.ERROR, noiselevel=-1)
+ continue
+ fields[1] = fields[1][root_len:]
+ plibs.discard(fields[1])
+ lines.append(("aixdll-query", ";".join(fields)))
+ proc.wait()
+
+ if plibs:
+ # Preserved libraries that did not appear in the bash
+ # aixdll-query code output. This is known to happen with
+ # statically linked libraries. Generate dummy lines for
+ # these, so we can assume that every preserved library has
+ # an entry in self._obj_properties. This is important in
+ # order to prevent findConsumers from raising an unwanted
+ # KeyError.
+ for x in plibs:
+ lines.append(("plibs", ";".join(['', x, '', '', ''])))
+
+ for location, l in lines:
+ l = l.rstrip("\n")
+ if not l:
+ continue
+ fields = l.split(";")
+ if len(fields) < 5:
+ writemsg_level(_("\nWrong number of fields " \
+ "in %s: %s\n\n") % (location, l),
+ level=logging.ERROR, noiselevel=-1)
+ continue
+ arch = fields[0]
+
+ def as_contentmember(obj):
+ if obj.endswith("]"):
+ if obj.find("/") >= 0:
+ return obj[:obj.rfind("/")] + "/." + obj[obj.rfind("/")+1:]
+ return "." + obj
+ return obj
+
+ obj = as_contentmember(fields[1])
+ soname = as_contentmember(fields[2])
+ path = set([normalize_path(x) \
+ for x in filter(None, fields[3].replace(
+ "${ORIGIN}", os.path.dirname(obj)).replace(
+ "$ORIGIN", os.path.dirname(obj)).split(":"))])
+ needed = [as_contentmember(x) for x in fields[4].split(",") if x]
+
+ obj_key = self._obj_key(obj)
+ indexed = True
+ myprops = obj_properties.get(obj_key)
+ if myprops is None:
+ indexed = False
+ myprops = (arch, needed, path, soname, set())
+ obj_properties[obj_key] = myprops
+ # All object paths are added into the obj_properties tuple.
+ myprops[4].add(obj)
+
+ # Don't index the same file more than once since only one
+ # set of data can be correct and therefore mixing data
+ # may corrupt the index (include_file overrides previously
+ # installed).
+ if indexed:
+ continue
+
+ arch_map = libs.get(arch)
+ if arch_map is None:
+ arch_map = {}
+ libs[arch] = arch_map
+ if soname:
+ soname_map = arch_map.get(soname)
+ if soname_map is None:
+ soname_map = self._soname_map_class(
+ providers=set(), consumers=set())
+ arch_map[soname] = soname_map
+ soname_map.providers.add(obj_key)
+ for needed_soname in needed:
+ soname_map = arch_map.get(needed_soname)
+ if soname_map is None:
+ soname_map = self._soname_map_class(
+ providers=set(), consumers=set())
+ arch_map[needed_soname] = soname_map
+ soname_map.consumers.add(obj_key)
+
+ def getSoname(self, obj):
+ """
+ Return the soname associated with an object.
+
+ @param obj: absolute path to an object
+ @type obj: string (example: '/usr/bin/bar')
+ @rtype: string
+ @return: soname as a string
+
+ """
+ if not self._libs:
+ self.rebuild()
+ if isinstance(obj, self._ObjectKey):
+ obj_key = obj
+ if obj_key not in self._obj_properties:
+ raise KeyError("%s not in object list" % obj_key)
+ return self._obj_properties[obj_key][3]
+ if obj not in self._obj_key_cache:
+ raise KeyError("%s not in object list" % obj)
+ return self._obj_properties[self._obj_key_cache[obj]][3]
+
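On AIX, shared objects are typically members of .a archives, which is why
LinkageMapXCoff routes every object and soname through the as_contentmember()
helper seen in rebuild() above. Repeated here as a self-contained snippet with
made-up example paths:

    def as_contentmember(obj):
        # Dot-prefix the basename of archive-member paths ("lib.a[shr.o]");
        # plain file paths pass through unchanged.
        if obj.endswith("]"):
            if obj.find("/") >= 0:
                return obj[:obj.rfind("/")] + "/." + obj[obj.rfind("/")+1:]
            return "." + obj
        return obj

    # Hypothetical examples:
    #   as_contentmember("/usr/lib/libfoo.a[shr.o]")  ->  "/usr/lib/.libfoo.a[shr.o]"
    #   as_contentmember("/usr/lib/libbar.so")        ->  "/usr/lib/libbar.so"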