Subject: [gentoo-commits] portage r11744 - in main/branches/prefix: bin pym/_emerge pym/portage pym/portage/dbapi
From: Fabian Groffen (grobian) @ 2008-10-29 20:02 UTC
To: gentoo-commits

Author: grobian
Date: 2008-10-29 20:02:41 +0000 (Wed, 29 Oct 2008)
New Revision: 11744

Modified:
   main/branches/prefix/bin/repoman
   main/branches/prefix/pym/_emerge/__init__.py
   main/branches/prefix/pym/portage/__init__.py
   main/branches/prefix/pym/portage/dbapi/vartree.py
Log:
   Merged from trunk -r11736:11743

   | 11737   | Fix graph.get() so that it works as intended, returning the  |
   | zmedico | node corresponding to the given key.                         |
   
   | 11738   | Remove manifest1 digest-* autoadd code. Thanks to grobian.   |
   | zmedico |                                                              |
   
   | 11739   | Update the auto-add message to say "Manifest" instead of     |
   | zmedico | "digests". Thanks to grobian.                                |
   
   | 11740   | Bug #238957 - When removing unneeded preserved libs inside   |
   | zmedico | dblink.unmerge(), use a digraph to properly track consumer   |
   |         | relationships between preserved libs. This fixes cases where |
   |         | preserved libs failed to be removed due to being consumed by |
   |         | other preserved libs.                                        |
   
   | 11741   | Fix $ROOT handling inside LinkageMap.findConsumers().        |
   | zmedico |                                                              |
   
   | 11742   | Fix interaction between LinkageMap.rebuild() and the package |
   | zmedico | replacement process in order to avoid problems with stale or |
   |         | unaccounted NEEDED. This solves a LinkageMap corruption      |
   |         | issue which caused findConsumers to return false positives   |
   |         | inside dblink.unmerge().                                     |
   
   | 11743   | Make config.setcpv() store the ebuild metadata inside        |
   | zmedico | self.configdict["pkg"], and reuse this metadata inside       |
   |         | doebuild() in order to avoid redundant portdbapi.aux_get()   |
   |         | calls.                                                       |
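
   The sketches below illustrate the main ideas in the merged revisions;
   they are simplified illustrations only and use hypothetical names where
   noted, not the portage implementations.

   The r11737 change gives each entry in digraph.nodes a third element
   holding the node itself, so that digraph.get(key) can hand back the node
   instance already stored in the graph instead of the internal
   (parents, children) tuple. A minimal sketch of the pattern (a
   hypothetical MiniGraph class, not the real portage.digraph):

class MiniGraph(object):
    """Sketch: a graph whose get() returns the canonical stored node."""
    def __init__(self):
        self.nodes = {}
    def add(self, node, parent=None):
        # Keep the node object itself next to the parent/child maps.
        if node not in self.nodes:
            self.nodes[node] = ({}, {}, node)
        if parent is not None:
            if parent not in self.nodes:
                self.nodes[parent] = ({}, {}, parent)
            self.nodes[node][0][parent] = None
            self.nodes[parent][1][node] = None
    def get(self, key, default=None):
        # Use self as a sentinel so a stored node that is falsy is not
        # confused with a missing key.
        entry = self.nodes.get(key, self)
        if entry is self:
            return default
        return entry[2]

   This matters in the vartree.py hunks further down: _LibGraphNode
   instances hash and compare by their object key but carry mutable
   alt_paths state, so lib_graph.get(preserved_node) has to return the
   instance that is already in the graph for the accumulated paths to end
   up on a single node.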
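
   r11740 replaces the old symlink-sorting heuristic with a consumer graph:
   every preserved library becomes a node, an edge runs from a library to
   each of its consumers, and libraries are unlinked in waves as they turn
   into root nodes (nothing consumes them any more). A rough sketch of that
   loop, assuming a graph object with the empty()/root_nodes()/
   difference_update() methods of the portage digraph and nodes carrying an
   alt_paths set:

def prune_preserved_libs(lib_graph, preserved_nodes, unlink):
    # Drop preserved libs that have no consumers left. Removing one wave
    # can turn further preserved libs into root nodes, so repeat until the
    # graph is empty or only still-consumed libs remain.
    while not lib_graph.empty():
        root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
        if not root_nodes:
            break
        lib_graph.difference_update(root_nodes)
        unlink_list = set()
        for node in root_nodes:
            unlink_list.update(node.alt_paths)
        for path in sorted(unlink_list):
            unlink(path)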
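
   The r11741 fix comes down to how absolute library paths are joined onto
   $ROOT: os.path.join() discards everything before an absolute component,
   so the recorded path has to lose its leading separator first, as the
   objs_dirs hunk in vartree.py does. For example (hypothetical paths):

import os

root = "/gentoo/prefix"            # hypothetical $ROOT
obj = "/usr/lib/libfoo.so.1"       # path as recorded inside $ROOT

# os.path.join() throws away 'root' when the second component is absolute:
assert os.path.join(root, obj) == "/usr/lib/libfoo.so.1"

# Stripping the leading separator keeps the path under $ROOT:
assert os.path.join(root, obj.lstrip(os.sep)) == \
    "/gentoo/prefix/usr/lib/libfoo.so.1"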
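
   r11742 is about where LinkageMap.rebuild() gets its NEEDED.ELF.2 data
   during a package replacement: the vdb entry of the instance being
   replaced is stale, so it is excluded, and the NEEDED file of the freshly
   merged instance is passed in directly from the build directory. A
   simplified sketch of those semantics (not the real LinkageMap.rebuild(),
   with a plain dict standing in for the vdb):

def collect_needed(needed_by_cpv, exclude_pkgs=None, include_file=None):
    # needed_by_cpv: dict mapping each installed cpv to the text of its
    # NEEDED.ELF.2 file (a stand-in for vardbapi.aux_get()).
    lines = []
    for cpv, needed in needed_by_cpv.items():
        if exclude_pkgs is not None and cpv in exclude_pkgs:
            # Skip instances whose vdb data is stale (being replaced).
            continue
        lines.extend(needed.splitlines())
    if include_file is not None:
        # Fresh data for the just-merged instance, read from the build dir.
        f = open(include_file)
        try:
            lines.extend(f.read().splitlines())
        finally:
            f.close()
    return lines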
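
   Finally, r11743 has config.setcpv() pull the whole aux_get() metadata
   set into self.configdict["pkg"], so that later doebuild() code can read
   EAPI, RESTRICT, SRC_URI and the rest from the config object instead of
   hitting portdbapi.aux_get() again. Reduced to a toy cache (hypothetical
   names, not the portage config API):

class PkgMetadataCache(object):
    """Toy stand-in for the configdict["pkg"] metadata cache."""
    def __init__(self, db, aux_keys):
        self._db = db              # anything with aux_get(cpv, keys)
        self._aux_keys = list(aux_keys)
        self.mycpv = None
        self.pkg = {}
    def setcpv(self, cpv):
        if cpv == self.mycpv:
            return                 # metadata for this package is cached
        self.mycpv = cpv
        values = self._db.aux_get(cpv, self._aux_keys)
        self.pkg = dict(zip(self._aux_keys, values))
    def get(self, key, default=""):
        return self.pkg.get(key, default)

   That is why, in the hunks below, doebuild() can simply read
   mysettings["EAPI"] and mysettings["RESTRICT"], and why the dependency
   check reuses mysettings.configdict["pkg"] as its metadata dict.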


Modified: main/branches/prefix/bin/repoman
===================================================================
--- main/branches/prefix/bin/repoman	2008-10-29 17:03:35 UTC (rev 11743)
+++ main/branches/prefix/bin/repoman	2008-10-29 20:02:41 UTC (rev 11744)
@@ -1665,17 +1665,9 @@
 				# It's a manifest... auto add
 				myautoadd+=[myunadded[x]]
 				del myunadded[x]
-			elif len(xs[-1])>=7:
-				if xs[-1][:7]=="digest-":
-					del xs[-2]
-					myeb="/".join(xs[:-1]+[xs[-1][7:]])+".ebuild"
-					if os.path.exists(myeb):
-						# Ebuild exists for digest... So autoadd it.
-						myautoadd+=[myunadded[x]]
-						del myunadded[x]
-		
+
 	if myautoadd:
-		print ">>> Auto-Adding missing digests..."
+		print ">>> Auto-Adding missing Manifest(s)..."
 		if options.pretend:
 			if vcs == "cvs":
 				print "(cvs add "+" ".join(myautoadd)+")"

Modified: main/branches/prefix/pym/_emerge/__init__.py
===================================================================
--- main/branches/prefix/pym/_emerge/__init__.py	2008-10-29 17:03:35 UTC (rev 11743)
+++ main/branches/prefix/pym/_emerge/__init__.py	2008-10-29 20:02:41 UTC (rev 11744)
@@ -10012,6 +10012,7 @@
 		# Since config.setcpv() isn't guaranteed to call config.reset() due to
 		# performance reasons, call it here to make sure all settings from the
 		# previous package get flushed out (such as PORTAGE_LOG_FILE).
+		temp_settings.reload()
 		temp_settings.reset()
 		return temp_settings
 

Modified: main/branches/prefix/pym/portage/__init__.py
===================================================================
--- main/branches/prefix/pym/portage/__init__.py	2008-10-29 17:03:35 UTC (rev 11743)
+++ main/branches/prefix/pym/portage/__init__.py	2008-10-29 20:02:41 UTC (rev 11744)
@@ -354,14 +354,14 @@
 		relationship to the parent, the relationship is left as hard."""
 		
 		if node not in self.nodes:
-			self.nodes[node] = ({}, {})
+			self.nodes[node] = ({}, {}, node)
 			self.order.append(node)
 		
 		if not parent:
 			return
 		
 		if parent not in self.nodes:
-			self.nodes[parent] = ({}, {})
+			self.nodes[parent] = ({}, {}, parent)
 			self.order.append(parent)
 		
 		if parent in self.nodes[node][1]:
@@ -442,7 +442,10 @@
 		return node in self.nodes
 
 	def get(self, key, default=None):
-		return self.nodes.get(key, default)
+		node_data = self.nodes.get(key, self)
+		if node_data is self:
+			return default
+		return node_data[2]
 
 	def all_nodes(self):
 		"""Return a list of all nodes in the graph"""
@@ -504,7 +507,7 @@
 		clone = digraph()
 		clone.nodes = {}
 		for k, v in self.nodes.iteritems():
-			clone.nodes[k] = (v[0].copy(), v[1].copy())
+			clone.nodes[k] = (v[0].copy(), v[1].copy(), v[2])
 		clone.order = self.order[:]
 		return clone
 
@@ -1952,19 +1955,33 @@
 
 		if self.mycpv == mycpv:
 			return
-		ebuild_phase = self.get("EBUILD_PHASE")
 		has_changed = False
 		self.mycpv = mycpv
+		cat, pf = catsplit(mycpv)
 		cp = dep_getkey(mycpv)
 		cpv_slot = self.mycpv
 		pkginternaluse = ""
 		iuse = ""
+		env_configdict = self.configdict["env"]
+		pkg_configdict = self.configdict["pkg"]
+		previous_iuse = pkg_configdict.get("IUSE")
+		for k in ("CATEGORY", "PKGUSE", "PF", "PORTAGE_USE"):
+			env_configdict.pop(k, None)
+		pkg_configdict["CATEGORY"] = cat
+		pkg_configdict["PF"] = pf
 		if mydb:
 			if not hasattr(mydb, "aux_get"):
-				slot = mydb["SLOT"]
-				iuse = mydb["IUSE"]
+				pkg_configdict.update(mydb)
 			else:
-				slot, iuse = mydb.aux_get(self.mycpv, ["SLOT", "IUSE"])
+				aux_keys = [k for k in auxdbkeys \
+					if not k.startswith("UNUSED_")]
+				for k, v in izip(aux_keys, mydb.aux_get(self.mycpv, aux_keys)):
+					pkg_configdict[k] = v
+			for k in pkg_configdict:
+				if k != "USE":
+					env_configdict.pop(k, None)
+			slot = pkg_configdict["SLOT"]
+			iuse = pkg_configdict["IUSE"]
 			if pkg is None:
 				cpv_slot = "%s:%s" % (self.mycpv, slot)
 			else:
@@ -2059,22 +2076,13 @@
 			has_changed = True
 		self.configdict["pkg"]["PKGUSE"] = self.puse[:] # For saving to PUSE file
 		self.configdict["pkg"]["USE"]    = self.puse[:] # this gets appended to USE
-		previous_iuse = self.configdict["pkg"].get("IUSE")
-		self.configdict["pkg"]["IUSE"] = iuse
 
-		# Always set known good values for these variables, since
-		# corruption of these can cause problems:
-		cat, pf = catsplit(self.mycpv)
-		self.configdict["pkg"]["CATEGORY"] = cat
-		self.configdict["pkg"]["PF"] = pf
-
 		if has_changed:
 			self.reset(keeping_pkg=1,use_cache=use_cache)
 
-		# If this is not an ebuild phase and reset() has not been called,
-		# it's safe to return early here if IUSE has not changed.
-		if not (has_changed or ebuild_phase) and \
-			previous_iuse == iuse:
+		# If reset() has not been called, it's safe to return
+		# early if IUSE has not changed.
+		if not has_changed and previous_iuse == iuse:
 			return
 
 		# Filter out USE flags that aren't part of IUSE. This has to
@@ -2092,7 +2100,7 @@
 		self.configdict["pkg"]["PORTAGE_IUSE"] = regex
 
 		ebuild_force_test = self.get("EBUILD_FORCE_TEST") == "1"
-		if ebuild_force_test and ebuild_phase and \
+		if ebuild_force_test and \
 			not hasattr(self, "_ebuild_force_test_msg_shown"):
 				self._ebuild_force_test_msg_shown = True
 				writemsg("Forcing test.\n", noiselevel=-1)
@@ -4707,12 +4715,7 @@
 	# so that the caller can override it.
 	tmpdir = mysettings["PORTAGE_TMPDIR"]
 
-	# This variable is a signal to setcpv where it triggers
-	# filtering of USE for the ebuild environment.
-	mysettings["EBUILD_PHASE"] = mydo
-	mysettings.backup_changes("EBUILD_PHASE")
-
-	if mydo != "depend":
+	if mydo != "depend" and mycpv != mysettings.mycpv:
 		"""For performance reasons, setcpv only triggers reset when it
 		detects a package-specific change in config.  For the ebuild
 		environment, a reset call is forced in order to ensure that the
@@ -4776,18 +4779,17 @@
 		mysettings["PORTAGE_QUIET"] = "1"
 
 	if mydo != "depend":
-		eapi, mysettings["INHERITED"], mysettings["SLOT"], mysettings["RESTRICT"]  = \
-			mydbapi.aux_get(mycpv, ["EAPI", "INHERITED", "SLOT", "RESTRICT"])
+		# Metadata vars such as EAPI and RESTRICT are
+		# set by the above config.setcpv() call.
+		eapi = mysettings["EAPI"]
 		if not eapi_is_supported(eapi):
 			# can't do anything with this.
 			raise portage.exception.UnsupportedAPIException(mycpv, eapi)
-		mysettings.pop("EAPI", None)
-		mysettings.configdict["pkg"]["EAPI"] = eapi
 		try:
 			mysettings["PORTAGE_RESTRICT"] = " ".join(flatten(
 				portage.dep.use_reduce(portage.dep.paren_reduce(
-				mysettings.get("RESTRICT","")),
-				uselist=mysettings.get("USE","").split())))
+				mysettings["RESTRICT"]),
+				uselist=mysettings["PORTAGE_USE"].split())))
 		except portage.exception.InvalidDependString:
 			# RESTRICT is validated again inside doebuild, so let this go
 			mysettings["PORTAGE_RESTRICT"] = ""
@@ -5648,20 +5650,35 @@
 
 		mycpv = "/".join((mysettings["CATEGORY"], mysettings["PF"]))
 
-		# Make sure we get the correct tree in case there are overlays.
-		mytree = os.path.realpath(
-			os.path.dirname(os.path.dirname(mysettings["O"])))
-		useflags = mysettings["PORTAGE_USE"].split()
-		try:
-			alist = mydbapi.getFetchMap(mycpv, useflags=useflags, mytree=mytree)
-			aalist = mydbapi.getFetchMap(mycpv, mytree=mytree)
-		except portage.exception.InvalidDependString, e:
-			writemsg("!!! %s\n" % str(e), noiselevel=-1)
-			writemsg("!!! Invalid SRC_URI for '%s'.\n" % mycpv, noiselevel=-1)
-			del e
-			return 1
-		mysettings["A"] = " ".join(alist)
-		mysettings["AA"] = " ".join(aalist)
+		emerge_skip_distfiles = returnpid
+		# Only try and fetch the files if we are going to need them ...
+		# otherwise, if user has FEATURES=noauto and they run `ebuild clean
+		# unpack compile install`, we will try and fetch 4 times :/
+		need_distfiles = not emerge_skip_distfiles and \
+			(mydo in ("fetch", "unpack") or \
+			mydo not in ("digest", "manifest") and "noauto" not in features)
+		alist = mysettings.configdict["pkg"].get("A")
+		aalist = mysettings.configdict["pkg"].get("AA")
+		if need_distfiles or alist is None or aalist is None:
+			# Make sure we get the correct tree in case there are overlays.
+			mytree = os.path.realpath(
+				os.path.dirname(os.path.dirname(mysettings["O"])))
+			useflags = mysettings["PORTAGE_USE"].split()
+			try:
+				alist = mydbapi.getFetchMap(mycpv, useflags=useflags,
+					mytree=mytree)
+				aalist = mydbapi.getFetchMap(mycpv, mytree=mytree)
+			except portage.exception.InvalidDependString, e:
+				writemsg("!!! %s\n" % str(e), noiselevel=-1)
+				writemsg("!!! Invalid SRC_URI for '%s'.\n" % mycpv,
+					noiselevel=-1)
+				del e
+				return 1
+			mysettings.configdict["pkg"]["A"] = " ".join(alist)
+			mysettings.configdict["pkg"]["AA"] = " ".join(aalist)
+		else:
+			alist = set(alist.split())
+			aalist = set(aalist.split())
 		if ("mirror" in features) or fetchall:
 			fetchme = aalist
 			checkme = aalist
@@ -5674,12 +5691,7 @@
 			# so do not check them again.
 			checkme = []
 
-		# Only try and fetch the files if we are going to need them ...
-		# otherwise, if user has FEATURES=noauto and they run `ebuild clean
-		# unpack compile install`, we will try and fetch 4 times :/
-		need_distfiles = (mydo in ("fetch", "unpack") or \
-			mydo not in ("digest", "manifest") and "noauto" not in features)
-		emerge_skip_distfiles = returnpid
+
 		if not emerge_skip_distfiles and \
 			need_distfiles and not fetch(
 			fetchme, mysettings, listonly=listonly, fetchonly=fetchonly):
@@ -5873,8 +5885,7 @@
 	misc_keys = ["LICENSE", "PROPERTIES", "PROVIDE", "RESTRICT", "SRC_URI"]
 	other_keys = ["SLOT"]
 	all_keys = dep_keys + misc_keys + other_keys
-	metadata = dict(izip(all_keys,
-		mydbapi.aux_get(mysettings.mycpv, all_keys)))
+	metadata = mysettings.configdict["pkg"]
 
 	class FakeTree(object):
 		def __init__(self, mydb):

Modified: main/branches/prefix/pym/portage/dbapi/vartree.py
===================================================================
--- main/branches/prefix/pym/portage/dbapi/vartree.py	2008-10-29 17:03:35 UTC (rev 11743)
+++ main/branches/prefix/pym/portage/dbapi/vartree.py	2008-10-29 20:02:41 UTC (rev 11744)
@@ -23,7 +23,7 @@
 	grabfile, grabdict, normalize_path, new_protect_filename, getlibpaths
 from portage.versions import pkgsplit, catpkgsplit, catsplit, best, pkgcmp
 
-from portage import listdir, dep_expand, flatten, key_expand, \
+from portage import listdir, dep_expand, digraph, flatten, key_expand, \
 	doebuild_environment, doebuild, env_update, prepare_build_dirs, \
 	abssymlink, movefile, _movefile, bsd_chflags, cpv_getkey
 
@@ -208,13 +208,25 @@
 			"""
 			return isinstance(self._key, tuple)
 
-	def rebuild(self, include_file=None):
+	class _LibGraphNode(_ObjectKey):
+		__slots__ = ("alt_paths",)
+
+		def __init__(self, obj, root):
+			LinkageMap._ObjectKey.__init__(self, obj, root)
+			self.alt_paths = set()
+
+		def __str__(self):
+			return str(sorted(self.alt_paths))
+
+	def rebuild(self, exclude_pkgs=None, include_file=None):
 		root = self._root
 		libs = {}
 		obj_key_cache = {}
 		obj_properties = {}
 		lines = []
 		for cpv in self._dbapi.cpv_all():
+			if exclude_pkgs is not None and cpv in exclude_pkgs:
+				continue
 			lines += self._dbapi.aux_get(cpv, ["NEEDED.ELF.2"])[0].split('\n')
 		# Cache NEEDED.* files avoid doing excessive IO for every rebuild.
 		self._dbapi.flush_cache()
@@ -565,7 +577,8 @@
 					raise KeyError("%s (%s) not in object list" % (obj_key, obj))
 
 		# Determine the directory(ies) from the set of objects.
-		objs_dirs = set([os.path.dirname(x) for x in objs])
+		objs_dirs = set(os.path.join(self._root,
+			os.path.dirname(x).lstrip(os.sep)) for x in objs)
 
 		# If there is another version of this lib with the
 		# same soname and the master link points to that
@@ -2349,7 +2362,8 @@
 					writemsg("!!! FAILED prerm: %s\n" % retval, noiselevel=-1)
 
 			self._unmerge_pkgfiles(pkgfiles, others_in_slot)
-			
+			self._clear_contents_cache()
+
 			# Remove the registration of preserved libs for this pkg instance
 			plib_registry = self.vartree.dbapi.plib_registry
 			plib_registry.unregister(self.mycpv, self.settings["SLOT"],
@@ -2369,64 +2383,67 @@
 				if retval != os.EX_OK:
 					writemsg("!!! FAILED postrm: %s\n" % retval, noiselevel=-1)
 
-			# regenerate reverse NEEDED map
-			self.vartree.dbapi.linkmap.rebuild()
+			# Skip this if another package in the same slot has just been
+			# merged on top of this package, since the other package has
+			# already called LinkageMap.rebuild() and passed its NEEDED file
+			# in as an argument.
+			if not others_in_slot:
+				self.vartree.dbapi.linkmap.rebuild(exclude_pkgs=(self.mycpv,))
 
 			# remove preserved libraries that don't have any consumers left
-			# FIXME: this code is quite ugly and can likely be optimized in several ways
+			# Since preserved libraries can be consumers of other preserved
+			# libraries, use a graph to track consumer relationships.
 			plib_dict = plib_registry.getPreservedLibs()
-			for cpv in plib_dict:
-				plib_dict[cpv].sort()
-				# for the loop below to work correctly, we need all
-				# symlinks to come before the actual files, such that
-				# the recorded symlinks (sonames) will be resolved into
-				# their real target before the object is found not to be
-				# in the reverse NEEDED map
-				def symlink_compare(x, y):
-					x = os.path.join(self.myroot, x.lstrip(os.path.sep))
-					y = os.path.join(self.myroot, y.lstrip(os.path.sep))
-					if os.path.islink(x):
-						if os.path.islink(y):
-							return 0
-						else:
-							return -1
-					elif os.path.islink(y):
-						return 1
+			lib_graph = digraph()
+			preserved_nodes = set()
+			root = self.myroot
+			for plibs in plib_dict.itervalues():
+				for f in plibs:
+					preserved_node = LinkageMap._LibGraphNode(f, root)
+					if not preserved_node.file_exists():
+						continue
+					existing_node = lib_graph.get(preserved_node)
+					if existing_node is not None:
+						preserved_node = existing_node
 					else:
-						return 0
+						lib_graph.add(preserved_node, None)
+					preserved_node.alt_paths.add(f)
+					preserved_nodes.add(preserved_node)
+					for c in self.vartree.dbapi.linkmap.findConsumers(f):
+						consumer_node = LinkageMap._LibGraphNode(c, root)
+						if not consumer_node.file_exists():
+							continue
+						# Note that consumers may also be providers.
+						existing_node = lib_graph.get(consumer_node)
+						if existing_node is not None:
+							consumer_node = existing_node
+						consumer_node.alt_paths.add(c)
+						lib_graph.add(preserved_node, consumer_node)
 
-				plib_dict[cpv].sort(symlink_compare)
-				for f in plib_dict[cpv]:
-					f_abs = os.path.join(self.myroot, f.lstrip(os.path.sep))
-					if not os.path.exists(f_abs):
-						continue
-					unlink_list = []
-					consumers = self.vartree.dbapi.linkmap.findConsumers(f)
-					if not consumers:
-						unlink_list.append(f_abs)
+			while not lib_graph.empty():
+				root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
+				if not root_nodes:
+					break
+				lib_graph.difference_update(root_nodes)
+				unlink_list = set()
+				for node in root_nodes:
+					unlink_list.update(node.alt_paths)
+				unlink_list = sorted(unlink_list)
+				for obj in unlink_list:
+					obj = os.path.join(root, obj.lstrip(os.sep))
+					if os.path.islink(obj):
+						obj_type = "sym"
 					else:
-						keep=False
-						for c in consumers:
-							c = os.path.join(self.myroot,
-								c.lstrip(os.path.sep))
-							if c not in self.getcontents():
-								keep=True
-								break
-						if not keep:
-							unlink_list.append(f_abs)
-					for obj in unlink_list:
-						try:
-							if os.path.islink(obj):
-								obj_type = "sym"
-							else:
-								obj_type = "obj"
-							os.unlink(obj)
-							showMessage("<<< !needed   %s %s\n" % (obj_type, obj))
-						except OSError, e:
-							if e.errno == errno.ENOENT:
-								pass
-							else:
-								raise e
+						obj_type = "obj"
+					try:
+						os.unlink(obj)
+					except OSError, e:
+						if e.errno != errno.ENOENT:
+							raise
+						del e
+					else:
+						showMessage("<<< !needed   %s %s\n" % (obj_type, obj))
+
 			plib_registry.pruneNonExisting()
 						
 		finally:
@@ -3556,6 +3573,10 @@
 				gid=portage_gid, mode=02750, mask=02)
 			writedict(cfgfiledict, conf_mem_file)
 
+		exclude_pkgs = set(dblnk.mycpv for dblnk in others_in_slot)
+		self.vartree.dbapi.linkmap.rebuild(exclude_pkgs=exclude_pkgs,
+			include_file=os.path.join(inforoot, "NEEDED.ELF.2"))
+
 		# These caches are populated during collision-protect and the data
 		# they contain is now invalid. It's very important to invalidate
 		# the contents_inodes cache so that FEATURES=unmerge-orphans
