public inbox for gentoo-commits@lists.gentoo.org
* [gentoo-commits] proj/catalyst:master commit in: catalyst/, catalyst/targets/
@ 2014-05-05 19:17 Brian Dolbec
  2014-04-02 20:09 ` [gentoo-commits] proj/catalyst:pending " Brian Dolbec
  0 siblings, 1 reply; 8+ messages in thread
From: Brian Dolbec @ 2014-05-05 19:17 UTC (permalink / raw)
  To: gentoo-commits

commit:     e337857ffb575302368d13fc63c5d511aa27c244
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Fri Jan  3 18:42:26 2014 +0000
Commit:     Brian Dolbec <brian.dolbec <AT> gmail <DOT> com>
CommitDate: Wed Apr  2 20:04:23 2014 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=e337857f

Initial separation and creation of a hash_utils.py module
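
A quick usage sketch of the new API (not part of the patch; it assumes the
catalyst package is importable and the shash utility backing HASH_DEFINITIONS
is installed):

from catalyst.hash_utils import HashMap, HASH_DEFINITIONS

hash_map = HashMap(HASH_DEFINITIONS)
# each definition becomes a namedtuple with .func, .cmd, .args and .id
cmd_name = hash_map.hash_map["sha512"].cmd   # "shash", e.g. for find_binary()
# generate the digest text for a file via the configured external tool
digest = hash_map.generate_hash("/tmp/stage3.tar.bz2", hash_="sha512")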

---
 catalyst/hash_utils.py                   | 137 +++++++++++++++++++++++++++++++
 catalyst/main.py                         |  30 ++++---
 catalyst/support.py                      |  62 --------------
 catalyst/targets/generic_stage_target.py |  36 ++++----
 catalyst/targets/livecd_stage2_target.py |   4 +-
 catalyst/targets/stage2_target.py        |   7 +-
 6 files changed, 184 insertions(+), 92 deletions(-)

diff --git a/catalyst/hash_utils.py b/catalyst/hash_utils.py
new file mode 100644
index 0000000..b575ace
--- /dev/null
+++ b/catalyst/hash_utils.py
@@ -0,0 +1,137 @@
+
+import os
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError
+
+
+# Use HashMap.fields for the value legend
+# fields = ["func", "cmd", "args", "id"]
+HASH_DEFINITIONS = {
+	"adler32"  :["calc_hash2", "shash", ["-a", "ADLER32"], "ADLER32"],
+	"crc32"    :["calc_hash2", "shash", ["-a", "CRC32"], "CRC32"],
+	"crc32b"   :["calc_hash2", "shash", ["-a", "CRC32B"], "CRC32B"],
+	"gost"     :["calc_hash2", "shash", ["-a", "GOST"], "GOST"],
+	"haval128" :["calc_hash2", "shash", ["-a", "HAVAL128"], "HAVAL128"],
+	"haval160" :["calc_hash2", "shash", ["-a", "HAVAL160"], "HAVAL160"],
+	"haval192" :["calc_hash2", "shash", ["-a", "HAVAL192"], "HAVAL192"],
+	"haval224" :["calc_hash2", "shash", ["-a", "HAVAL224"], "HAVAL224"],
+	"haval256" :["calc_hash2", "shash", ["-a", "HAVAL256"], "HAVAL256"],
+	"md2"      :["calc_hash2", "shash", ["-a", "MD2"], "MD2"],
+	"md4"      :["calc_hash2", "shash", ["-a", "MD4"], "MD4"],
+	"md5"      :["calc_hash2", "shash", ["-a", "MD5"], "MD5"],
+	"ripemd128":["calc_hash2", "shash", ["-a", "RIPEMD128"], "RIPEMD128"],
+	"ripemd160":["calc_hash2", "shash", ["-a", "RIPEMD160"], "RIPEMD160"],
+	"ripemd256":["calc_hash2", "shash", ["-a", "RIPEMD256"], "RIPEMD256"],
+	"ripemd320":["calc_hash2", "shash", ["-a", "RIPEMD320"], "RIPEMD320"],
+	"sha1"     :["calc_hash2", "shash", ["-a", "SHA1"], "SHA1"],
+	"sha224"   :["calc_hash2", "shash", ["-a", "SHA224"], "SHA224"],
+	"sha256"   :["calc_hash2", "shash", ["-a", "SHA256"], "SHA256"],
+	"sha384"   :["calc_hash2", "shash", ["-a", "SHA384"], "SHA384"],
+	"sha512"   :["calc_hash2", "shash", ["-a", "SHA512"], "SHA512"],
+	"snefru128":["calc_hash2", "shash", ["-a", "SNEFRU128"], "SNEFRU128"],
+	"snefru256":["calc_hash2", "shash", ["-a", "SNEFRU256"], "SNEFRU256"],
+	"tiger"    :["calc_hash2", "shash", ["-a", "TIGER"], "TIGER"],
+	"tiger128" :["calc_hash2", "shash", ["-a", "TIGER128"], "TIGER128"],
+	"tiger160" :["calc_hash2", "shash", ["-a", "TIGER160"], "TIGER160"],
+	"whirlpool":["calc_hash2", "shash", ["-a", "WHIRLPOOL"], "WHIRLPOOL"],
+	}
+
+
+class HashMap(object):
+	'''Class for handling
+	Catalyst's hash generation'''
+
+	fields = ["func", "cmd", "args", "id"]
+
+
+	def __init__(self, hashes=None):
+		'''Class init
+
+		@param hashes: dictionary of Key:[function, cmd, cmd_args, Print string]
+		@param fields: list of ordered field names for the hashes
+			eg: ["func", "cmd", "args", "id"]
+		'''
+		if hashes is None:
+			hashes = {}
+		self.hash_map = {}
+
+		# create the hash definition namedtuple classes
+		for name in list(hashes):
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.hash_map[name] = obj._make(hashes[name])
+		del obj
+
+
+	def generate_hash(self, file_, hash_="crc32", verbose=False):
+		'''Prefered method of generating a hash for the passed in file_
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorythm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		try:
+			return getattr(self, self.hash_map[hash_].func)(
+				file_,
+				hash_,
+				verbose
+				)
+		except:
+			raise CatalystError,"Error generating hash, is appropriate " + \
+				"utility installed on your system?"
+
+
+	def calc_hash(self, file_, hash_, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorythm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		source = Popen(args, stdout=PIPE)
+		mylines = source.communicate()[0]
+		mylines=mylines[0].split()
+		result=mylines[0]
+		if verbose:
+			print _hash.id + " (%s) = %s" % (file_, result)
+		return result
+
+
+	def calc_hash2(self, file_, hash_type, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorythm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_type]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		#print("DEBUG: calc_hash2; args =", args)
+		source = Popen(args, stdout=PIPE)
+		output = source.communicate()
+		lines = output[0].split('\n')
+		#print("DEBUG: calc_hash2; output =", output)
+		header = lines[0]
+		h_f = lines[1].split()
+		hash_result = h_f[0]
+		short_file = os.path.split(h_f[1])[1]
+		result = header + "\n" + hash_result + "  " + short_file + "\n"
+		if verbose:
+			print header + " (%s) = %s" % (short_file, result)
+		return result
+
+
+

diff --git a/catalyst/main.py b/catalyst/main.py
index 6b90989..7bcf2cb 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -22,7 +22,10 @@ from . import __version__
 import catalyst.config
 import catalyst.util
 from catalyst.support import (required_build_targets,
-	valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
+	valid_build_targets, CatalystError, find_binary, LockInUse)
+
+from hash_utils import HashMap, HASH_DEFINITIONS
+
 
 
 conf_values={}
@@ -345,40 +348,43 @@ def main():
 	# import configuration file and import our main module using those settings
 	parse_config(myconfig)
 
-	# Start checking that digests are valid now that the hash_map was imported
-	# from catalyst.support
+	# initialze our hash and contents generators
+	hash_map = HashMap(HASH_DEFINITIONS)
+	conf_values["hash_map"] = hash_map
+
+	# Start checking that digests are valid now that hash_map is initialized
 	if "digests" in conf_values:
 		for i in conf_values["digests"].split():
-			if i not in hash_map:
+			if i not in HASH_DEFINITIONS:
 				print
 				print i+" is not a valid digest entry"
 				print "Valid digest entries:"
-				print hash_map.keys()
+				print HASH_DEFINITIONS.keys()
 				print
 				print "Catalyst aborting...."
 				sys.exit(2)
-			if find_binary(hash_map[i][1]) == None:
+			if find_binary(hash_map.hash_map[i].cmd) == None:
 				print
-				print "digest="+i
-				print "\tThe "+hash_map[i][1]+\
+				print "digest=" + i
+				print "\tThe " + hash_map.hash_map[i].cmd + \
 					" binary was not found. It needs to be in your system path"
 				print
 				print "Catalyst aborting...."
 				sys.exit(2)
 	if "hash_function" in conf_values:
-		if conf_values["hash_function"] not in hash_map:
+		if conf_values["hash_function"] not in HASH_DEFINITIONS:
 			print
 			print conf_values["hash_function"]+\
 				" is not a valid hash_function entry"
 			print "Valid hash_function entries:"
-			print hash_map.keys()
+			print HASH_DEFINITIONS.keys()
 			print
 			print "Catalyst aborting...."
 			sys.exit(2)
-		if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
+		if find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd) == None:
 			print
 			print "hash_function="+conf_values["hash_function"]
-			print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
+			print "\tThe "+hash_map.hash_map[conf_values["hash_function"]].cmd + \
 				" binary was not found. It needs to be in your system path"
 			print
 			print "Catalyst aborting...."

diff --git a/catalyst/support.py b/catalyst/support.py
index 5e7ce92..308d9c0 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -114,68 +114,6 @@ contents_map={
 	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
 }
 
-def generate_hash(file,hash_function="crc32",verbose=False):
-	try:
-		return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
-			hash_map[hash_function][3],verbose)
-	except:
-		raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
-
-def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	mylines=a.readlines()
-	a.close()
-	mylines=mylines[0].split()
-	result=mylines[0]
-	if verbose:
-		print id_string+" (%s) = %s" % (file, result)
-	return result
-
-def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	header=a.readline()
-	mylines=a.readline().split()
-	hash=mylines[0]
-	short_file=os.path.split(mylines[1])[1]
-	a.close()
-	result=header+hash+"  "+short_file+"\n"
-	if verbose:
-		print header+" (%s) = %s" % (short_file, result)
-	return result
-
-# This has map must be defined after the function calc_hash
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd,cmd_args,Print string
-hash_map={
-	 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
-	 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
-	 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
-	 "gost":[calc_hash2,"shash","-a GOST","GOST"],\
-	 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
-	 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
-	 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
-	 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
-	 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
-	 "md2":[calc_hash2,"shash","-a MD2","MD2"],\
-	 "md4":[calc_hash2,"shash","-a MD4","MD4"],\
-	 "md5":[calc_hash2,"shash","-a MD5","MD5"],\
-	 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
-	 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
-	 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
-	 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
-	 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
-	 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
-	 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
-	 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
-	 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
-	 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
-	 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
-	 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
-	 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
-	 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
-	 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
-	 }
 
 def read_from_clst(file):
 	line = ''

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index eaf2c1f..b6a6200 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -428,10 +428,11 @@ class generic_stage_target(generic_target):
 			if os.path.isfile(self.settings["source_path"]):
 				# XXX: Is this even necessary if the previous check passes?
 				if os.path.exists(self.settings["source_path"]):
-					self.settings["source_path_hash"]=\
-						generate_hash(self.settings["source_path"],\
-						hash_function=self.settings["hash_function"],\
-						verbose=False)
+					self.settings["source_path_hash"] = \
+						self.settings["hash_map"].generate_hash(
+							self.settings["source_path"],
+							hash_ = self.settings["hash_function"],
+							verbose = False)
 		print "Source path set to "+self.settings["source_path"]
 		if os.path.isdir(self.settings["source_path"]):
 			print "\tIf this is not desired, remove this directory or turn off"
@@ -457,18 +458,22 @@ class generic_stage_target(generic_target):
 			self.settings["snapshot"] + ".tar.xz")
 
 		if os.path.exists(self.settings["snapshot_path"]):
-			self.settings["snapshot_path_hash"]=\
-				generate_hash(self.settings["snapshot_path"],\
-				hash_function=self.settings["hash_function"],verbose=False)
+			self.settings["snapshot_path_hash"] = \
+				self.settings["hash_map"].generate_hash(
+					self.settings["snapshot_path"],
+					hash_ = self.settings["hash_function"],
+					verbose = False)
 		else:
 			self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
 				"/snapshots/" + self.settings["snapshot_name"] +
 				self.settings["snapshot"] + ".tar.bz2")
 
 			if os.path.exists(self.settings["snapshot_path"]):
-				self.settings["snapshot_path_hash"]=\
-					generate_hash(self.settings["snapshot_path"],\
-					hash_function=self.settings["hash_function"],verbose=False)
+				self.settings["snapshot_path_hash"] = \
+					self.settings["hash_map"].generate_hash(
+						self.settings["snapshot_path"],
+						hash_ = self.settings["hash_function"],
+						verbose = False)
 
 	def set_snapcache_path(self):
 		if "SNAPCACHE" in self.settings:
@@ -1716,6 +1721,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".DIGESTS"):
 			os.remove(file+".DIGESTS")
 		if "digests" in self.settings:
+			hash_map = self.settings["hash_map"]
 			if os.path.exists(file):
 				myf=open(file+".DIGESTS","w")
 				keys={}
@@ -1726,14 +1732,14 @@ class generic_stage_target(generic_target):
 				for f in [file, file+'.CONTENTS']:
 					if os.path.exists(f):
 						if "all" in array:
-							for k in hash_map.keys():
-								hash=generate_hash(f,hash_function=k,verbose=\
-									"VERBOSE" in self.settings)
+							for k in list(hash_map.hash_map):
+								hash = hash_map.generate_hash(f, hash_ = k,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 						else:
 							for j in array:
-								hash=generate_hash(f,hash_function=j,verbose=\
-									"VERBOSE" in self.settings)
+								hash = hash_map.generate_hash(f, hash_ = j,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 				myf.close()
 

diff --git a/catalyst/targets/livecd_stage2_target.py b/catalyst/targets/livecd_stage2_target.py
index 1bfd820..e784844 100644
--- a/catalyst/targets/livecd_stage2_target.py
+++ b/catalyst/targets/livecd_stage2_target.py
@@ -35,7 +35,9 @@ class livecd_stage2_target(generic_stage_target):
 	def set_source_path(self):
 		self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
 		if os.path.isfile(self.settings["source_path"]):
-			self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
+			self.settings["source_path_hash"] = \
+				self.settings["hash_map"].generate_hash(
+					self.settings["source_path"])
 		else:
 			self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
 		if not os.path.exists(self.settings["source_path"]):

diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py
index 15acdee..6377f5d 100644
--- a/catalyst/targets/stage2_target.py
+++ b/catalyst/targets/stage2_target.py
@@ -23,8 +23,11 @@ class stage2_target(generic_stage_target):
 			if os.path.isfile(self.settings["source_path"]):
 				if os.path.exists(self.settings["source_path"]):
 				# XXX: Is this even necessary if the previous check passes?
-					self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
-						hash_function=self.settings["hash_function"],verbose=False)
+					self.settings["source_path_hash"] = \
+						self.settings["hash_map"].generate_hash(
+							self.settings["source_path"],\
+							hash_=self.settings["hash_function"],
+							verbose=False)
 		print "Source path set to "+self.settings["source_path"]
 		if os.path.isdir(self.settings["source_path"]):
 			print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"


* [gentoo-commits] proj/catalyst:master commit in: catalyst/, catalyst/targets/
@ 2015-10-08 22:19 Mike Frysinger
  0 siblings, 0 replies; 8+ messages in thread
From: Mike Frysinger @ 2015-10-08 22:19 UTC (permalink / raw)
  To: gentoo-commits

commit:     a9a840f8e53a856de2fec6b242fdd6556bb03c0d
Author:     Mike Frysinger <vapier <AT> gentoo <DOT> org>
AuthorDate: Tue Oct  6 21:50:36 2015 +0000
Commit:     Mike Frysinger <vapier <AT> gentoo <DOT> org>
CommitDate: Thu Oct  8 22:07:57 2015 +0000
URL:        https://gitweb.gentoo.org/proj/catalyst.git/commit/?id=a9a840f8

support: punt pointless list_to_string helper

This function is a lot of code that doesn't do a whole lot.
It looks like a blind copy & paste & tweak from list_bashify.
If the input is a string, it returns a copy of it (i.e. it's
pretty much a nop).  If it's a list, it runs ' '.join() on it.
That's all.

But let's not focus on the craziness of the implementation.  The
few consumers of this function are already passing in strings,
in which case their use is pointless.  Once we delete those
callers, we can delete the implementation.
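
For reference, the helper's entire observable behaviour amounts to roughly
this (an equivalent sketch, not the removed code verbatim):

def list_to_string(mylist):
    # strings pass through unchanged; lists get space-joined
    if isinstance(mylist, str):
        return mylist
    return ' '.join(mylist)

which is why callers that already hold a string can simply drop the call.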

 catalyst/support.py        | 13 -------------
 catalyst/targets/stage1.py | 10 +++++-----
 catalyst/targets/stage2.py | 10 +++++-----
 3 files changed, 10 insertions(+), 23 deletions(-)

diff --git a/catalyst/support.py b/catalyst/support.py
index 62be63a..a26e49c 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -41,19 +41,6 @@ def list_bashify(mylist):
 	return mypack
 
 
-def list_to_string(mylist):
-	if type(mylist)==types.StringType:
-		mypack=[mylist]
-	else:
-		mypack=mylist[:]
-	for x in range(0,len(mypack)):
-		# surround args with quotes for passing to bash,
-		# allows things like "<" to remain intact
-		mypack[x]=mypack[x]
-	mypack = ' '.join(mypack)
-	return mypack
-
-
 class CatalystError(Exception):
 	def __init__(self, message, print_traceback=False):
 		if message:

diff --git a/catalyst/targets/stage1.py b/catalyst/targets/stage1.py
index d5cc298..b5dfac7 100644
--- a/catalyst/targets/stage1.py
+++ b/catalyst/targets/stage1.py
@@ -5,7 +5,7 @@ stage1 target
 
 import os
 
-from catalyst.support import normpath, list_to_string
+from catalyst.support import normpath
 from catalyst.fileops import ensure_dirs
 from catalyst.base.stagebase import StageBase
 
@@ -38,19 +38,19 @@ class stage1(StageBase):
 
 	def override_chost(self):
 		if "chost" in self.settings:
-			self.settings["CHOST"]=list_to_string(self.settings["chost"])
+			self.settings["CHOST"] = self.settings["chost"]
 
 	def override_cflags(self):
 		if "cflags" in self.settings:
-			self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
+			self.settings["CFLAGS"] = self.settings["cflags"]
 
 	def override_cxxflags(self):
 		if "cxxflags" in self.settings:
-			self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
+			self.settings["CXXFLAGS"] = self.settings["cxxflags"]
 
 	def override_ldflags(self):
 		if "ldflags" in self.settings:
-			self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
+			self.settings["LDFLAGS"] = self.settings["ldflags"]
 
 	def set_portage_overlay(self):
 		StageBase.set_portage_overlay(self)

diff --git a/catalyst/targets/stage2.py b/catalyst/targets/stage2.py
index ae1a2f3..e6965cc 100644
--- a/catalyst/targets/stage2.py
+++ b/catalyst/targets/stage2.py
@@ -5,7 +5,7 @@ stage2 target, builds upon previous stage1 tarball
 
 import os
 
-from catalyst.support import normpath, list_to_string
+from catalyst.support import normpath
 from catalyst.base.stagebase import StageBase
 
 
@@ -41,19 +41,19 @@ class stage2(StageBase):
 
 	def override_chost(self):
 		if "chost" in self.settings:
-			self.settings["CHOST"]=list_to_string(self.settings["chost"])
+			self.settings["CHOST"] = self.settings["chost"]
 
 	def override_cflags(self):
 		if "cflags" in self.settings:
-			self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
+			self.settings["CFLAGS"] = self.settings["cflags"]
 
 	def override_cxxflags(self):
 		if "cxxflags" in self.settings:
-			self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
+			self.settings["CXXFLAGS"] = self.settings["cxxflags"]
 
 	def override_ldflags(self):
 		if "ldflags" in self.settings:
-			self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
+			self.settings["LDFLAGS"] = self.settings["ldflags"]
 
 	def set_portage_overlay(self):
 		StageBase.set_portage_overlay(self)


* [gentoo-commits] proj/catalyst:master commit in: catalyst/, catalyst/targets/
@ 2015-02-26  4:12 Brian Dolbec
  0 siblings, 0 replies; 8+ messages in thread
From: Brian Dolbec @ 2015-02-26  4:12 UTC (permalink / raw)
  To: gentoo-commits

commit:     d4bc8e3c5ba7bca5656ea597e3ad013a4e61c8b5
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Sun Jan 20 08:10:03 2013 +0000
Commit:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
CommitDate: Thu Jan  1 05:58:05 2015 +0000
URL:        http://sources.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=d4bc8e3c

Begin splitting up generic_stage_target into smaller code blocks.

This is so snapshot_target does not need to import it, since most of it was
not used or initialized properly.
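
A toy sketch of the resulting composition (stand-in class bodies only; the
real TargetBase/ClearBase/GenBase are in the diffs below):

class TargetBase(object):
    def __init__(self, myspec, addlargs):
        self.settings = myspec

class ClearBase(object):
    def __init__(self, myspec):
        self.settings = myspec

class GenBase(object):
    def __init__(self, myspec):
        self.settings = myspec

class generic_stage_target(TargetBase, ClearBase, GenBase):
    def __init__(self, myspec, addlargs):
        TargetBase.__init__(self, myspec, addlargs)
        GenBase.__init__(self, myspec)
        ClearBase.__init__(self, myspec)

class snapshot_target(TargetBase, GenBase):
    # needs contents/digest generation but none of the chroot machinery,
    # so it no longer has to import generic_stage_target at all
    def __init__(self, myspec, addlargs):
        TargetBase.__init__(self, myspec, addlargs)
        GenBase.__init__(self, myspec)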

---
 catalyst/defaults.py                               |   2 +-
 catalyst/targets/clearbase.py                      | 115 ++++++++++++++
 catalyst/targets/genbase.py                        |  58 +++++++
 catalyst/targets/generic_stage_target.py           | 167 +++------------------
 catalyst/targets/snapshot_target.py                |  16 +-
 .../targets/{generic_target.py => targetbase.py}   |   6 +-
 6 files changed, 208 insertions(+), 156 deletions(-)

diff --git a/catalyst/defaults.py b/catalyst/defaults.py
index b36eff7..f6b2163 100644
--- a/catalyst/defaults.py
+++ b/catalyst/defaults.py
@@ -1,7 +1,7 @@
 
 
 # these should never be touched
-required_build_targets = ["generic_target", "generic_stage_target"]
+required_build_targets = ["targetbase", "generic_stage_target"]
 
 # new build types should be added here
 valid_build_targets = ["stage1_target", "stage2_target", "stage3_target",

diff --git a/catalyst/targets/clearbase.py b/catalyst/targets/clearbase.py
new file mode 100644
index 0000000..8519acc
--- /dev/null
+++ b/catalyst/targets/clearbase.py
@@ -0,0 +1,115 @@
+
+import os
+import shutil
+from stat import ST_UID, ST_GID, ST_MODE
+
+
+from catalyst.support import cmd, countdown
+
+
+class ClearBase(object):
+	"""
+	This class does all of clearing after task completion
+	"""
+	def __init__(self, myspec):
+		self.settings = myspec
+
+
+
+	def clear_autoresume(self):
+		""" Clean resume points since they are no longer needed """
+		if "autoresume" in self.settings["options"]:
+			print "Removing AutoResume Points: ..."
+		myemp=self.settings["autoresume_path"]
+		if os.path.isdir(myemp):
+				if "autoresume" in self.settings["options"]:
+					print "Emptying directory",myemp
+				"""
+				stat the dir, delete the dir, recreate the dir and set
+				the proper perms and ownership
+				"""
+				mystat=os.stat(myemp)
+				if os.uname()[0] == "FreeBSD":
+					cmd("chflags -R noschg "+myemp,\
+						"Could not remove immutable flag for file "\
+						+myemp)
+				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env-self.env)
+				shutil.rmtree(myemp)
+				os.makedirs(myemp,0755)
+				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
+				os.chmod(myemp,mystat[ST_MODE])
+
+
+	def clear_chroot(self):
+		myemp=self.settings["chroot_path"]
+		if os.path.isdir(myemp):
+			print "Emptying directory",myemp
+			"""
+			stat the dir, delete the dir, recreate the dir and set
+			the proper perms and ownership
+			"""
+			mystat=os.stat(myemp)
+			#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
+			""" There's no easy way to change flags recursively in python """
+			if os.uname()[0] == "FreeBSD":
+				os.system("chflags -R noschg "+myemp)
+			shutil.rmtree(myemp)
+			os.makedirs(myemp,0755)
+			os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
+			os.chmod(myemp,mystat[ST_MODE])
+
+
+	def clear_packages(self):
+		if "pkgcache" in self.settings["options"]:
+			print "purging the pkgcache ..."
+
+			myemp=self.settings["pkgcache_path"]
+			if os.path.isdir(myemp):
+				print "Emptying directory",myemp
+				"""
+				stat the dir, delete the dir, recreate the dir and set
+				the proper perms and ownership
+				"""
+				mystat=os.stat(myemp)
+				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
+				shutil.rmtree(myemp)
+				os.makedirs(myemp,0755)
+				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
+				os.chmod(myemp,mystat[ST_MODE])
+
+
+	def clear_kerncache(self):
+		if "kerncache" in self.settings["options"]:
+			print "purging the kerncache ..."
+
+			myemp=self.settings["kerncache_path"]
+			if os.path.isdir(myemp):
+				print "Emptying directory",myemp
+				"""
+				stat the dir, delete the dir, recreate the dir and set
+				the proper perms and ownership
+				"""
+				mystat=os.stat(myemp)
+				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
+				shutil.rmtree(myemp)
+				os.makedirs(myemp,0755)
+				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
+				os.chmod(myemp,mystat[ST_MODE])
+
+
+	def purge(self):
+		countdown(10,"Purging Caches ...")
+		if any(k in self.settings["options"] for k in ("purge","purgeonly","purgetmponly")):
+			print "clearing autoresume ..."
+			self.clear_autoresume()
+
+			print "clearing chroot ..."
+			self.clear_chroot()
+
+			if "PURGETMPONLY" not in self.settings:
+				print "clearing package cache ..."
+				self.clear_packages()
+
+			print "clearing kerncache ..."
+			self.clear_kerncache()
+

diff --git a/catalyst/targets/genbase.py b/catalyst/targets/genbase.py
new file mode 100644
index 0000000..e818781
--- /dev/null
+++ b/catalyst/targets/genbase.py
@@ -0,0 +1,58 @@
+
+
+import os
+
+
+class GenBase(object):
+	"""
+	This class does generation of the contents and digests files.
+	"""
+	def __init__(self,myspec):
+		self.settings = myspec
+
+
+	def gen_contents_file(self,file):
+		if os.path.exists(file+".CONTENTS"):
+			os.remove(file+".CONTENTS")
+		if "contents" in self.settings:
+			contents_map = self.settings["contents_map"]
+			if os.path.exists(file):
+				myf=open(file+".CONTENTS","w")
+				keys={}
+				for i in self.settings["contents"].split():
+					keys[i]=1
+					array=keys.keys()
+					array.sort()
+				for j in array:
+					contents = contents_map.generate_contents(file, j,
+						verbose="VERBOSE" in self.settings)
+					if contents:
+						myf.write(contents)
+				myf.close()
+
+	def gen_digest_file(self,file):
+		if os.path.exists(file+".DIGESTS"):
+			os.remove(file+".DIGESTS")
+		if "digests" in self.settings:
+			hash_map = self.settings["hash_map"]
+			if os.path.exists(file):
+				myf=open(file+".DIGESTS","w")
+				keys={}
+				for i in self.settings["digests"].split():
+					keys[i]=1
+					array=keys.keys()
+					array.sort()
+				for f in [file, file+'.CONTENTS']:
+					if os.path.exists(f):
+						if "all" in array:
+							for k in list(hash_map.hash_map):
+								hash = hash_map.generate_hash(f,hash_=k,
+									verbose = "VERBOSE" in self.settings)
+								myf.write(hash)
+						else:
+							for j in array:
+								hash = hash_map.generate_hash(f,hash_=j,
+									verbose = "VERBOSE" in self.settings)
+								myf.write(hash)
+				myf.close()
+

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index 0b506e2..c6b8dcc 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -1,17 +1,26 @@
-import os,string,imp,types,shutil
-from catalyst.support import *
-from generic_target import *
-from stat import *
-from catalyst.lock import LockDir
 
-from catalyst.defaults import (SOURCE_MOUNT_DEFAULTS, TARGET_MOUNT_DEFAULTS,
-	PORT_LOGDIR_CLEAN)
+import os
+import string
+import imp
+import types
+import shutil
+import sys
+from stat import ST_UID, ST_GID, ST_MODE
 
 # for convienience
 pjoin = os.path.join
 
+from catalyst.defaults import (SOURCE_MOUNT_DEFAULTS, TARGET_MOUNT_DEFAULTS,
+	PORT_LOGDIR_CLEAN)
+from catalyst.support import (CatalystError, msg, file_locate, normpath,
+	touch, cmd, warn, list_bashify, read_makeconf, read_from_clst, ismount)
+from catalyst.targets.targetbase import TargetBase
+from catalyst.targets.clearbase import ClearBase
+from catalyst.targets.genbase import GenBase
+from catalyst.lock import LockDir
+
 
-class generic_stage_target(generic_target):
+class generic_stage_target(TargetBase, ClearBase, GenBase):
 	"""
 	This class does all of the chroot setup, copying of files, etc. It is
 	the driver class for pretty much everything that Catalyst does.
@@ -26,7 +35,9 @@ class generic_stage_target(generic_target):
 			"distcc_hosts","makeopts","pkgcache_path","kerncache_path"])
 
 		self.set_valid_build_kernel_vars(addlargs)
-		generic_target.__init__(self,myspec,addlargs)
+		TargetBase.__init__(self, myspec, addlargs)
+		GenBase.__init__(self, myspec)
+		ClearBase.__init__(self, myspec)
 
 		"""
 		The semantics of subarchmap and machinemap changed a bit in 2.0.3 to
@@ -1616,142 +1627,4 @@ class generic_stage_target(generic_target):
 				self.unbind()
 				raise CatalystError,"build aborting due to livecd_update error."
 
-	def clear_chroot(self):
-		myemp=self.settings["chroot_path"]
-		if os.path.isdir(myemp):
-			print "Emptying directory",myemp
-			"""
-			stat the dir, delete the dir, recreate the dir and set
-			the proper perms and ownership
-			"""
-			mystat=os.stat(myemp)
-			#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
-			""" There's no easy way to change flags recursively in python """
-			if os.uname()[0] == "FreeBSD":
-				os.system("chflags -R noschg "+myemp)
-			shutil.rmtree(myemp)
-			os.makedirs(myemp,0755)
-			os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
-			os.chmod(myemp,mystat[ST_MODE])
-
-	def clear_packages(self):
-		if "pkgcache" in self.settings["options"]:
-			print "purging the pkgcache ..."
-
-			myemp=self.settings["pkgcache_path"]
-			if os.path.isdir(myemp):
-				print "Emptying directory",myemp
-				"""
-				stat the dir, delete the dir, recreate the dir and set
-				the proper perms and ownership
-				"""
-				mystat=os.stat(myemp)
-				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
-				shutil.rmtree(myemp)
-				os.makedirs(myemp,0755)
-				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
-				os.chmod(myemp,mystat[ST_MODE])
-
-	def clear_kerncache(self):
-		if "kerncache" in self.settings["options"]:
-			print "purging the kerncache ..."
-
-			myemp=self.settings["kerncache_path"]
-			if os.path.isdir(myemp):
-				print "Emptying directory",myemp
-				"""
-				stat the dir, delete the dir, recreate the dir and set
-				the proper perms and ownership
-				"""
-				mystat=os.stat(myemp)
-				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
-				shutil.rmtree(myemp)
-				os.makedirs(myemp,0755)
-				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
-				os.chmod(myemp,mystat[ST_MODE])
-
-	def clear_autoresume(self):
-		""" Clean resume points since they are no longer needed """
-		if "autoresume" in self.settings["options"]:
-			print "Removing AutoResume Points: ..."
-		myemp=self.settings["autoresume_path"]
-		if os.path.isdir(myemp):
-				if "autoresume" in self.settings["options"]:
-					print "Emptying directory",myemp
-				"""
-				stat the dir, delete the dir, recreate the dir and set
-				the proper perms and ownership
-				"""
-				mystat=os.stat(myemp)
-				if os.uname()[0] == "FreeBSD":
-					cmd("chflags -R noschg "+myemp,\
-						"Could not remove immutable flag for file "\
-						+myemp)
-				#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env-self.env)
-				shutil.rmtree(myemp)
-				os.makedirs(myemp,0755)
-				os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
-				os.chmod(myemp,mystat[ST_MODE])
-
-	def gen_contents_file(self,file):
-		if os.path.exists(file+".CONTENTS"):
-			os.remove(file+".CONTENTS")
-		if "contents" in self.settings:
-			contents_map = self.settings["contents_map"]
-			if os.path.exists(file):
-				myf=open(file+".CONTENTS","w")
-				keys={}
-				for i in self.settings["contents"].split():
-					keys[i]=1
-					array=keys.keys()
-					array.sort()
-				for j in array:
-					contents = contents_map.generate_contents(file, j,
-						verbose="VERBOSE" in self.settings)
-					if contents:
-						myf.write(contents)
-				myf.close()
-
-	def gen_digest_file(self,file):
-		if os.path.exists(file+".DIGESTS"):
-			os.remove(file+".DIGESTS")
-		if "digests" in self.settings:
-			hash_map = self.settings["hash_map"]
-			if os.path.exists(file):
-				myf=open(file+".DIGESTS","w")
-				keys={}
-				for i in self.settings["digests"].split():
-					keys[i]=1
-					array=keys.keys()
-					array.sort()
-				for f in [file, file+'.CONTENTS']:
-					if os.path.exists(f):
-						if "all" in array:
-							for k in list(hash_map.hash_map):
-								hash = hash_map.generate_hash(f, hash_ = k,
-									verbose = "VERBOSE" in self.settings)
-								myf.write(hash)
-						else:
-							for j in array:
-								hash = hash_map.generate_hash(f, hash_ = j,
-									verbose = "VERBOSE" in self.settings)
-								myf.write(hash)
-				myf.close()
-
-	def purge(self):
-		countdown(10,"Purging Caches ...")
-		if any(k in self.settings["options"] for k in ("purge","purgeonly","purgetmponly")):
-			print "clearing autoresume ..."
-			self.clear_autoresume()
-
-			print "clearing chroot ..."
-			self.clear_chroot()
-
-			if "PURGETMPONLY" not in self.settings:
-				print "clearing package cache ..."
-				self.clear_packages()
-
-			print "clearing kerncache ..."
-			self.clear_kerncache()
-
 # vim: ts=4 sw=4 sta et sts=4 ai

diff --git a/catalyst/targets/snapshot_target.py b/catalyst/targets/snapshot_target.py
index 50133ec..3289bbd 100644
--- a/catalyst/targets/snapshot_target.py
+++ b/catalyst/targets/snapshot_target.py
@@ -3,10 +3,15 @@ Snapshot target
 """
 
 import os
-from catalyst.support import *
-from generic_stage_target import *
+import shutil
+from stat import ST_UID, ST_GID, ST_MODE
 
-class snapshot_target(generic_stage_target):
+
+from catalyst.support import normpath, cmd
+from catalyst.targets.targetbase import TargetBase
+from catalyst.targets.genbase import GenBase
+
+class snapshot_target(TargetBase, GenBase):
 	"""
 	Builder class for snapshots.
 	"""
@@ -14,8 +19,9 @@ class snapshot_target(generic_stage_target):
 		self.required_values=["version_stamp","target"]
 		self.valid_values=["version_stamp","target"]
 
-		generic_target.__init__(self,myspec,addlargs)
-		self.settings=myspec
+		TargetBase.__init__(self, myspec, addlargs)
+		GenBase.__init__(self,myspec)
+		#self.settings=myspec
 		self.settings["target_subpath"]="portage"
 		st=self.settings["storedir"]
 		self.settings["snapshot_path"] = normpath(st + "/snapshots/"

diff --git a/catalyst/targets/generic_target.py b/catalyst/targets/targetbase.py
similarity index 64%
rename from catalyst/targets/generic_target.py
rename to catalyst/targets/targetbase.py
index 382f1c7..e0c03df 100644
--- a/catalyst/targets/generic_target.py
+++ b/catalyst/targets/targetbase.py
@@ -2,11 +2,11 @@ import os
 
 from catalyst.support import *
 
-class generic_target:
+class TargetBase(object):
 	"""
-	The toplevel class for generic_stage_target. This is about as generic as we get.
+	The toplevel class for all targets. This is about as generic as we get.
 	"""
-	def __init__(self,myspec,addlargs):
+	def __init__(self, myspec, addlargs):
 		addl_arg_parse(myspec,addlargs,self.required_values,self.valid_values)
 		self.settings=myspec
 		self.env = {


* [gentoo-commits] proj/catalyst:pending commit in: catalyst/targets/, catalyst/
@ 2015-01-01  5:59 Brian Dolbec
  2015-02-26  4:12 ` [gentoo-commits] proj/catalyst:master commit in: catalyst/, catalyst/targets/ Brian Dolbec
  0 siblings, 1 reply; 8+ messages in thread
From: Brian Dolbec @ 2015-01-01  5:59 UTC (permalink / raw)
  To: gentoo-commits

commit:     36610754ba2daca723064021001c5128d83e8f21
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Tue Feb 12 03:43:37 2013 +0000
Commit:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
CommitDate: Thu Jan  1 05:58:05 2015 +0000
URL:        http://sources.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=36610754

Remove redundant /bin/bash additions in cmd() calls

Remove old spawn_* functions copied from portage.
cmd() now uses subprocess.Popen().
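
The shape of the new cmd() helper, condensed (the full version, with
BASH_ENV handling, stdout flushing and CatalystError, is in the diff below):

from subprocess import Popen

def cmd(mycmd, myexc="", env={}, debug=False):
    # cmd() itself hands the command string to bash, so callers no
    # longer need to prepend "/bin/bash " to every invocation
    args = ["/bin/bash"]          # BASH_BINARY in the real code
    if debug:
        args.append("-x")
    args += ["-c", mycmd]
    if Popen(args, env=env).wait() != 0:
        # CatalystError(..., print_traceback=False) in the patch
        raise RuntimeError("non-zero return value from: %s" % myexc)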

---
 catalyst/support.py                      | 301 +++----------------------------
 catalyst/targets/generic_stage_target.py |  26 +--
 catalyst/targets/grp_target.py           |   2 +-
 catalyst/targets/netboot2_target.py      |   4 +-
 catalyst/targets/netboot_target.py       |   8 +-
 catalyst/targets/tinderbox_target.py     |   2 +-
 6 files changed, 42 insertions(+), 301 deletions(-)

diff --git a/catalyst/support.py b/catalyst/support.py
index e2d64a1..aaacaa9 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -4,9 +4,10 @@ import string
 import os
 import types
 import re
-import signal
 import traceback
 import time
+from subprocess import Popen
+
 
 from catalyst.defaults import verbosity, valid_config_file_values
 
@@ -31,34 +32,6 @@ except:
 spawned_pids = []
 
 
-def cleanup(pids,block_exceptions=True):
-	"""function to go through and reap the list of pids passed to it"""
-	global spawned_pids
-	if type(pids) == int:
-		pids = [pids]
-	for x in pids:
-		try:
-			os.kill(x,signal.SIGTERM)
-			if os.waitpid(x,os.WNOHANG)[1] == 0:
-				# feisty bugger, still alive.
-				os.kill(x,signal.SIGKILL)
-				os.waitpid(x,0)
-		except OSError, oe:
-			if block_exceptions:
-				pass
-			if oe.errno not in (10,3):
-				raise oe
-		except SystemExit:
-					raise
-		except Exception:
-			if block_exceptions:
-				pass
-		try:
-			spawned_pids.remove(x)
-		except IndexError:
-			pass
-
-
 # a function to turn a string of non-printable characters
 # into a string of hex characters
 def hexify(str):
@@ -79,8 +52,8 @@ def read_from_clst(file):
 		return -1
 		#raise CatalystError, "Could not open file "+file
 	for line in myf.readlines():
-	    #line = string.replace(line, "\n", "") # drop newline
-	    myline = myline + line
+		#line = string.replace(line, "\n", "") # drop newline
+		myline = myline + line
 	myf.close()
 	return myline
 
@@ -145,259 +118,27 @@ def find_binary(myc):
 	return None
 
 
-def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
-	"""spawn mycommand as an arguement to bash"""
-	args=[BASH_BINARY]
-	if not opt_name:
-		opt_name=mycommand.split()[0]
-	if "BASH_ENV" not in env:
-		env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
-	if debug:
-		args.append("-x")
-	args.append("-c")
-	args.append(mycommand)
-	return spawn(args,env=env,opt_name=opt_name,**keywords)
-
-
-def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
-	collect_fds=[1],fd_pipes=None,**keywords):
-	"""call spawn, collecting the output to fd's specified in collect_fds list
-	emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
-	requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
-	'lets let log only stdin and let stderr slide by'.
-
-	emulate_gso was deprecated from the day it was added, so convert your code over.
-	spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
-	global selinux_capable
-	pr,pw=os.pipe()
-
-	if fd_pipes==None:
-			fd_pipes={}
-			fd_pipes[0] = 0
-
-	for x in collect_fds:
-			fd_pipes[x] = pw
-	keywords["returnpid"]=True
-
-	mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
-	os.close(pw)
-	if type(mypid) != types.ListType:
-			os.close(pr)
-			return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
-
-	fd=os.fdopen(pr,"r")
-	mydata=fd.readlines()
-	fd.close()
-	if emulate_gso:
-			mydata=string.join(mydata)
-			if len(mydata) and mydata[-1] == "\n":
-					mydata=mydata[:-1]
-	retval=os.waitpid(mypid[0],0)[1]
-	cleanup(mypid)
-	if raw_exit_code:
-			return [retval,mydata]
-	retval=process_exit_code(retval)
-	return [retval, mydata]
-
-
-# base spawn function
-def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
-	 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
-	 selinux_context=None, raise_signals=False, func_call=False):
-	"""base fork/execve function.
-	mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
-	environment, use the appropriate spawn call.  This is a straight fork/exec code path.
-	Can either have a tuple, or a string passed in.  If uid/gid/groups/umask specified, it changes
-	the forked process to said value.  If path_lookup is on, a non-absolute command will be converted
-	to an absolute command, otherwise it returns None.
-
-	selinux_context is the desired context, dependant on selinux being available.
-	opt_name controls the name the processor goes by.
-	fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
-	current fd's raw fd #, desired #.
-
-	func_call is a boolean for specifying to execute a python function- use spawn_func instead.
-	raise_signals is questionable.  Basically throw an exception if signal'd.  No exception is thrown
-	if raw_input is on.
-
-	logfile overloads the specified fd's to write to a tee process which logs to logfile
-	returnpid returns the relevant pids (a list, including the logging process if logfile is on).
-
-	non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
-	raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
-
-	myc=''
-	if not func_call:
-		if type(mycommand)==types.StringType:
-			mycommand=mycommand.split()
-		myc = mycommand[0]
-		if not os.access(myc, os.X_OK):
-			if not path_lookup:
-				return None
-			myc = find_binary(myc)
-			if myc == None:
-				return None
-	mypid=[]
-	if logfile:
-		pr,pw=os.pipe()
-		mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
-		retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
-		if retval != 0:
-			# he's dead jim.
-			if raw_exit_code:
-				return retval
-			return process_exit_code(retval)
-
-		if fd_pipes == None:
-			fd_pipes={}
-			fd_pipes[0] = 0
-		fd_pipes[1]=pw
-		fd_pipes[2]=pw
-
-	if not opt_name:
-		opt_name = mycommand[0]
-	myargs=[opt_name]
-	myargs.extend(mycommand[1:])
-	global spawned_pids
-	mypid.append(os.fork())
-	if mypid[-1] != 0:
-		#log the bugger.
-		spawned_pids.extend(mypid)
-
-	if mypid[-1] == 0:
-		if func_call:
-			spawned_pids = []
-
-		# this may look ugly, but basically it moves file descriptors around to ensure no
-		# handles that are needed are accidentally closed during the final dup2 calls.
-		trg_fd=[]
-		if type(fd_pipes)==types.DictType:
-			src_fd=[]
-			k=fd_pipes.keys()
-			k.sort()
-
-			#build list of which fds will be where, and where they are at currently
-			for x in k:
-				trg_fd.append(x)
-				src_fd.append(fd_pipes[x])
-
-			# run through said list dup'ing descriptors so that they won't be waxed
-			# by other dup calls.
-			for x in range(0,len(trg_fd)):
-				if trg_fd[x] == src_fd[x]:
-					continue
-				if trg_fd[x] in src_fd[x+1:]:
-					os.close(trg_fd[x])
-
-			# transfer the fds to their final pre-exec position.
-			for x in range(0,len(trg_fd)):
-				if trg_fd[x] != src_fd[x]:
-					os.dup2(src_fd[x], trg_fd[x])
-		else:
-			trg_fd=[0,1,2]
-
-		# wax all open descriptors that weren't requested be left open.
-		for x in range(0,max_fd_limit):
-			if x not in trg_fd:
-				try:
-					os.close(x)
-				except SystemExit, e:
-					raise
-				except:
-					pass
-
-		# note this order must be preserved- can't change gid/groups if you change uid first.
-		if selinux_capable and selinux_context:
-			import selinux
-			selinux.setexec(selinux_context)
-		if gid:
-			os.setgid(gid)
-		if groups:
-			os.setgroups(groups)
-		if uid:
-			os.setuid(uid)
-		if umask:
-			os.umask(umask)
-		else:
-			os.umask(022)
-
-		try:
-			#print "execing", myc, myargs
-			if func_call:
-				# either use a passed in func for interpretting the results, or return if no exception.
-				# note the passed in list, and dict are expanded.
-				if len(mycommand) == 4:
-					os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
-				try:
-					mycommand[0](*mycommand[1],**mycommand[2])
-				except Exception,e:
-					print "caught exception",e," in forked func",mycommand[0]
-				sys.exit(0)
-
-			os.execve(myc,myargs,env)
-		except SystemExit, e:
-			raise
-		except Exception, e:
-			if not func_call:
-				raise str(e)+":\n   "+myc+" "+string.join(myargs)
-			print "func call failed"
-
-		# If the execve fails, we need to report it, and exit
-		# *carefully* --- report error here
-		os._exit(1)
-		sys.exit(1)
-		return # should never get reached
-
-	# if we were logging, kill the pipes.
-	if logfile:
-			os.close(pr)
-			os.close(pw)
-
-	if returnpid:
-			return mypid
-
-	# loop through pids (typically one, unless logging), either waiting on their death, or waxing them
-	# if the main pid (mycommand) returned badly.
-	while len(mypid):
-		retval=os.waitpid(mypid[-1],0)[1]
-		if retval != 0:
-			cleanup(mypid[0:-1],block_exceptions=False)
-			# at this point we've killed all other kid pids generated via this call.
-			# return now.
-			if raw_exit_code:
-				return retval
-			return process_exit_code(retval,throw_signals=raise_signals)
-		else:
-			mypid.pop(-1)
-	cleanup(mypid)
-	return 0
-
-
-def cmd(mycmd,myexc="",env={}):
+def cmd(mycmd, myexc="", env={}, debug=False):
 	try:
 		sys.stdout.flush()
-		retval=spawn_bash(mycmd,env)
-		if retval != 0:
-			raise CatalystError,myexc
+		args=[BASH_BINARY]
+		if "BASH_ENV" not in env:
+			env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
+		if debug:
+			args.append("-x")
+		args.append("-c")
+		args.append(mycmd)
+
+		if debug:
+			print "cmd(); args =", args
+		proc = Popen(args, env=env)
+		if proc.wait() != 0:
+			raise CatalystError("cmd() NON-zero return value from: %s" % myexc,
+				print_traceback=False)
 	except:
 		raise
 
 
-def process_exit_code(retval,throw_signals=False):
-	"""process a waitpid returned exit code, returning exit code if it exit'd, or the
-	signal if it died from signalling
-	if throw_signals is on, it raises a SystemExit if the process was signaled.
-	This is intended for usage with threads, although at the moment you can't signal individual
-	threads in python, only the master thread, so it's a questionable option."""
-	if (retval & 0xff)==0:
-		return retval >> 8 # return exit code
-	else:
-		if throw_signals:
-			#use systemexit, since portage is stupid about exception catching.
-			raise SystemExit()
-		return (retval & 0xff) << 8 # interrupted by signal
-
-
 def file_locate(settings,filelist,expand=1):
 	#if expand=1, non-absolute paths will be accepted and
 	# expanded to os.getcwd()+"/"+localpath if file exists
@@ -459,8 +200,8 @@ def parse_makeconf(mylines):
 			mobj=pat.match(myline)
 			pos += 1
 			if mobj.group(2):
-			    clean_string = re.sub(r"\"",r"",mobj.group(2))
-			    mymakeconf[mobj.group(1)]=clean_string
+				clean_string = re.sub(r"\"",r"",mobj.group(2))
+				mymakeconf[mobj.group(1)]=clean_string
 	return mymakeconf
 
 

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index c6b8dcc..296eee3 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -1136,7 +1136,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 		else:
 			if "fsscript" in self.settings:
 				if os.path.exists(self.settings["controller_file"]):
-					cmd("/bin/bash "+self.settings["controller_file"]+\
+					cmd(self.settings["controller_file"]+\
 						" fsscript","fsscript script failed.",env=self.env)
 					touch(fsscript_resume)
 
@@ -1147,7 +1147,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 			print "Resume point detected, skipping rcupdate operation..."
 		else:
 			if os.path.exists(self.settings["controller_file"]):
-				cmd("/bin/bash "+self.settings["controller_file"]+" rc-update",\
+				cmd(self.settings["controller_file"]+" rc-update",\
 					"rc-update script failed.",env=self.env)
 				touch(rcupdate_resume)
 
@@ -1183,7 +1183,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 				"Could not remove stray files in /etc",env=self.env)
 
 		if os.path.exists(self.settings["controller_file"]):
-			cmd("/bin/bash "+self.settings["controller_file"]+" clean",\
+			cmd(self.settings["controller_file"]+" clean",\
 				"clean script failed.",env=self.env)
 			touch(clean_resume)
 
@@ -1232,7 +1232,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 					os.system("rm -rf "+self.settings["chroot_path"]+x)
 				try:
 					if os.path.exists(self.settings["controller_file"]):
-						cmd("/bin/bash "+self.settings["controller_file"]+\
+						cmd(self.settings["controller_file"]+\
 							" clean","Clean  failed.",env=self.env)
 						touch(remove_resume)
 				except:
@@ -1247,7 +1247,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 		else:
 			try:
 				if os.path.exists(self.settings["controller_file"]):
-					cmd("/bin/bash "+self.settings["controller_file"]+\
+					cmd(self.settings["controller_file"]+\
 						" preclean","preclean script failed.",env=self.env)
 					touch(preclean_resume)
 
@@ -1289,7 +1289,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 		else:
 			try:
 				if os.path.exists(self.settings["controller_file"]):
-					cmd("/bin/bash "+self.settings["controller_file"]+" run",\
+					cmd(self.settings["controller_file"]+" run",\
 						"run script failed.",env=self.env)
 					touch(run_local_resume)
 
@@ -1388,7 +1388,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 
 				""" Before cleaning, unmerge stuff """
 				try:
-					cmd("/bin/bash "+self.settings["controller_file"]+\
+					cmd(self.settings["controller_file"]+\
 						" unmerge "+ myunmerge,"Unmerge script failed.",\
 						env=self.env)
 					print "unmerge shell script"
@@ -1405,7 +1405,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 			print "Resume point detected, skipping target_setup operation..."
 		else:
 			print "Setting up filesystems per filesystem type"
-			cmd("/bin/bash "+self.settings["controller_file"]+\
+			cmd(self.settings["controller_file"]+\
 				" target_image_setup "+ self.settings["target_path"],\
 				"target_image_setup script failed.",env=self.env)
 			touch(target_setup_resume)
@@ -1434,7 +1434,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 		else:
 			""" Create the ISO """
 			if "iso" in self.settings:
-				cmd("/bin/bash "+self.settings["controller_file"]+" iso "+\
+				cmd(self.settings["controller_file"]+" iso "+\
 					self.settings["iso"],"ISO creation script failed.",\
 					env=self.env)
 				self.gen_contents_file(self.settings["iso"])
@@ -1461,7 +1461,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 						list_bashify(self.settings[self.settings["spec_prefix"]\
 						+"/packages"])
 					try:
-						cmd("/bin/bash "+self.settings["controller_file"]+\
+						cmd(self.settings["controller_file"]+\
 							" build_packages "+mypack,\
 							"Error in attempt to build packages",env=self.env)
 						touch(build_packages_resume)
@@ -1486,7 +1486,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 					"""
 					Execute the script that sets up the kernel build environment
 					"""
-					cmd("/bin/bash "+self.settings["controller_file"]+\
+					cmd(self.settings["controller_file"]+\
 						" pre-kmerge ","Runscript pre-kmerge failed",\
 						env=self.env)
 					for kname in mynames:
@@ -1603,7 +1603,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 			print "Resume point detected, skipping bootloader operation..."
 		else:
 			try:
-				cmd("/bin/bash "+self.settings["controller_file"]+\
+				cmd(self.settings["controller_file"]+\
 					" bootloader " + self.settings["target_path"],\
 					"Bootloader script failed.",env=self.env)
 				touch(bootloader_resume)
@@ -1619,7 +1619,7 @@ class generic_stage_target(TargetBase, ClearBase, GenBase):
 			print "Resume point detected, skipping build_packages operation..."
 		else:
 			try:
-				cmd("/bin/bash "+self.settings["controller_file"]+\
+				cmd(self.settings["controller_file"]+\
 					" livecd-update","livecd-update failed.",env=self.env)
 				touch(livecd_update_resume)
 

diff --git a/catalyst/targets/grp_target.py b/catalyst/targets/grp_target.py
index a8309a8..033db75 100644
--- a/catalyst/targets/grp_target.py
+++ b/catalyst/targets/grp_target.py
@@ -54,7 +54,7 @@ class grp_target(generic_stage_target):
 			# example call: "grp.sh run pkgset cd1 xmms vim sys-apps/gleep"
 			mypackages=list_bashify(self.settings["grp/"+pkgset+"/packages"])
 			try:
-				cmd("/bin/bash "+self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
+				cmd(self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
 					+" "+pkgset+" "+mypackages,env=self.env)
 
 			except CatalystError:

diff --git a/catalyst/targets/netboot2_target.py b/catalyst/targets/netboot2_target.py
index 8809dd0..ea07d76 100644
--- a/catalyst/targets/netboot2_target.py
+++ b/catalyst/targets/netboot2_target.py
@@ -87,7 +87,7 @@ class netboot2_target(generic_stage_target):
 					myfiles.append(self.settings["netboot2/extra_files"])
 
 			try:
-				cmd("/bin/bash "+self.settings["controller_file"]+\
+				cmd(self.settings["controller_file"]+\
 					" image " + list_bashify(myfiles),env=self.env)
 			except CatalystError:
 				self.unbind()
@@ -112,7 +112,7 @@ class netboot2_target(generic_stage_target):
 		# no auto resume here as we always want the
 		# freshest images moved
 		try:
-			cmd("/bin/bash "+self.settings["controller_file"]+\
+			cmd(self.settings["controller_file"]+\
 				" final",env=self.env)
 			print ">>> Netboot Build Finished!"
 		except CatalystError:

diff --git a/catalyst/targets/netboot_target.py b/catalyst/targets/netboot_target.py
index 9d01b7e..ae1eb04 100644
--- a/catalyst/targets/netboot_target.py
+++ b/catalyst/targets/netboot_target.py
@@ -59,7 +59,7 @@ class netboot_target(generic_stage_target):
 #		if "netboot/packages" in self.settings:
 #			mypack=list_bashify(self.settings["netboot/packages"])
 #		try:
-#			cmd("/bin/bash "+self.settings["controller_file"]+" packages "+mypack,env=self.env)
+#			cmd(self.settings["controller_file"]+" packages "+mypack,env=self.env)
 #		except CatalystError:
 #			self.unbind()
 #			raise CatalystError,"netboot build aborting due to error."
@@ -71,7 +71,7 @@ class netboot_target(generic_stage_target):
 		else:
 			mycmd = ""
 		try:
-			cmd("/bin/bash "+self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
+			cmd(self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
 		except CatalystError:
 			self.unbind()
 			raise CatalystError,"netboot build aborting due to error."
@@ -99,7 +99,7 @@ class netboot_target(generic_stage_target):
 				myfiles.append(self.settings["netboot/extra_files"])
 
 		try:
-			cmd("/bin/bash "+self.settings["controller_file"]+\
+			cmd(self.settings["controller_file"]+\
 				" image " + list_bashify(myfiles),env=self.env)
 		except CatalystError:
 			self.unbind()
@@ -108,7 +108,7 @@ class netboot_target(generic_stage_target):
 	def create_netboot_files(self):
 		# finish it all up
 		try:
-			cmd("/bin/bash "+self.settings["controller_file"]+" finish",env=self.env)
+			cmd(self.settings["controller_file"]+" finish",env=self.env)
 		except CatalystError:
 			self.unbind()
 			raise CatalystError,"netboot build aborting due to error."

diff --git a/catalyst/targets/tinderbox_target.py b/catalyst/targets/tinderbox_target.py
index 1d31989..ea11d3f 100644
--- a/catalyst/targets/tinderbox_target.py
+++ b/catalyst/targets/tinderbox_target.py
@@ -21,7 +21,7 @@ class tinderbox_target(generic_stage_target):
 		# example call: "grp.sh run xmms vim sys-apps/gleep"
 		try:
 			if os.path.exists(self.settings["controller_file"]):
-			    cmd("/bin/bash "+self.settings["controller_file"]+" run "+\
+			    cmd(self.settings["controller_file"]+" run "+\
 				list_bashify(self.settings["tinderbox/packages"]),"run script failed.",env=self.env)
 
 		except CatalystError:
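
Every hunk in this patch makes the same change: the hard-coded "/bin/bash "
prefix is dropped, so the script named by settings["controller_file"] is
executed directly. The snippet below is only a sketch of what that direct
invocation relies on (an executable controller script with its own shebang);
it is not catalyst's actual cmd() helper.

    # Hypothetical sketch, not catalyst's real cmd() implementation.
    import os
    import subprocess

    def run_controller(controller_file, arg_string, env):
        # The command is still assembled as one shell string, as in the
        # hunks above, but the script itself is now the program being run,
        # so it must be executable and carry its own shebang line.
        if not os.access(controller_file, os.X_OK):
            raise OSError("controller file is not executable: %s" % controller_file)
        subprocess.check_call(controller_file + " " + arg_string,
            shell=True, env=env)

    # e.g. run_controller(settings["controller_file"], "livecd-update", env)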


* [gentoo-commits] proj/catalyst:pending commit in: catalyst/, catalyst/targets/
@ 2015-01-01  5:59 Brian Dolbec
  2015-02-26  4:12 ` [gentoo-commits] proj/catalyst:master " Brian Dolbec
  0 siblings, 1 reply; 8+ messages in thread
From: Brian Dolbec @ 2015-01-01  5:59 UTC (permalink / raw
  To: gentoo-commits

commit:     e3114e65ffdaaefe84dfe9a7678a7333280bd37f
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Sun Jan 20 01:28:51 2013 +0000
Commit:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
CommitDate: Thu Jan  1 05:58:05 2015 +0000
URL:        http://sources.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=e3114e65

Some options cleanup, unifying their use and reducing redundancy.

Fix options being reset by a config file.
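
In outline, the patch below replaces the per-feature "FOO"="1" settings with
a single set stored in conf_values["options"], which the command-line
switches then extend rather than overwrite. A toy illustration of that merge
(the option names are examples, not catalyst's full list, and this is not
the actual parse_config()/main() code):

    # Illustration only.
    conf_values = {}

    # parse_config(): options read from catalyst.conf become a set
    conf_values["options"] = set("autoresume pkgcache seedcache".split())

    # main(): options collected from the command-line switches
    cli_options = set(["purge", "clear-autoresume"])

    # update() extends the existing set, so options enabled in the config
    # file are no longer reset while the command line is processed.
    conf_values["options"].update(cli_options)

    print(sorted(conf_values["options"]))
    # ['autoresume', 'clear-autoresume', 'pkgcache', 'purge', 'seedcache']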

---
 catalyst/defaults.py                     |  22 ++++-
 catalyst/main.py                         |  96 ++++++---------------
 catalyst/targets/generic_stage_target.py | 142 ++++++++++++++++++-------------
 catalyst/targets/grp_target.py           |   2 +-
 catalyst/targets/livecd_stage1_target.py |   2 +-
 catalyst/targets/livecd_stage2_target.py |   8 +-
 catalyst/targets/netboot2_target.py      |  10 +--
 catalyst/targets/snapshot_target.py      |   4 +-
 catalyst/targets/stage2_target.py        |   2 +-
 catalyst/targets/stage4_target.py        |   4 +-
 10 files changed, 142 insertions(+), 150 deletions(-)

diff --git a/catalyst/defaults.py b/catalyst/defaults.py
index b83e4f5..b36eff7 100644
--- a/catalyst/defaults.py
+++ b/catalyst/defaults.py
@@ -13,10 +13,9 @@ valid_build_targets = ["stage1_target", "stage2_target", "stage3_target",
 required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]
 
 valid_config_file_values = required_config_file_values[:]
-valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC",
-	"ICECREAM", "ENVSCRIPT", "AUTORESUME", "FETCH", "CLEAR_AUTORESUME",
-	"options", "DEBUG", "VERBOSE", "PURGE", "PURGEONLY", "SNAPCACHE",
-	"snapshot_cache", "hash_function", "digests", "contents", "SEEDCACHE"
+valid_config_file_values.extend([ "distcc", "envscript",
+	"options", "DEBUG", "VERBOSE",
+	"snapshot_cache", "hash_function", "digests", "contents"
 	])
 
 verbosity = 1
@@ -65,3 +64,18 @@ SOURCE_MOUNT_DEFAULTS = {
 	"shm": "shmfs",
 	}
 
+# legend:  key: message
+option_messages = {
+	"autoresume": "Autoresuming support enabled.",
+	"ccache": "Compiler cache support enabled.",
+	"clear-autoresume": "Cleaning autoresume flags support enabled.",
+	#"compress": "Compression enabled.",
+	"distcc": "Distcc support enabled.",
+	"icecream": "Icecream compiler cluster support enabled.",
+	"kerncache": "Kernel cache support enabled.",
+	"pkgcache": "Package cache support enabled.",
+	"purge": "Purge support enabled.",
+	"seedcache": "Seed cache support enabled.",
+	"snapcache": "Snapshot cache support enabled.",
+	#"tarball": "Tarball creation enabled.",
+	}

diff --git a/catalyst/main.py b/catalyst/main.py
index 5748d31..81d48b5 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -22,13 +22,12 @@ from . import __version__
 import catalyst.config
 import catalyst.util
 from catalyst.support import CatalystError, find_binary, LockInUse
-from catalyst.defaults import (required_build_targets, valid_build_targets,
-	confdefaults)
+from catalyst.defaults import (confdefaults, option_messages,
+	required_build_targets, valid_build_targets)
 from hash_utils import HashMap, HASH_DEFINITIONS
 from contents import ContentsMap, CONTENTS_DEFINITIONS
 
 
-
 conf_values={}
 
 def usage():
@@ -106,7 +105,10 @@ def parse_config(myconfig):
 	for x in list(confdefaults):
 		if x in myconf:
 			print "Setting",x,"to config file value \""+myconf[x]+"\""
-			conf_values[x]=myconf[x]
+			if x == 'options':
+				conf_values[x] = set(myconf[x].split())
+			else:
+				conf_values[x]=myconf[x]
 		else:
 			print "Setting",x,"to default value \""+confdefaults[x]+"\""
 			conf_values[x]=confdefaults[x]
@@ -114,74 +116,23 @@ def parse_config(myconfig):
 	# add our python base directory to use for loading target arch's
 	conf_values["PythonDir"] = __selfpath__
 
-	# parse out the rest of the options from the config file
-	if "autoresume" in string.split(conf_values["options"]):
-		print "Autoresuming support enabled."
-		conf_values["AUTORESUME"]="1"
-
-	if "bindist" in string.split(conf_values["options"]):
-		print "Binary redistribution enabled"
-		conf_values["BINDIST"]="1"
-	else:
-		print "Bindist is not enabled in catalyst.conf"
-		print "Binary redistribution of generated stages/isos may be prohibited by law."
-		print "Please see the use description for bindist on any package you are including."
-
-	if "ccache" in string.split(conf_values["options"]):
-		print "Compiler cache support enabled."
-		conf_values["CCACHE"]="1"
-
-	if "clear-autoresume" in string.split(conf_values["options"]):
-		print "Cleaning autoresume flags support enabled."
-		conf_values["CLEAR_AUTORESUME"]="1"
-
-	if "distcc" in string.split(conf_values["options"]):
-		print "Distcc support enabled."
-		conf_values["DISTCC"]="1"
-
-	if "icecream" in string.split(conf_values["options"]):
-		print "Icecream compiler cluster support enabled."
-		conf_values["ICECREAM"]="1"
-
-	if "kerncache" in string.split(conf_values["options"]):
-		print "Kernel cache support enabled."
-		conf_values["KERNCACHE"]="1"
 
-	if "pkgcache" in string.split(conf_values["options"]):
-		print "Package cache support enabled."
-		conf_values["PKGCACHE"]="1"
+	# print out any options messages
+	for opt in conf_values['options']:
+		if opt in option_messages:
+			print option_messages[opt]
 
-	if "preserve_libs" in string.split(conf_values["options"]):
-		print "Preserving libs during unmerge."
-		conf_values["PRESERVE_LIBS"]="1"
+	for key in ["digests", "envscript", "var_tmpfs_portage", "port_logdir"]:
+		if key in myconf:
+			conf_values[key] = myconf[key]
 
-	if "purge" in string.split(conf_values["options"]):
-		print "Purge support enabled."
-		conf_values["PURGE"]="1"
-
-	if "seedcache" in string.split(conf_values["options"]):
-		print "Seed cache support enabled."
-		conf_values["SEEDCACHE"]="1"
-
-	if "snapcache" in string.split(conf_values["options"]):
-		print "Snapshot cache support enabled."
-		conf_values["SNAPCACHE"]="1"
-
-	if "digests" in myconf:
-		conf_values["digests"]=myconf["digests"]
 	if "contents" in myconf:
 		# replace '-' with '_' (for compatibility with existing configs)
 		conf_values["contents"] = myconf["contents"].replace("-", '_')
 
 	if "envscript" in myconf:
 		print "Envscript support enabled."
-		conf_values["ENVSCRIPT"]=myconf["envscript"]
-
-	if "var_tmpfs_portage" in myconf:
-		conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
 
-	if "port_logdir" in myconf:
-		conf_values["port_logdir"]=myconf["port_logdir"];
 
 def import_modules():
 	# import catalyst's own modules
@@ -272,6 +223,8 @@ def main():
 		usage()
 		sys.exit(2)
 
+	options = set()
+
 	run = False
 	for o, a in opts:
 		if o in ("-h", "--help"):
@@ -283,8 +236,8 @@ def main():
 			sys.exit(1)
 
 		if o in ("-d", "--debug"):
-			conf_values["DEBUG"]="1"
-			conf_values["VERBOSE"]="1"
+			conf_values["DEBUG"] = True
+			conf_values["VERBOSE"] = True
 
 		if o in ("-c", "--config"):
 			myconfig=a
@@ -301,7 +254,7 @@ def main():
 			myspecfile=a
 
 		if o in ("-F", "--fetchonly"):
-			conf_values["FETCH"]="1"
+			options.add("fetch")
 
 		if o in ("-v", "--verbose"):
 			conf_values["VERBOSE"]="1"
@@ -317,16 +270,18 @@ def main():
 				mycmdline.append("version_stamp="+a)
 
 		if o in ("-p", "--purge"):
-			conf_values["PURGE"] = "1"
+			options.add("purge")
 
 		if o in ("-P", "--purgeonly"):
-			conf_values["PURGEONLY"] = "1"
+			options.add("purgeonly")
 
 		if o in ("-T", "--purgetmponly"):
-			conf_values["PURGETMPONLY"] = "1"
+			options.add("purgetmponly")
 
 		if o in ("-a", "--clear-autoresume"):
-			conf_values["CLEAR_AUTORESUME"] = "1"
+			options.add("clear-autoresume")
+
+	#print "MAIN: cli options =", options
 
 	if not run:
 		print "!!! catalyst: please specify one of either -f or -C\n"
@@ -336,6 +291,9 @@ def main():
 	# import configuration file and import our main module using those settings
 	parse_config(myconfig)
 
+	conf_values["options"].update(options)
+	#print "MAIN: conf_values['options'] =", conf_values["options"]
+
 	# initialize our contents generator
 	contents_map = ContentsMap(CONTENTS_DEFINITIONS)
 	conf_values["contents_map"] = contents_map

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index 1f26e65..0b506e2 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -189,7 +189,7 @@ class generic_stage_target(generic_target):
 		self.mountmap = SOURCE_MOUNT_DEFAULTS.copy()
 		# update them from settings
 		self.mountmap["distdir"] = self.settings["distdir"]
-		if "SNAPCACHE" not in self.settings:
+		if "snapcache" not in self.settings["options"]:
 			self.mounts.remove("portdir")
 			self.mountmap["portdir"] = None
 		else:
@@ -207,21 +207,21 @@ class generic_stage_target(generic_target):
 		Configure any user specified options (either in catalyst.conf or on
 		the command line).
 		"""
-		if "PKGCACHE" in self.settings:
+		if "pkgcache" in self.settings["options"]:
 			self.set_pkgcache_path()
 			print "Location of the package cache is "+\
 				self.settings["pkgcache_path"]
 			self.mounts.append("packagedir")
 			self.mountmap["packagedir"] = self.settings["pkgcache_path"]
 
-		if "KERNCACHE" in self.settings:
+		if "kerncache" in self.settings["options"]:
 			self.set_kerncache_path()
 			print "Location of the kerncache is "+\
 				self.settings["kerncache_path"]
 			self.mounts.append("kerncache")
 			self.mountmap["kerncache"] = self.settings["kerncache_path"]
 
-		if "CCACHE" in self.settings:
+		if "ccache" in self.settings["options"]:
 			if "CCACHE_DIR" in os.environ:
 				ccdir=os.environ["CCACHE_DIR"]
 				del os.environ["CCACHE_DIR"]
@@ -236,7 +236,7 @@ class generic_stage_target(generic_target):
 			""" for the chroot: """
 			self.env["CCACHE_DIR"] = self.target_mounts["ccache"]
 
-		if "ICECREAM" in self.settings:
+		if "icecream" in self.settings["options"]:
 			self.mounts.append("icecream")
 			self.mountmap["icecream"] = self.settings["icecream"]
 			self.env["PATH"] = self.target_mounts["icecream"] + ":" + \
@@ -314,7 +314,7 @@ class generic_stage_target(generic_target):
 			"/builds/"+self.settings["target_subpath"]+".tar.bz2")
 		setup_target_path_resume = pjoin(self.settings["autoresume_path"],
 			"setup_target_path")
-		if "AUTORESUME" in self.settings and \
+		if "autoresume" in self.settings["options"] and \
 				os.path.exists(setup_target_path_resume):
 			print \
 				"Resume point detected, skipping target path setup operation..."
@@ -391,7 +391,7 @@ class generic_stage_target(generic_target):
 				del self.settings[self.settings["spec_prefix"]+"/fsops"]
 
 	def set_source_path(self):
-		if "SEEDCACHE" in self.settings\
+		if "seedcache" in self.settings["options"]\
 			and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
 				self.settings["source_subpath"]+"/")):
 			self.settings["source_path"]=normpath(self.settings["storedir"]+\
@@ -450,7 +450,7 @@ class generic_stage_target(generic_target):
 						verbose = False)
 
 	def set_snapcache_path(self):
-		if "SNAPCACHE" in self.settings:
+		if "snapcache" in self.settings["options"]:
 			self.settings["snapshot_cache_path"] = \
 				normpath(self.settings["snapshot_cache"] + "/" +
 					self.settings["snapshot"])
@@ -474,7 +474,7 @@ class generic_stage_target(generic_target):
 			%(self.settings["target"], self.settings["subarch"],
 				self.settings["version_stamp"])
 			))
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			print "The autoresume path is " + self.settings["autoresume_path"]
 		if not os.path.exists(self.settings["autoresume_path"]):
 			os.makedirs(self.settings["autoresume_path"],0755)
@@ -501,8 +501,8 @@ class generic_stage_target(generic_target):
 				"base_dirs","bind","chroot_setup","setup_environment",\
 				"run_local","preclean","unbind","clean"]
 #		if "TARBALL" in self.settings or \
-#			"FETCH" not in self.settings:
-		if "FETCH" not in self.settings:
+#			"fetch" not in self.settings["options"]:
+		if "fetch" not in self.settings["options"]:
 			self.settings["action_sequence"].append("capture")
 		self.settings["action_sequence"].append("clear_autoresume")
 
@@ -662,7 +662,7 @@ class generic_stage_target(generic_target):
 		unpack_resume = pjoin(self.settings["autoresume_path"], "unpack")
 		clst_unpack_hash=read_from_clst(unpack_resume)
 
-		if "SEEDCACHE" in self.settings:
+		if "seedcache" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]):
 				""" SEEDCACHE Is a directory, use rsync """
 				unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
@@ -704,7 +704,7 @@ class generic_stage_target(generic_target):
 			error_msg="Tarball extraction of "+self.settings["source_path"]+\
 				" to "+self.settings["chroot_path"]+" failed."
 
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]) \
 				and os.path.exists(unpack_resume):
 				""" Autoresume is valid, SEEDCACHE is valid """
@@ -730,7 +730,7 @@ class generic_stage_target(generic_target):
 				invalid_snapshot=True
 		else:
 			""" No autoresume, SEEDCACHE """
-			if "SEEDCACHE" in self.settings:
+			if "seedcache" in self.settings["options"]:
 				""" SEEDCACHE so let's run rsync and let it clean up """
 				if os.path.isdir(self.settings["source_path"]):
 					unpack=True
@@ -754,7 +754,7 @@ class generic_stage_target(generic_target):
 			self.mount_safety_check()
 
 			if invalid_snapshot:
-				if "AUTORESUME" in self.settings:
+				if "autoresume" in self.settings["options"]:
 					print "No Valid Resume point detected, cleaning up..."
 
 				self.clear_autoresume()
@@ -766,11 +766,11 @@ class generic_stage_target(generic_target):
 			if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
 				os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
 
-			if "PKGCACHE" in self.settings:
+			if "pkgcache" in self.settings["options"]:
 				if not os.path.exists(self.settings["pkgcache_path"]):
 					os.makedirs(self.settings["pkgcache_path"],0755)
 
-			if "KERNCACHE" in self.settings:
+			if "kerncache" in self.settings["options"]:
 				if not os.path.exists(self.settings["kerncache_path"]):
 					os.makedirs(self.settings["kerncache_path"],0755)
 
@@ -792,7 +792,7 @@ class generic_stage_target(generic_target):
 			"unpack_portage")
 		snapshot_hash=read_from_clst(unpack_portage_resume)
 
-		if "SNAPCACHE" in self.settings:
+		if "snapcache" in self.settings["options"]:
 			snapshot_cache_hash=\
 				read_from_clst(self.settings["snapshot_cache_path"] + "/" +
 					"catalyst-hash")
@@ -824,7 +824,7 @@ class generic_stage_target(generic_target):
 					self.settings["chroot_path"]+"/usr"
 			unpack_errmsg="Error unpacking snapshot"
 
-			if "AUTORESUME" in self.settings \
+			if "autoresume" in self.settings["options"] \
 				and os.path.exists(self.settings["chroot_path"]+\
 					self.settings["portdir"]) \
 				and os.path.exists(unpack_portage_resume) \
@@ -834,7 +834,7 @@ class generic_stage_target(generic_target):
 					unpack=False
 
 		if unpack:
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				self.snapshot_lock_object.write_lock()
 			if os.path.exists(destdir):
 				print cleanup_msg
@@ -846,7 +846,7 @@ class generic_stage_target(generic_target):
 			print "Unpacking portage tree (This can take a long time) ..."
 			cmd(unpack_cmd,unpack_errmsg,env=self.env)
 
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				myf=open(self.settings["snapshot_cache_path"] +
 					"/" + "catalyst-hash","w")
 				myf.write(self.settings["snapshot_path_hash"])
@@ -857,13 +857,13 @@ class generic_stage_target(generic_target):
 				myf.write(self.settings["snapshot_path_hash"])
 				myf.close()
 
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				self.snapshot_lock_object.unlock()
 
 	def config_profile_link(self):
 		config_protect_link_resume = pjoin(self.settings["autoresume_path"],
 			"config_profile_link")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(config_protect_link_resume):
 			print \
 				"Resume point detected, skipping config_profile_link operation..."
@@ -883,7 +883,7 @@ class generic_stage_target(generic_target):
 	def setup_confdir(self):
 		setup_confdir_resume = pjoin(self.settings["autoresume_path"],
 			"setup_confdir")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(setup_confdir_resume):
 			print "Resume point detected, skipping setup_confdir operation..."
 		else:
@@ -935,7 +935,7 @@ class generic_stage_target(generic_target):
 
 			src=self.mountmap[x]
 			#print "bind(); src =", src
-			if "SNAPCACHE" in self.settings and x == "portdir":
+			if "snapcache" in self.settings["options"] and x == "portdir":
 				self.snapshot_lock_object.read_lock()
 			if os.uname()[0] == "FreeBSD":
 				if src == "/dev":
@@ -989,7 +989,7 @@ class generic_stage_target(generic_target):
 					ouch=1
 					warn("Couldn't umount bind mount: " + target)
 
-			if "SNAPCACHE" in self.settings and x == "/usr/portage":
+			if "snapcache" in self.settings["options"] and x == "/usr/portage":
 				try:
 					"""
 					It's possible the snapshot lock object isn't created yet.
@@ -1018,7 +1018,8 @@ class generic_stage_target(generic_target):
 		self.override_ldflags()
 		chroot_setup_resume = pjoin(self.settings["autoresume_path"],
 			"chroot_setup")
-		if "AUTORESUME" in self.settings and os.path.exists(chroot_setup_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(chroot_setup_resume):
 			print "Resume point detected, skipping chroot_setup operation..."
 		else:
 			print "Setting up chroot..."
@@ -1029,10 +1030,10 @@ class generic_stage_target(generic_target):
 				"Could not copy resolv.conf into place.",env=self.env)
 
 			""" Copy over the envscript, if applicable """
-			if "ENVSCRIPT" in self.settings:
-				if not os.path.exists(self.settings["ENVSCRIPT"]):
+			if "envscript" in self.settings:
+				if not os.path.exists(self.settings["envscript"]):
 					raise CatalystError,\
-						"Can't find envscript "+self.settings["ENVSCRIPT"]
+						"Can't find envscript "+self.settings["envscript"]
 
 				print "\nWarning!!!!"
 				print "\tOverriding certain env variables may cause catastrophic failure."
@@ -1042,7 +1043,7 @@ class generic_stage_target(generic_target):
 				print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
 				print "\tYou have been warned\n"
 
-				cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
+				cmd("cp "+self.settings["envscript"]+" "+\
 					self.settings["chroot_path"]+"/tmp/envscript",\
 					"Could not copy envscript into place.",env=self.env)
 
@@ -1118,7 +1119,8 @@ class generic_stage_target(generic_target):
 
 	def fsscript(self):
 		fsscript_resume = pjoin(self.settings["autoresume_path"], "fsscript")
-		if "AUTORESUME" in self.settings and os.path.exists(fsscript_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(fsscript_resume):
 			print "Resume point detected, skipping fsscript operation..."
 		else:
 			if "fsscript" in self.settings:
@@ -1129,7 +1131,7 @@ class generic_stage_target(generic_target):
 
 	def rcupdate(self):
 		rcupdate_resume = pjoin(self.settings["autoresume_path"], "rcupdate")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(rcupdate_resume):
 			print "Resume point detected, skipping rcupdate operation..."
 		else:
@@ -1140,7 +1142,7 @@ class generic_stage_target(generic_target):
 
 	def clean(self):
 		clean_resume = pjoin(self.settings["autoresume_path"], "clean")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(clean_resume):
 			print "Resume point detected, skipping clean operation..."
 		else:
@@ -1176,7 +1178,8 @@ class generic_stage_target(generic_target):
 
 	def empty(self):
 		empty_resume = pjoin(self.settings["autoresume_path"], "empty")
-		if "AUTORESUME" in self.settings and os.path.exists(empty_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(empty_resume):
 			print "Resume point detected, skipping empty operation..."
 		else:
 			if self.settings["spec_prefix"]+"/empty" in self.settings:
@@ -1204,7 +1207,8 @@ class generic_stage_target(generic_target):
 
 	def remove(self):
 		remove_resume = pjoin(self.settings["autoresume_path"], "remove")
-		if "AUTORESUME" in self.settings and os.path.exists(remove_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(remove_resume):
 			print "Resume point detected, skipping remove operation..."
 		else:
 			if self.settings["spec_prefix"]+"/rm" in self.settings:
@@ -1226,7 +1230,8 @@ class generic_stage_target(generic_target):
 
 	def preclean(self):
 		preclean_resume = pjoin(self.settings["autoresume_path"], "preclean")
-		if "AUTORESUME" in self.settings and os.path.exists(preclean_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(preclean_resume):
 			print "Resume point detected, skipping preclean operation..."
 		else:
 			try:
@@ -1241,7 +1246,8 @@ class generic_stage_target(generic_target):
 
 	def capture(self):
 		capture_resume = pjoin(self.settings["autoresume_path"], "capture")
-		if "AUTORESUME" in self.settings and os.path.exists(capture_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(capture_resume):
 			print "Resume point detected, skipping capture operation..."
 		else:
 			""" Capture target in a tarball """
@@ -1266,7 +1272,8 @@ class generic_stage_target(generic_target):
 
 	def run_local(self):
 		run_local_resume = pjoin(self.settings["autoresume_path"], "run_local")
-		if "AUTORESUME" in self.settings and os.path.exists(run_local_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(run_local_resume):
 			print "Resume point detected, skipping run_local operation..."
 		else:
 			try:
@@ -1285,7 +1292,14 @@ class generic_stage_target(generic_target):
 		fixed. We need this to use the os.system() call since we can't
 		specify our own environ
 		"""
-		for x in self.settings.keys():
+		#print "setup_environment(); settings =", list(self.settings)
+		for x in list(self.settings):
+			#print "setup_environment(); processing:", x
+			if x == "options":
+				#self.env['clst_' + x] = ' '.join(self.settings[x])
+				for opt in self.settings[x]:
+					self.env['clst_' + opt.upper()] = "true"
+				continue
 			""" Sanitize var names by doing "s|/-.|_|g" """
 			varname="clst_"+string.replace(x,"/","_")
 			varname=string.replace(varname,"-","_")
@@ -1314,10 +1328,10 @@ class generic_stage_target(generic_target):
 		""" Check for mounts right away and abort if we cannot unmount them """
 		self.mount_safety_check()
 
-		if "CLEAR_AUTORESUME" in self.settings:
+		if "clear-autoresume" in self.settings["options"]:
 			self.clear_autoresume()
 
-		if "PURGETMPONLY" in self.settings:
+		if "purgetmponly" in self.settings["options"]:
 			self.purge()
 			return
 
@@ -1325,7 +1339,7 @@ class generic_stage_target(generic_target):
 			self.purge()
 			return
 
-		if "PURGE" in self.settings:
+		if "purge" in self.settings["options"]:
 			self.purge()
 
 		for x in self.settings["action_sequence"]:
@@ -1341,7 +1355,8 @@ class generic_stage_target(generic_target):
 
 	def unmerge(self):
 		unmerge_resume = pjoin(self.settings["autoresume_path"], "unmerge")
-		if "AUTORESUME" in self.settings and os.path.exists(unmerge_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(unmerge_resume):
 			print "Resume point detected, skipping unmerge operation..."
 		else:
 			if self.settings["spec_prefix"]+"/unmerge" in self.settings:
@@ -1374,7 +1389,8 @@ class generic_stage_target(generic_target):
 	def target_setup(self):
 		target_setup_resume = pjoin(self.settings["autoresume_path"],
 			"target_setup")
-		if "AUTORESUME" in self.settings and os.path.exists(target_setup_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(target_setup_resume):
 			print "Resume point detected, skipping target_setup operation..."
 		else:
 			print "Setting up filesystems per filesystem type"
@@ -1386,8 +1402,8 @@ class generic_stage_target(generic_target):
 	def setup_overlay(self):
 		setup_overlay_resume = pjoin(self.settings["autoresume_path"],
 			"setup_overlay")
-		if "AUTORESUME" in self.settings and \
-				os.path.exists(setup_overlay_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(setup_overlay_resume):
 			print "Resume point detected, skipping setup_overlay operation..."
 		else:
 			if self.settings["spec_prefix"]+"/overlay" in self.settings:
@@ -1401,7 +1417,8 @@ class generic_stage_target(generic_target):
 
 	def create_iso(self):
 		create_iso_resume = pjoin(self.settings["autoresume_path"], "create_iso")
-		if "AUTORESUME" in self.settings and os.path.exists(create_iso_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(create_iso_resume):
 			print "Resume point detected, skipping create_iso operation..."
 		else:
 			""" Create the ISO """
@@ -1419,12 +1436,12 @@ class generic_stage_target(generic_target):
 	def build_packages(self):
 		build_packages_resume = pjoin(self.settings["autoresume_path"],
 			"build_packages")
-		if "AUTORESUME" in self.settings and \
-				os.path.exists(build_packages_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(build_packages_resume):
 			print "Resume point detected, skipping build_packages operation..."
 		else:
 			if self.settings["spec_prefix"]+"/packages" in self.settings:
-				if "AUTORESUME" in self.settings \
+				if "autoresume" in self.settings["options"] \
 					and os.path.exists(self.settings["autoresume_path"]+\
 						"build_packages"):
 					print "Resume point detected, skipping build_packages operation..."
@@ -1443,10 +1460,11 @@ class generic_stage_target(generic_target):
 							"build aborting due to error."
 
 	def build_kernel(self):
-		"Build all configured kernels"
+		'''Build all configured kernels'''
 		build_kernel_resume = pjoin(self.settings["autoresume_path"],
 			"build_kernel")
-		if "AUTORESUME" in self.settings and os.path.exists(build_kernel_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(build_kernel_resume):
 			print "Resume point detected, skipping build_kernel operation..."
 		else:
 			if "boot/kernel" in self.settings:
@@ -1472,7 +1490,8 @@ class generic_stage_target(generic_target):
 		"Build a single configured kernel by name"
 		kname_resume = pjoin(self.settings["autoresume_path"],
 			"build_kernel_" + kname)
-		if "AUTORESUME" in self.settings and os.path.exists(kname_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(kname_resume):
 			print "Resume point detected, skipping build_kernel for "+kname+" operation..."
 			return
 		self._copy_kernel_config(kname=kname)
@@ -1568,7 +1587,8 @@ class generic_stage_target(generic_target):
 
 	def bootloader(self):
 		bootloader_resume = pjoin(self.settings["autoresume_path"], "bootloader")
-		if "AUTORESUME" in self.settings and os.path.exists(bootloader_resume):
+		if "autoresume" in self.settings["options"] \
+			and os.path.exists(bootloader_resume):
 			print "Resume point detected, skipping bootloader operation..."
 		else:
 			try:
@@ -1583,7 +1603,7 @@ class generic_stage_target(generic_target):
 	def livecd_update(self):
 		livecd_update_resume = pjoin(self.settings["autoresume_path"],
 			"livecd_update")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(livecd_update_resume):
 			print "Resume point detected, skipping build_packages operation..."
 		else:
@@ -1615,7 +1635,7 @@ class generic_stage_target(generic_target):
 			os.chmod(myemp,mystat[ST_MODE])
 
 	def clear_packages(self):
-		if "PKGCACHE" in self.settings:
+		if "pkgcache" in self.settings["options"]:
 			print "purging the pkgcache ..."
 
 			myemp=self.settings["pkgcache_path"]
@@ -1633,7 +1653,7 @@ class generic_stage_target(generic_target):
 				os.chmod(myemp,mystat[ST_MODE])
 
 	def clear_kerncache(self):
-		if "KERNCACHE" in self.settings:
+		if "kerncache" in self.settings["options"]:
 			print "purging the kerncache ..."
 
 			myemp=self.settings["kerncache_path"]
@@ -1652,11 +1672,11 @@ class generic_stage_target(generic_target):
 
 	def clear_autoresume(self):
 		""" Clean resume points since they are no longer needed """
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			print "Removing AutoResume Points: ..."
 		myemp=self.settings["autoresume_path"]
 		if os.path.isdir(myemp):
-				if "AUTORESUME" in self.settings:
+				if "autoresume" in self.settings["options"]:
 					print "Emptying directory",myemp
 				"""
 				stat the dir, delete the dir, recreate the dir and set
@@ -1720,7 +1740,7 @@ class generic_stage_target(generic_target):
 
 	def purge(self):
 		countdown(10,"Purging Caches ...")
-		if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
+		if any(k in self.settings["options"] for k in ("purge","purgeonly","purgetmponly")):
 			print "clearing autoresume ..."
 			self.clear_autoresume()
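
One consequence of the setup_environment() hunk above is worth spelling out:
each enabled option is now exported into the build environment as an
upper-cased clst_* flag set to "true", instead of relying on the old
upper-case settings keys. A rough sketch of that mapping, using assumed
option names:

    # Sketch of the option-to-environment translation shown above.
    options = set(["autoresume", "pkgcache"])   # example values
    env = {}
    for opt in options:
        env["clst_" + opt.upper()] = "true"

    print(sorted(env.items()))
    # [('clst_AUTORESUME', 'true'), ('clst_PKGCACHE', 'true')]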
 

diff --git a/catalyst/targets/grp_target.py b/catalyst/targets/grp_target.py
index 8e70042..a8309a8 100644
--- a/catalyst/targets/grp_target.py
+++ b/catalyst/targets/grp_target.py
@@ -36,7 +36,7 @@ class grp_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 			print "Resume point detected, skipping target path setup operation..."
 		else:

diff --git a/catalyst/targets/livecd_stage1_target.py b/catalyst/targets/livecd_stage1_target.py
index ac846ec..6273c9e 100644
--- a/catalyst/targets/livecd_stage1_target.py
+++ b/catalyst/targets/livecd_stage1_target.py
@@ -25,7 +25,7 @@ class livecd_stage1_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"])
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:

diff --git a/catalyst/targets/livecd_stage2_target.py b/catalyst/targets/livecd_stage2_target.py
index e784844..11b1219 100644
--- a/catalyst/targets/livecd_stage2_target.py
+++ b/catalyst/targets/livecd_stage2_target.py
@@ -48,7 +48,7 @@ class livecd_stage2_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -90,7 +90,7 @@ class livecd_stage2_target(generic_stage_target):
 			error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
 			invalid_snapshot=False
 
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]) and \
 				os.path.exists(self.settings["autoresume_path"]+"unpack"):
 				print "Resume point detected, skipping unpack operation..."
@@ -114,7 +114,7 @@ class livecd_stage2_target(generic_stage_target):
 			if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
 				os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
 
-			if "PKGCACHE" in self.settings:
+			if "pkgcache" in self.settings["options"]:
 				if not os.path.exists(self.settings["pkgcache_path"]):
 					os.makedirs(self.settings["pkgcache_path"],0755)
 
@@ -136,7 +136,7 @@ class livecd_stage2_target(generic_stage_target):
 				"config_profile_link","setup_confdir","portage_overlay",\
 				"bind","chroot_setup","setup_environment","run_local",\
 				"build_kernel"]
-		if "FETCH" not in self.settings:
+		if "fetch" not in self.settings["options"]:
 			self.settings["action_sequence"] += ["bootloader","preclean",\
 				"livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
 				"unbind","remove","empty","target_setup",\

diff --git a/catalyst/targets/netboot2_target.py b/catalyst/targets/netboot2_target.py
index 2b3cd20..8809dd0 100644
--- a/catalyst/targets/netboot2_target.py
+++ b/catalyst/targets/netboot2_target.py
@@ -45,7 +45,7 @@ class netboot2_target(generic_stage_target):
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
 			self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -63,7 +63,7 @@ class netboot2_target(generic_stage_target):
 		myfiles=[]
 
 		# check for autoresume point
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -96,7 +96,7 @@ class netboot2_target(generic_stage_target):
 			touch(self.settings["autoresume_path"]+"copy_files_to_image")
 
 	def setup_overlay(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 		and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
 			print "Resume point detected, skipping setup_overlay operation..."
 		else:
@@ -120,7 +120,7 @@ class netboot2_target(generic_stage_target):
 			raise CatalystError,"Failed to move kernel images!"
 
 	def remove(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"remove"):
 			print "Resume point detected, skipping remove operation..."
 		else:
@@ -132,7 +132,7 @@ class netboot2_target(generic_stage_target):
 					os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)
 
 	def empty(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"empty"):
 			print "Resume point detected, skipping empty operation..."
 		else:

diff --git a/catalyst/targets/snapshot_target.py b/catalyst/targets/snapshot_target.py
index d1b9e40..50133ec 100644
--- a/catalyst/targets/snapshot_target.py
+++ b/catalyst/targets/snapshot_target.py
@@ -32,11 +32,11 @@ class snapshot_target(generic_stage_target):
 		pass
 
 	def run(self):
-		if "PURGEONLY" in self.settings:
+		if "purgeonly" in self.settings["options"]:
 			self.purge()
 			return
 
-		if "PURGE" in self.settings:
+		if "purge" in self.settings["options"]:
 			self.purge()
 
 		self.setup()

diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py
index 6377f5d..94d4a1e 100644
--- a/catalyst/targets/stage2_target.py
+++ b/catalyst/targets/stage2_target.py
@@ -16,7 +16,7 @@ class stage2_target(generic_stage_target):
 		generic_stage_target.__init__(self,spec,addlargs)
 
 	def set_source_path(self):
-		if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
+		if "seedcache" in self.settings["options"] and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
 			self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
 		else:
 			self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")

diff --git a/catalyst/targets/stage4_target.py b/catalyst/targets/stage4_target.py
index 9168f2e..e2b8a79 100644
--- a/catalyst/targets/stage4_target.py
+++ b/catalyst/targets/stage4_target.py
@@ -32,8 +32,8 @@ class stage4_target(generic_stage_target):
 					"clean"]
 
 #		if "TARBALL" in self.settings or \
-#			"FETCH" not in self.settings:
-		if "FETCH" not in self.settings:
+#			"fetch" not in self.settings['options']:
+		if "fetch" not in self.settings['options']:
 			self.settings["action_sequence"].append("capture")
 		self.settings["action_sequence"].append("clear_autoresume")
 


* [gentoo-commits] proj/catalyst:master commit in: catalyst/, catalyst/targets/
@ 2014-05-05 19:17 Brian Dolbec
  0 siblings, 0 replies; 8+ messages in thread
From: Brian Dolbec @ 2014-05-05 19:17 UTC (permalink / raw
  To: gentoo-commits

commit:     264f30494cde1eb44aae89a9872bf12936856252
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Sat Jan  4 01:04:10 2014 +0000
Commit:     Brian Dolbec <brian.dolbec <AT> gmail <DOT> com>
CommitDate: Wed Apr  2 20:04:24 2014 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=264f3049

Move generic_stage_target.py constants to defaults.py

Rename the source and target mount defaults to remove
the double plural.
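
Since the mount tables now live in catalyst/defaults.py as module-level
dictionaries, each target takes a .copy() before mutating them (as the
generic_stage_target.py hunk below still does); otherwise every target
instance would share and modify the same dict. A minimal sketch of that
idiom, assuming the catalyst package is importable:

    # Sketch only; the dictionaries are defined in catalyst/defaults.py.
    from catalyst.defaults import SOURCE_MOUNT_DEFAULTS, TARGET_MOUNT_DEFAULTS

    target_mounts = TARGET_MOUNT_DEFAULTS.copy()
    mountmap = SOURCE_MOUNT_DEFAULTS.copy()

    # Local changes stay local to this copy.
    mountmap["distdir"] = "/srv/distfiles"   # example override
    assert SOURCE_MOUNT_DEFAULTS["distdir"] == "/usr/portage/distfiles"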

---
 catalyst/defaults.py                     | 27 +++++++++++++++++++++++++
 catalyst/targets/generic_stage_target.py | 34 ++++----------------------------
 2 files changed, 31 insertions(+), 30 deletions(-)

diff --git a/catalyst/defaults.py b/catalyst/defaults.py
index b1dbda4..748d1dd 100644
--- a/catalyst/defaults.py
+++ b/catalyst/defaults.py
@@ -21,3 +21,30 @@ valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC",
 
 verbosity = 1
 
+PORT_LOGDIR_CLEAN = \
+	'find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
+
+TARGET_MOUNT_DEFAULTS = {
+	"ccache": "/var/tmp/ccache",
+	"dev": "/dev",
+	"devpts": "/dev/pts",
+	"distdir": "/usr/portage/distfiles",
+	"icecream": "/usr/lib/icecc/bin",
+	"kerncache": "/tmp/kerncache",
+	"packagedir": "/usr/portage/packages",
+	"portdir": "/usr/portage",
+	"port_tmpdir": "/var/tmp/portage",
+	"port_logdir": "/var/log/portage",
+	"proc": "/proc",
+	"shm": "/dev/shm",
+	}
+
+SOURCE_MOUNT_DEFAULTS = {
+	"dev": "/dev",
+	"devpts": "/dev/pts",
+	"distdir": "/usr/portage/distfiles",
+	"portdir": "/usr/portage",
+	"port_tmpdir": "tmpfs",
+	"proc": "/proc",
+	"shm": "shmfs",
+	}

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index de4842c..05c61e8 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -4,34 +4,8 @@ from generic_target import *
 from stat import *
 from catalyst.lock import LockDir
 
-
-PORT_LOGDIR_CLEAN = \
-	'find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
-
-TARGET_MOUNTS_DEFAULTS = {
-	"ccache": "/var/tmp/ccache",
-	"dev": "/dev",
-	"devpts": "/dev/pts",
-	"distdir": "/usr/portage/distfiles",
-	"icecream": "/usr/lib/icecc/bin",
-	"kerncache": "/tmp/kerncache",
-	"packagedir": "/usr/portage/packages",
-	"portdir": "/usr/portage",
-	"port_tmpdir": "/var/tmp/portage",
-	"port_logdir": "/var/log/portage",
-	"proc": "/proc",
-	"shm": "/dev/shm",
-	}
-
-SOURCE_MOUNTS_DEFAULTS = {
-	"dev": "/dev",
-	"devpts": "/dev/pts",
-	"distdir": "/usr/portage/distfiles",
-	"portdir": "/usr/portage",
-	"port_tmpdir": "tmpfs",
-	"proc": "/proc",
-	"shm": "shmfs",
-	}
+from catalyst.defaults import (SOURCE_MOUNT_DEFAULTS, TARGET_MOUNT_DEFAULTS,
+	PORT_LOGDIR_CLEAN)
 
 # for convienience
 pjoin = os.path.join
@@ -208,11 +182,11 @@ class generic_stage_target(generic_target):
 
 		""" Setup our mount points """
 		# initialize our target mounts.
-		self.target_mounts = TARGET_MOUNTS_DEFAULTS.copy()
+		self.target_mounts = TARGET_MOUNT_DEFAULTS.copy()
 
 		self.mounts = ["proc", "dev", "portdir", "distdir", "port_tmpdir"]
 		# initialize our source mounts
-		self.mountmap = SOURCE_MOUNTS_DEFAULTS.copy()
+		self.mountmap = SOURCE_MOUNT_DEFAULTS.copy()
 		# update them from settings
 		self.mountmap["distdir"] = self.settings["distdir"]
 		if "SNAPCACHE" not in self.settings:


* [gentoo-commits] proj/catalyst:pending commit in: catalyst/, catalyst/targets/
@ 2014-04-02 20:09 Brian Dolbec
  2014-05-05 19:17 ` [gentoo-commits] proj/catalyst:master " Brian Dolbec
  0 siblings, 1 reply; 8+ messages in thread
From: Brian Dolbec @ 2014-04-02 20:09 UTC (permalink / raw
  To: gentoo-commits

commit:     6c8aac54653310e32672e7d2d5f4eef963a19c7c
Author:     Brian Dolbec <dolsen <AT> gentoo <DOT> org>
AuthorDate: Fri Jan  3 18:46:55 2014 +0000
Commit:     Brian Dolbec <brian.dolbec <AT> gmail <DOT> com>
CommitDate: Wed Apr  2 20:04:23 2014 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=proj/catalyst.git;a=commit;h=6c8aac54

Initial separation and creation of contents.py
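
A minimal usage sketch of the new ContentsMap class added below; the class
name, CONTENTS_DEFINITIONS, and the generate_contents() signature come from
the diff, while the tarball path is made up and the catalyst package is
assumed to be importable:

    # Illustration of driving the new contents generator.
    from catalyst.contents import ContentsMap, CONTENTS_DEFINITIONS

    contents_map = ContentsMap(CONTENTS_DEFINITIONS)
    # With getter="auto", a .tar.bz2 file selects the tar_tvj command.
    listing = contents_map.generate_contents("/tmp/stage3-example.tar.bz2",
        getter="auto", verbose=False)
    if listing:
        print(listing)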

---
 catalyst/contents.py                     | 87 ++++++++++++++++++++++++++++++++
 catalyst/main.py                         |  8 ++-
 catalyst/support.py                      | 52 -------------------
 catalyst/targets/generic_stage_target.py |  3 +-
 4 files changed, 96 insertions(+), 54 deletions(-)

diff --git a/catalyst/contents.py b/catalyst/contents.py
new file mode 100644
index 0000000..79ef9a6
--- /dev/null
+++ b/catalyst/contents.py
@@ -0,0 +1,87 @@
+
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError, warn
+
+
+# use ContentsMap.fields for the value legend
+# Key:[function, cmd]
+CONTENTS_DEFINITIONS = {
+	# 'find' is disabled because it requires the source path, which is not
+	# always available
+	#"find"		:["calc_contents","find %(path)s"],
+	"tar_tv":["calc_contents","tar tvf %(file)s"],
+	"tar_tvz":["calc_contents","tar tvzf %(file)s"],
+	"tar_tvj":["calc_contents","tar -I lbzip2 -tvf %(file)s"],
+	"isoinfo_l":["calc_contents","isoinfo -l -i %(file)s"],
+	# isoinfo_f should be a last resort only
+	"isoinfo_f":["calc_contents","isoinfo -f -i %(file)s"],
+}
+
+
+class ContentsMap(object):
+	'''Class to encompass all known commands to list
+	the contents of an archive'''
+
+
+	fields = ['func', 'cmd']
+
+
+	def __init__(self, defs=None):
+		'''Class init
+
+		@param defs: dictionary of Key:[function, cmd]
+		'''
+		if defs is None:
+			defs = {}
+		#self.contents = {}
+		self.contents_map = {}
+
+		# create the archive type namedtuple classes
+		for name in list(defs):
+			#obj = self.contents[name] = namedtuple(name, self.fields)
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.contents_map[name] = obj._make(defs[name])
+		del obj
+
+
+	def generate_contents(self, file_, getter="auto", verbose=False):
+		try:
+			archive = getter
+			if archive == 'auto' and file_.endswith('.iso'):
+				archive = 'isoinfo_l'
+			if (archive in ['tar_tv','auto']):
+				if file_.endswith('.tgz') or file_.endswith('.tar.gz'):
+					archive = 'tar_tvz'
+				elif file_.endswith('.tbz2') or file_.endswith('.tar.bz2'):
+					archive = 'tar_tvj'
+				elif file_.endswith('.tar'):
+					archive = 'tar_tv'
+
+			if archive == 'auto':
+				warn('File %r has unknown type for automatic detection.'
+					% (file_, ))
+				return None
+			else:
+				getter = archive
+				func = getattr(self, '_%s_' % self.contents_map[getter].func)
+				return func(file_, self.contents_map[getter].cmd, verbose)
+		except:
+			raise CatalystError,\
+				"Error generating contents, is appropriate utility " +\
+				"(%s) installed on your system?" \
+				% (self.contents_map[getter].cmd)
+
+
+	@staticmethod
+	def _calc_contents_(file_, cmd, verbose):
+		_cmd = (cmd % {'file': file_ }).split()
+		proc = Popen(_cmd, stdout=PIPE, stderr=PIPE)
+		results = proc.communicate()
+		result = "\n".join(results)
+		if verbose:
+			print result
+		return result
+

diff --git a/catalyst/main.py b/catalyst/main.py
index 7bcf2cb..4146bca 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -25,6 +25,7 @@ from catalyst.support import (required_build_targets,
 	valid_build_targets, CatalystError, find_binary, LockInUse)
 
 from hash_utils import HashMap, HASH_DEFINITIONS
+from contents import ContentsMap, CONTENTS_DEFINITIONS
 
 
 
@@ -184,7 +185,8 @@ def parse_config(myconfig):
 	if "digests" in myconf:
 		conf_values["digests"]=myconf["digests"]
 	if "contents" in myconf:
-		conf_values["contents"]=myconf["contents"]
+		# replace '-' with '_' (for compatibility with existing configs)
+		conf_values["contents"] = myconf["contents"].replace("-", '_')
 
 	if "envscript" in myconf:
 		print "Envscript support enabled."
@@ -348,6 +350,10 @@ def main():
 	# import configuration file and import our main module using those settings
 	parse_config(myconfig)
 
+	# initialize our contents generator
+	contents_map = ContentsMap(CONTENTS_DEFINITIONS)
+	conf_values["contents_map"] = contents_map
+
 	# initialze our hash and contents generators
 	hash_map = HashMap(HASH_DEFINITIONS)
 	conf_values["hash_map"] = hash_map

diff --git a/catalyst/support.py b/catalyst/support.py
index 308d9c0..e25394e 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -62,58 +62,6 @@ def hexify(str):
 	return r
 # hexify()
 
-def generate_contents(file,contents_function="auto",verbose=False):
-	try:
-		_ = contents_function
-		if _ == 'auto' and file.endswith('.iso'):
-			_ = 'isoinfo-l'
-		if (_ in ['tar-tv','auto']):
-			if file.endswith('.tgz') or file.endswith('.tar.gz'):
-				_ = 'tar-tvz'
-			elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-				_ = 'tar-tvj'
-			elif file.endswith('.tar'):
-				_ = 'tar-tv'
-
-		if _ == 'auto':
-			warn('File %r has unknown type for automatic detection.' % (file, ))
-			return None
-		else:
-			contents_function = _
-			_ = contents_map[contents_function]
-			return _[0](file,_[1],verbose)
-	except:
-		raise CatalystError,\
-			"Error generating contents, is appropriate utility (%s) installed on your system?" \
-			% (contents_function, )
-
-def calc_contents(file,cmd,verbose):
-	args={ 'file': file }
-	cmd=cmd % dict(args)
-	a=os.popen(cmd)
-	mylines=a.readlines()
-	a.close()
-	result="".join(mylines)
-	if verbose:
-		print result
-	return result
-
-# This has map must be defined after the function calc_content
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd
-contents_map={
-	# 'find' is disabled because it requires the source path, which is not
-	# always available
-	#"find"		:[calc_contents,"find %(path)s"],
-	"tar-tv":[calc_contents,"tar tvf %(file)s"],
-	"tar-tvz":[calc_contents,"tar tvzf %(file)s"],
-	"tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
-	"isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
-	# isoinfo-f should be a last resort only
-	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
-}
-
 
 def read_from_clst(file):
 	line = ''

diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index b6a6200..de4842c 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -1703,6 +1703,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".CONTENTS"):
 			os.remove(file+".CONTENTS")
 		if "contents" in self.settings:
+			contents_map = self.settings["contents_map"]
 			if os.path.exists(file):
 				myf=open(file+".CONTENTS","w")
 				keys={}
@@ -1711,7 +1712,7 @@ class generic_stage_target(generic_target):
 					array=keys.keys()
 					array.sort()
 				for j in array:
-					contents=generate_contents(file,contents_function=j,\
+					contents = contents_map.generate_contents(file, j,
 						verbose="VERBOSE" in self.settings)
 					if contents:
 						myf.write(contents)


