public inbox for gentoo-commits@lists.gentoo.org
 help / color / mirror / Atom feed
* [gentoo-commits] dev/zorry:master commit in: gobs/pym/, gobs/pym/querys/
@ 2011-07-10 15:31 Magnus Granberg
  0 siblings, 0 replies; only message in thread
From: Magnus Granberg @ 2011-07-10 15:31 UTC (permalink / raw)
  To: gentoo-commits

commit:     fc92cf0dbec6ac4097b981e2a9d0a3bbc3f8a052
Author:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sun Jul 10 15:29:44 2011 +0000
Commit:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sun Jul 10 15:29:44 2011 +0000
URL:        http://git.overlays.gentoo.org/gitweb/?p=dev/zorry.git;a=commit;h=fc92cf0d

Updated the code in pym

---
 gobs/pym/ConnectionManager.py |   56 ++++
 gobs/pym/arch.py              |   26 ++-
 gobs/pym/build_log.py         |  150 +++++++++++
 gobs/pym/categories.py        |   29 ++-
 gobs/pym/check_setup.py       |  134 +++++++---
 gobs/pym/flags.py             |  139 +++++++++-
 gobs/pym/manifest.py          |   45 +++-
 gobs/pym/old_cpv.py           |   51 +++--
 gobs/pym/package.py           |  141 +++++++---
 gobs/pym/querys/__init__.py   |    1 +
 gobs/pym/querys/mysql.py      |    1 +
 gobs/pym/querys/pgsql.py      |  586 +++++++++++++++++++++++++++++++++++++++++
 gobs/pym/readconf.py          |    5 +-
 gobs/pym/repoman_gobs.py      |   17 +-
 gobs/pym/text.py              |   83 ++++---
 15 files changed, 1269 insertions(+), 195 deletions(-)

diff --git a/gobs/pym/ConnectionManager.py b/gobs/pym/ConnectionManager.py
new file mode 100644
index 0000000..7d87702
--- /dev/null
+++ b/gobs/pym/ConnectionManager.py
@@ -0,0 +1,56 @@
+#a simple CM build around sie singleton so there can only be 1 CM but you can call the class in different place with out caring about it.
+#when the first object is created of this class, the SQL settings are read from the file and stored in the class for later reuse by the next object and so on.
+#(maybe later add support for connection pools)
+class connectionManager(object):
+    _instance = None   
+
+		      #size of the connection Pool
+    def __new__(cls, settings_dict, numberOfconnections=20, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super(connectionManager, cls).__new__(cls, *args, **kwargs)
+            #read the sql user/host etc and store it in the local object
+            print settings_dict['sql_host']
+            cls._host=settings_dict['sql_host']
+            cls._user=settings_dict['sql_user']
+            cls._password=settings_dict['sql_passwd']
+            cls._database=settings_dict['sql_db']
+            #shouldnt we include port also?
+            try:
+	      from psycopg2 import pool
+	      cls._connectionNumber=numberOfconnections
+	      #always create 1 connection
+	      cls._pool=pool.ThreadedConnectionPool(1,cls._connectionNumber,host=cls._host,database=cls._database,user=cls._user,password=cls._password)
+	      cls._name='pgsql'
+	      
+	      
+	    except ImportError:
+	      print "Please install a recent version of dev-python/psycopg for Python"
+	      sys.exit(1)
+            #setup connection pool
+        return cls._instance
+    
+    ## returns the name of the database pgsql/mysql etc
+    def getName(self):
+      return self._name
+      
+    def getConnection(self):
+      return self._pool.getconn()
+      
+    def putConnection(self, connection):
+      self._pool.putconn(connection)
+	
+    def closeAllConnections(self):
+      self._pool.closeall()
+
+##how to use this class
+#get a instance of the class (there can only be 1 instance but many pointers (single ton))
+#get the connection
+#conn=cm.getConnection()
+#get a cursor
+#cur=conn.cursor()
+#do stuff
+#cur.execute(stuff)
+#"close a connection" temporarily put it away for reuse
+#cm.putConnection(conn)
+#kill all connections, should only be used just before the program terminates
+#cm.closeAllConnections()

diff --git a/gobs/pym/__init__.py b/gobs/pym/__init__.py
new file mode 100644
index 0000000..e69de29

diff --git a/gobs/pym/arch.py b/gobs/pym/arch.py
index 32feb58..160de7c 100644
--- a/gobs/pym/arch.py
+++ b/gobs/pym/arch.py
@@ -1,19 +1,27 @@
 import portage
 
-class arch(object):
-	
-	def __init__(self, database):
-		self.database = database
-		self.arch_list_tree = portage.archlist
-		self.arch_list_db = database.get_arch_db()
+class gobs_arch(object):
 	
+	def __init__(self, CM):
+		#selective import the pgsql/mysql queries
+		if CM.getName() is 'pgsql':
+			from gobs.querys import pgsql
+			self._dbquerys = pgsql
+		#elif CM.getName() == 'mysql':
+		#	import querys.mysql
+		self._CM = CM
+		self._conn = CM.getConnection()
+
+	def __del__(self):
+		self._CM.putConnection(self._conn)
+
 	def update_arch_db(self):
 		# FIXME: check for new keyword
 		# Add arch db (keywords)
-		if self.arch_list_db is None:
-			arch_list =  self.arch_list_tree
+		if self._dbquerys.get_arch_db(self._conn) is None:
+			arch_list =  portage.archlist
 			for arch in arch_list:
 				if arch[0] not in ["~","-"]:
 					arch_list.append("-" + arch)
 					arch_list.append("-*")
-					self.database.add_new_arch_db(arch_list)
\ No newline at end of file
+					self._dbquerys.add_new_arch_db(self._conn,arch_list)
\ No newline at end of file

diff --git a/gobs/pym/build_log.py b/gobs/pym/build_log.py
new file mode 100644
index 0000000..4f5a801
--- /dev/null
+++ b/gobs/pym/build_log.py
@@ -0,0 +1,150 @@
+import re
+from gobs.text import get_log_text_list
+from gobs.repoman_gobs import gobs_repoman
+import portage
+from gobs.readconf import get_conf_settings
+reader=get_conf_settings()
+gobs_settings_dict=reader.read_gobs_settings_all()
+# make a CM
+from gobs.ConnectionManager import connectionManager
+CM=connectionManager(gobs_settings_dict)
+#selectively import the pgsql/mysql querys
+if CM.getName()=='pgsql':
+	from gobs.querys.pgsql import *
+elif CM.getName()=='mysql':
+	from gobs.querys.mysql import *
+
+class gobs_buildlog(object):
+	
+	def __init__(self, CM, mysettings, build_dict, config_profile):
+		self._mysettings = mysettings
+		self._myportdb = portage.portdbapi(mysettings=self._mysettings)
+		self._build_dict = build_dict
+		self._config_profile = config_profile
+		self._CM = CM
+		self._logfile_text = get_log_text_list(self._mysettings.get("PORTAGE_LOG_FILE"))
+
+	def search_info(self, textline, error_log_list, i):
+		if re.search(" * Package:", textline):
+			print 'Package'
+			error_log_list.append(textline)
+		if re.search(" * Repository:", textline):
+			print 'Repository'
+			error_log_list.append(textline)
+		if re.search(" * Maintainer:", textline):
+			error_log_list.append(textline)
+			print 'Maintainer'
+		if re.search(" * USE:", textline):
+			error_log_list.append(textline)
+			print 'USE'
+		if re.search(" * FEATURES:", textline):
+			error_log_list.append(textline)
+			print 'FEATURES'
+		return error_log_list
+
+	def search_error(self, textline, error_log_list, sum_build_log_list, i):
+		if re.search("Error 1", textline):
+			print 'Error'
+			x = i - 20
+			endline = True
+			error_log_list.append(".....\n")
+			while x != i + 3 and endline:
+				try:
+					error_log_list.append(self._logfile_text[x])
+				except:
+					endline = False
+				else:
+					x = x +1
+		if re.search(" * ERROR:", textline):
+			print 'ERROR'
+			x = i
+			endline= True
+			field = textline.split(" ")
+			sum_build_log_list.append("fail")
+			error_log_list.append(".....\n")
+			while x != i + 10 and endline:
+				try:
+					error_log_list.append(self._logfile_text[x])
+				except:
+					endline = False
+				else:
+					x = x +1
+		return error_log_list, sum_build_log_list
+
+	def search_qa(self, textline, qa_error_list, error_log_list,i):
+		if re.search(" * QA Notice: Package has poor programming", textline):
+			print 'QA Notice'
+			x = i
+			endline= True
+			error_log_list.append(".....\n")
+			while x != i + 3 and endline:
+				try:
+					error_log_list.append(self._logfile_text[x])
+				except:
+					endline = False
+				else:
+					x = x +1
+			qa_error_list.append('QA Notice: Package has poor programming practices')
+			if re.search(" * QA Notice: The following shared libraries lack NEEDED", textline):
+				print 'QA Notice'
+				x = i
+				endline= True
+				error_log_list.append(".....\n")
+				while x != i + 2 and endline:
+					try:
+						error_log_list.append(self._logfile_text[x])
+					except:
+						endline = False
+					else:
+						x = x +1
+				qa_error_list.append('QA Notice: The following shared libraries lack NEEDED entries')
+		return qa_error_list, error_log_list
+
+	def get_buildlog_info(self):
+		init_repoman = gobs_repoman(self._mysettings, self._myportdb)
+		# FIXME to support more errors and stuff
+		i = 0
+		build_log_dict = {}
+		error_log_list = []
+		qa_error_list = []
+		repoman_error_list = []
+		sum_build_log_list = []
+		for textline in self._logfile_text:
+			error_log_list = self.search_info(textline, error_log_list, i)
+			error_log_list, sum_build_log_list = self.search_error(textline, error_log_list, sum_build_log_list, i)
+			qa_error_list, error_log_list = self.search_qa(textline, qa_error_list, error_log_list, i)
+			i = i +1
+		# Run repoman check_repoman()
+		categories = self._build_dict['categories']
+		package = self._build_dict['package']
+		ebuild_version = self._build_dict['ebuild_version']
+		repoman_error_list = init_repoman.check_repoman(categories, package, ebuild_version, self._config_profile)
+		if repoman_error_list != []:
+			sum_build_log_list.append("repoman")
+		if qa_error_list != []:
+			sum_build_log_list.append("qa")
+		build_log_dict['repoman_error_list'] = repoman_error_list
+		build_log_dict['qa_error_list'] = qa_error_list
+		build_log_dict['error_log_list'] = error_log_list
+		build_log_dict['summary_error_list'] = sum_build_log_list
+		return build_log_dict
+
+	def add_buildlog_main(self):
+		conn=self._CM.getConnection()
+		build_log_dict = {}
+		build_log_dict = self.get_buildlog_info()
+		sum_build_log_list = build_log_dict['summary_error_list']
+		error_log_list = build_log_dict['error_log_list']
+		build_error = ""
+		if error_log_list != []:
+			for log_line in error_log_list:
+				build_error = build_error + log_line
+		summary_error = ""
+		if sum_build_log_list != []:
+			for sum_log_line in sum_build_log_list:
+				summary_error = summary_error + " " + sum_log_line
+		print 'summary_error', summary_error
+		logfilename = re.sub("\/var\/log\/portage\/", "",  self._mysettings.get("PORTAGE_LOG_FILE"))
+		build_id = move_queru_buildlog(conn, self._build_dict['queue_id'], build_error, summary_error, logfilename, build_log_dict)
+		# update_qa_repoman(conn, build_id, build_log_dict)
+		print "build_id", build_id, "logged to db."

diff --git a/gobs/pym/categories.py b/gobs/pym/categories.py
index 6613cb2..636f114 100644
--- a/gobs/pym/categories.py
+++ b/gobs/pym/categories.py
@@ -1,20 +1,29 @@
-from gobs.text import gobs_text
+#from gobs.text import gobs_text
+from gobs.text import get_file_text
 import portage
 
 class gobs_categories(object):
 	
-	def __init__(self, database, mysettings):
-		self.database = database
-		self.mysettings = mysettings
-		self.init_text = gobs_text()
+	def __init__(self, CM, mysettings):
+		self._CM = CM
+		self._conn=CM.getConnection()
+		self._mysettings = mysettings
+		if CM.getName() is 'pgsql':
+		  from gobs.querys import pgsql
+		  self.dbquerys = pgsql
+		elif CM.getName() is 'mysql':
+		  import querys.mysql
 	
+	def __del__(self):
+	 self._CM.putConnection(self._conn)
+	  
 	def update_categories_db(self, categories):
 		# Update categories_meta in the db
-		categories_dir = self.mysettings['PORTDIR'] + "/" + categories + "/"
+		categories_dir = self._mysettings['PORTDIR'] + "/" + categories + "/"
 		categories_metadata_xml_checksum_tree = portage.checksum.sha256hash(categories_dir + "metadata.xml")[0]
-		categories_metadata_xml_text_tree = self.init_text.get_file_text(categories_dir + "metadata.xml")
-		categories_metadata_xml_checksum_db = self.database.get_categories_checksum_db(categories)
+		categories_metadata_xml_text_tree = get_file_text(categories_dir + "metadata.xml")
+		categories_metadata_xml_checksum_db = self.dbquerys.get_categories_checksum_db(self._conn, categories)
 		if categories_metadata_xml_checksum_db is None:
-			self.database.add_new_categories_meta_sql(categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree)
+			self.dbquerys.add_new_categories_meta_sql(self._conn,categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree)
 		elif categories_metadata_xml_checksum_db != categories_metadata_xml_checksum_tree:
-			self.database.update_categories_meta_sql(categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree)
+			self.dbquerys.update_categories_meta_sql(self._conn,categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree)

diff --git a/gobs/pym/check_setup.py b/gobs/pym/check_setup.py
index 5b1a0b4..a07215b 100644
--- a/gobs/pym/check_setup.py
+++ b/gobs/pym/check_setup.py
@@ -1,49 +1,95 @@
 import portage
 import os
 import errno
-from gobs.text import gobs_text
+from gobs.text import get_file_text
 
-class check_config(object):
-	
-	def __init__(self, database, gobs_settings_dict):
-		self.database = database
-		self.gobs_settings_dict = gobs_settings_dict
-		self.init_text = gobs_text()
+from gobs.readconf import get_conf_settings
+reader=get_conf_settings()
+gobs_settings_dict=reader.read_gobs_settings_all()
+# make a CM
+from gobs.ConnectionManager import connectionManager
+CM=connectionManager(gobs_settings_dict)
+#selectively import the pgsql/mysql querys
+if CM.getName()=='pgsql':
+	from gobs.querys.pgsql import *
+elif CM.getName()=='mysql':
+		from gobs.querys.mysql import *
 
-	def check_make_conf(self):
-		# FIXME: mark any config updating true in the db when updating the configs
-		# Get the config list
-		config_list_all = self.database.get_config_list_all()
-		print "Checking configs for changes and errors"
-		configsDict = {}
-		for config_id in config_list_all:
-			attDict={}
-			# Set the config dir
-			check_config_dir = self.gobs_settings_dict['host_setup_root'] + "config/" + config_id[0] + "/"
-			make_conf_file = check_config_dir + "etc/portage/make.conf"
-			# Check if we can open the file and close it
-			# Check if we have some error in the file (portage.util.getconfig)
-			# Check if we envorment error with the config (settings.validate)
-			try:
-				open_make_conf = open(make_conf_file)
-				open_make_conf.close()
-				portage.util.getconfig(make_conf_file, tolerant=0, allow_sourcing=False, expand=True)
-				mysettings = portage.config(config_root = check_config_dir)
-				mysettings.validate()
-				# With errors we update the db on the config and disable the config
-			except Exception as e:
-				attDict['config_error'] = e
-				attDict['active'] = 'False'
-				print "Fail", config_id[0]
-			else:
-				attDict['config_error'] = ''
-				attDict['active'] = 'True'
-				print "Pass", config_id[0]
-			# Get the checksum of make.conf
-			make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
-			# Check if we have change the make.conf and update the db with it
-			attDict['make_conf_text'] = self.init_text.get_file_text(make_conf_file)
-			attDict['make_conf_checksum_tree'] = make_conf_checksum_tree
-			configsDict[config_id]=attDict
-		self.database.update__make_conf(configsDict)
-		print "Updated configurtions"
+def check_make_conf(conn, gobs_settings_dict):
+  # FIXME: mark any config updating true in the db when updating the configs
+  # Get the config list
+  ##selective import the pgsql/mysql queries
+  config_list_all = _dbquerys.get_config_list_all(conn)
+  print "Checking configs for changes and errors"
+  configsDict = {}
+  for config_id in config_list_all:
+	  attDict={}
+	  # Set the config dir
+	  check_config_dir = gobs_settings_dict['gobs_root'] + "config/" + config_id[0] + "/"
+	  make_conf_file = check_config_dir + "etc/portage/make.conf"
+	  # Check if we can open the file and close it
+	  # Check if we have some error in the file (portage.util.getconfig)
+	  # Check if we envorment error with the config (settings.validate)
+	  try:
+		  open_make_conf = open(make_conf_file)
+		  open_make_conf.close()
+		  portage.util.getconfig(make_conf_file, tolerant=0, allow_sourcing=False, expand=True)
+		  mysettings = portage.config(config_root = check_config_dir)
+		  mysettings.validate()
+		  # With errors we update the db on the config and disable the config
+	  except Exception as e:
+		  attDict['config_error'] = e
+		  attDict['active'] = 'False'
+		  print "Fail", config_id[0]
+	  else:
+		  attDict['config_error'] = ''
+		  attDict['active'] = 'True'
+		  print "Pass", config_id[0]
+	  # Get the checksum of make.conf
+	  make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
+	  # Check if we have change the make.conf and update the db with it
+	  attDict['make_conf_text'] = get_file_text(make_conf_file)
+	  attDict['make_conf_checksum_tree'] = make_conf_checksum_tree
+	  configsDict[config_id]=attDict
+  _dbquerys.update__make_conf(conn,configsDict)
+  CM.putConnection(conn)
+  print "Updated configurtions"
+
+def check_make_conf_guest(connection, config_profile):
+	make_conf_checksum_db = get_profile_checksum(connection,config_profile)[0]
+	if make_conf_checksum_db is None:
+		return "1"
+	make_conf_file = "/etc/make.conf"
+	make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
+	if make_conf_checksum_tree != make_conf_checksum_db:
+		return "2"
+	# Check if we can open the file and close it
+	# Check if we have some error in the file (portage.util.getconfig)
+	# Check if we envorment error with the config (settings.validate)
+	try:
+		open_make_conf = open(make_conf_file)
+		open_make_conf.close()
+		portage.util.getconfig(make_conf_file, tolerant=0, allow_sourcing=False, expand=True)
+		portage.config()
+		portage.settings.validate()
+		# With errors we return false
+	except Exception as e:
+		return "3"
+	return "4"
+
+def check_configure_guest(connection, config_profile):
+	pass_make_conf = check_make_conf_guest(connection, config_profile)
+	print pass_make_conf
+	if pass_make_conf == "1":
+		# profile not active or updatedb is runing
+		return False
+	elif pass_make_conf == "2":
+		#  update make.conf
+		return False
+	elif pass_make_conf == "3":
+		# set the config as no working
+		make_conf_error(connection,config_profile)
+		return False
+	elif pass_make_conf == "4":
+		# make.conf check OK
+		return True
\ No newline at end of file

diff --git a/gobs/pym/flags.py b/gobs/pym/flags.py
index d41f97c..ba9faf6 100644
--- a/gobs/pym/flags.py
+++ b/gobs/pym/flags.py
@@ -1,9 +1,16 @@
+from _emerge.main import parse_opts
+from _emerge.depgraph import backtrack_depgraph, depgraph, resume_depgraph
+from _emerge.create_depgraph_params import create_depgraph_params
+from _emerge.actions import load_emerge_config
+import portage
+import os
+
 class gobs_use_flags(object):
 	
 	def __init__(self, mysettings, myportdb, cpv):
-		self.mysettings = mysettings
-		self.myportdb = myportdb
-		self.cpv = cpv
+		self._mysettings = mysettings
+		self._myportdb = myportdb
+		self._cpv = cpv
 	
 	def get_iuse(self):
 		"""Gets the current IUSE flags from the tree
@@ -13,7 +20,7 @@ class gobs_use_flags(object):
 		@rtype list
 		@returns [] or the list of IUSE flags
 		"""
-		return self.myportdb.aux_get(self.cpv, ["IUSE"])[0].split()
+		return self._myportdb.aux_get(self._cpv, ["IUSE"])[0].split()
 		
 	def reduce_flag(self, flag):
 		"""Absolute value function for a USE flag
@@ -61,7 +68,7 @@ class gobs_use_flags(object):
 				if f in x:
 					use.remove(x)
 		# clean out any arch's
-		archlist = self.mysettings["PORTAGE_ARCHLIST"].split()
+		archlist = self._mysettings["PORTAGE_ARCHLIST"].split()
 		for a in use[:]:
 			if a in archlist:
 				use.remove(a)
@@ -80,20 +87,34 @@ class gobs_use_flags(object):
 		@return  use, use_expand_hidden, usemask, useforce
 		"""
 		use = None
-		self.mysettings.unlock()
+		self._mysettings.unlock()
 		try:
-			self.mysettings.setcpv(self.cpv, use_cache=None, mydb=self.myportdb)
-			use = self.mysettings['PORTAGE_USE'].split()
-			use_expand_hidden = self.mysettings["USE_EXPAND_HIDDEN"].split()
-			usemask = list(self.mysettings.usemask)
-			useforce =  list(self.mysettings.useforce)
+			self._mysettings.setcpv(self._cpv, use_cache=None, mydb=self._myportdb)
+			use = self._mysettings['PORTAGE_USE'].split()
+			use_expand_hidden = self._mysettings["USE_EXPAND_HIDDEN"].split()
+			usemask = list(self._mysettings.usemask)
+			useforce =  list(self._mysettings.useforce)
 		except KeyError:
-			self.mysettings.reset()
-			self.mysettings.lock()
+			self._mysettings.reset()
+			self._mysettings.lock()
 			return [], [], [], []
 		# reset cpv filter
-		self.mysettings.reset()
-		self.mysettings.lock()
+		self._mysettings.reset()
+		self._mysettings.lock()
+		return use, use_expand_hidden, usemask, useforce
+
+	def get_all_cpv_use_looked(self):
+		"""Uses portage to determine final USE flags and settings for an emerge
+		@type cpv: string
+		@param cpv: eg cat/pkg-ver
+		@rtype: lists
+		@return  use, use_expand_hidden, usemask, useforce
+		"""
+		# use = self._mysettings['PORTAGE_USE'].split()
+		use = os.environ['USE'].split()
+		use_expand_hidden = self._mysettings["USE_EXPAND_HIDDEN"].split()
+		usemask = list(self._mysettings.usemask)
+		useforce = list(self._mysettings.useforce)
 		return use, use_expand_hidden, usemask, useforce
 
 	def get_flags(self):
@@ -113,3 +134,91 @@ class gobs_use_flags(object):
 		#flags = filter_flags(use_flags, use_expand_hidden, usemasked, useforced)
 		final_flags = self.filter_flags(final_use, use_expand_hidden, usemasked, useforced)
 		return iuse_flags, final_flags
+
+	def get_flags_looked(self):
+		"""Retrieves all information needed to filter out hidden, masked, etc.
+		USE flags for a given package.
+
+		@type cpv: string
+		@param cpv: eg. cat/pkg-ver
+		@type final_setting: boolean
+		@param final_setting: used to also determine the final
+		enviroment USE flag settings and return them as well.
+		@rtype: list or list, list
+		@return IUSE or IUSE, final_flags
+		"""
+		final_use, use_expand_hidden, usemasked, useforced = self.get_all_cpv_use_looked()
+		iuse_flags = self.filter_flags(self.get_iuse(), use_expand_hidden, usemasked, useforced)
+		#flags = filter_flags(use_flags, use_expand_hidden, usemasked, useforced)
+		final_flags = self.filter_flags(final_use, use_expand_hidden, usemasked, useforced)
+		return iuse_flags, final_flags
+
+	def get_needed_dep_useflags(self, build_use_flags_list):
+		tmpcmdline = []
+		tmpcmdline.append("-p")
+		tmpcmdline.append("--autounmask")
+		tmpcmdline.append("=" + self._cpv)
+		print tmpcmdline
+		myaction, myopts, myfiles = parse_opts(tmpcmdline, silent=False)
+		print myaction, myopts, myfiles
+		myparams = create_depgraph_params(myopts, myaction)
+		print myparams
+		settings, trees, mtimedb = load_emerge_config()
+		try:
+			success, mydepgraph, favorites = backtrack_depgraph(
+				settings, trees, myopts, myparams, myaction, myfiles, spinner=None)
+			print  success, mydepgraph, favorites
+		except portage.exception.PackageSetNotFound as e:
+			root_config = trees[settings["ROOT"]]["root_config"]
+			display_missing_pkg_set(root_config, e.value)
+			return 1
+		use_changes = None
+		mydepgraph._show_merge_list()
+		mydepgraph.display_problems()
+		if mydepgraph._dynamic_config._needed_use_config_changes:
+			use_changes = {}
+			for pkg, needed_use_config_changes in mydepgraph._dynamic_config._needed_use_config_changes.items():
+				new_use, changes = needed_use_config_changes
+				use_changes[pkg.self._cpv] = changes
+		print use_changes
+		if use_changes is None:
+			return None
+		iteritems_packages = {}
+		for k, v in use_changes.iteritems():
+			k_package = portage.versions.cpv_getkey(k)
+			iteritems_packages[ k_package ] = v
+		print iteritems_packages
+		return iteritems_packages
+							
+	def comper_useflags(self, build_dict):
+		iuse_flags, use_enable = self.get_flags()
+		iuse = []
+		print "use_enable", use_enable
+		build_use_flags_dict = build_dict['build_useflags']
+		print "build_use_flags_dict", build_use_flags_dict
+		build_use_flags_list = []
+		if use_enable == []:
+			if build_use_flags_dict is None:
+				return None
+		for iuse_line in iuse_flags:
+			iuse.append(self.reduce_flag(iuse_line))
+		iuse_flags_list = list(set(iuse))
+		use_disable = list(set(iuse_flags_list).difference(set(use_enable)))
+		use_flagsDict = {}
+		for x in use_enable:
+			use_flagsDict[x] = True
+		for x in use_disable:
+			use_flagsDict[x] = False
+		print "use_flagsDict", use_flagsDict
+		for k, v in use_flagsDict.iteritems():
+			print "tree use flags", k, v
+			print "db use flags", k, build_use_flags_dict[k]
+		if build_use_flags_dict[k] != v:
+			if build_use_flags_dict[k] is True:
+				build_use_flags_list.append(k)
+			if build_use_flags_dict[k] is False:
+				build_use_flags_list.append("-" + k)
+		if build_use_flags_list == []:
+			build_use_flags_list = None
+		print build_use_flags_list
+		return build_use_flags_list

diff --git a/gobs/pym/manifest.py b/gobs/pym/manifest.py
index 140a5d1..fb29f0a 100644
--- a/gobs/pym/manifest.py
+++ b/gobs/pym/manifest.py
@@ -4,14 +4,16 @@ from portage import os, _encodings, _unicode_decode
 from portage.exception import DigestException, FileNotFound
 from portage.localization import _
 from portage.manifest import Manifest
+import portage
 
 class gobs_manifest(object):
 
-	def __init__ (self, mysettings):
-		self.mysettings = mysettings
+	def __init__ (self, mysettings, pkgdir):
+		self._mysettings = mysettings
+		self._pkgdir = pkgdir
 
 	# Copy of portage.digestcheck() but without the writemsg() stuff
-	def digestcheck(self, pkgdir):
+	def digestcheck(self):
 		"""
 		Verifies checksums. Assumes all files have been downloaded.
 		@rtype: int
@@ -20,14 +22,14 @@ class gobs_manifest(object):
 		
 		myfiles = []
 		justmanifest = None
-		self.mysettings['PORTAGE_QUIET'] = '1'
+		self._mysettings['PORTAGE_QUIET'] = '1'
 			
-		if self.mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
+		if self._mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
 			return None
-		manifest_path = os.path.join(pkgdir, "Manifest")
+		manifest_path = os.path.join(self._pkgdir, "Manifest")
 		if not os.path.exists(manifest_path):
 			return ("!!! Manifest file not found: '%s'") % manifest_path
-			mf = Manifest(pkgdir, mysettings["DISTDIR"])
+			mf = Manifest(self._pkgdir, self._mysettings["DISTDIR"])
 			manifest_empty = True
 			for d in mf.fhashdict.values():
 				if d:
@@ -36,7 +38,7 @@ class gobs_manifest(object):
 			if manifest_empty:
 				return ("!!! Manifest is empty: '%s'") % manifest_path
 			try:
-				if  "PORTAGE_PARALLEL_FETCHONLY" not in self.mysettings:
+				if  "PORTAGE_PARALLEL_FETCHONLY" not in self._mysettings:
 					mf.checkTypeHashes("EBUILD")
 					mf.checkTypeHashes("AUX")
 					mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
@@ -51,7 +53,7 @@ class gobs_manifest(object):
 				return ("!!! Digest verification failed: %s\nReason: %s\nGot: %s\nExpected: %s") \
 					 % (e.value[0], e.value[1], e.value[2], e.value[3])
 			# Make sure that all of the ebuilds are actually listed in the Manifest.
-			for f in os.listdir(pkgdir):
+			for f in os.listdir(self._pkgdir):
 				pf = None
 				if f[-7:] == '.ebuild':
 					pf = f[:-7]
@@ -60,7 +62,7 @@ class gobs_manifest(object):
 						% os.path.join(pkgdir, f)
 			""" epatch will just grab all the patches out of a directory, so we have to
 			make sure there aren't any foreign files that it might grab."""
-			filesdir = os.path.join(pkgdir, "files")
+			filesdir = os.path.join(self._pkgdir, "files")
 			for parent, dirs, files in os.walk(filesdir):
 				try:
 					parent = _unicode_decode(parent,
@@ -96,4 +98,27 @@ class gobs_manifest(object):
 						if file_type != "AUX" and not f.startswith("digest-"):
 							return ("!!! A file is not listed in the Manifest: '%s'") \
 								 % os.path.join(filesdir, f)
+		return None
+
+	def check_file_in_manifest(self, portdb, cpv, build_dict, build_use_flags_list):
+		myfetchlistdict = portage.FetchlistDict(self._pkgdir, self._mysettings, portdb)
+		my_manifest = portage.Manifest(self._pkgdir, self._mysettings['DISTDIR'], fetchlist_dict=myfetchlistdict, manifest1_compat=False, from_scratch=False)
+		if my_manifest.findFile(build_dict['package'] + "-" + build_dict['ebuild_version'] + ".ebuild") is None:
+			return "Ebuild file not found."
+		cpv_fetchmap = portdb.getFetchMap(cpv, useflags=build_use_flags_list, mytree=None)
+		self._mysettings.unlock()
+		try:
+			portage.fetch(cpv_fetchmap, self._mysettings, listonly=0, fetchonly=0, locks_in_subdir='.locks', use_locks=1, try_mirrors=1)
+		except:
+			self._mysettings.lock()
+			return "Can't fetch the file."
+		self._mysettings.lock()
+		try:
+			my_manifest.checkCpvHashes(cpv, checkDistfiles=True, onlyDistfiles=False, checkMiscfiles=True)
+		except:
+			return "Can't fetch the file or the hash failed."
+		try:
+			portdb.fetch_check(cpv, useflags=build_use_flags_list, mysettings=self._mysettings, all=False)
+		except:	
+			return "Fetch check failed."
 		return None
\ No newline at end of file

diff --git a/gobs/pym/old_cpv.py b/gobs/pym/old_cpv.py
index 4cd8ec0..9dacd82 100644
--- a/gobs/pym/old_cpv.py
+++ b/gobs/pym/old_cpv.py
@@ -1,14 +1,20 @@
 class gobs_old_cpv(object):
 	
-	def __init__(self, database, myportdb, mysettings):
-		self.database = database
-		self.myportdb = myportdb
-		self.mysettings = mysettings
+	def __init__(self, CM, myportdb, mysettings):
+		self._CM = CM
+		self._mysettings = mysettings
+		self._myportdb = myportdb
+		if CM.getName() is 'pgsql':
+		  from gobs.querys import pgsql
+		  self.dbquerys = pgsql
+		elif CM.getName() is 'mysql':
+		  import querys.mysql
 
 	def mark_old_ebuild_db(self, categories, package, package_id):
-		ebuild_list_tree = sorted(self.myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None))
+		conn=self._CM.getConnection()
+		ebuild_list_tree = sorted(self._myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None))
 		# Get ebuild list on categories, package in the db
-		ebuild_list_db = self.database.cp_list_db(package_id)
+		ebuild_list_db = self.dbquerys.cp_list_db(conn,package_id)
 		# Check if don't have the ebuild in the tree
 		# Add it to the no active list
 		old_ebuild_list = []
@@ -20,18 +26,20 @@ class gobs_old_cpv(object):
 			if  old_ebuild_list != []:
 				for old_ebuild in old_ebuild_list:
 					print "O", categories + "/" + package + "-" + old_ebuild[0]
-				self.database.add_old_ebuild(package_id, old_ebuild_list)
+					self.dbquerys.add_old_ebuild(conn,package_id, old_ebuild_list)
 		# Check if we have older no activ ebuilds then 60 days
-		ebuild_old_list_db = self.database.cp_list_old_db(package_id)
+		ebuild_old_list_db = self.dbquerys.cp_list_old_db(conn,package_id)
 		# Delete older ebuilds in the db
 		if ebuild_old_list_db != []:
 			for del_ebuild_old in ebuild_old_list_db:
 				print "D", categories + "/" + package + "-" + del_ebuild_old[1]
-			self.database.del_old_ebuild(ebuild_old_list_db)
+			self.dbquerys.del_old_ebuild(conn,ebuild_old_list_db)
+		self._CM.putConnection(conn)
 
 	def mark_old_package_db(self, package_id_list_tree):
+		conn=self._CM.getConnection()
 		# Get categories/package list from db
-		package_list_db = self.database.cp_all_db()
+		package_list_db = self.dbquerys.cp_all_db(conn)
 		old_package_id_list = []
 		# Check if don't have the categories/package in the tree
 		# Add it to the no active list
@@ -40,34 +48,37 @@ class gobs_old_cpv(object):
 				old_package_id_list.append(package_line)
 		# Set no active on categories/package and ebuilds in the db that no longer in tree
 		if old_package_id_list != []:
-			mark_old_list = self.database.add_old_package(old_package_id_list)
+			mark_old_list = self.dbquerys.add_old_package(conn,old_package_id_list)
 			if mark_old_list != []:
 				for x in mark_old_list:
-					element = self.database.get_cp_from_package_id(x)
+					element = self.dbquerys.get_cp_from_package_id(conn,x)
 					print "O", element[0]
 			# Check if we have older no activ categories/package then 60 days
-			del_package_id_old_list = self.database.cp_all_old_db(old_package_id_list)
+			del_package_id_old_list = self.dbquerys.cp_all_old_db(conn,old_package_id_list)
 		# Delete older  categories/package and ebuilds in the db
 		if del_package_id_old_list != []:
 			for i in del_package_id_old_list:
-				element = self.database.get_cp_from_package_id(i)
+				element = self.dbquerys.get_cp_from_package_id(conn,i)
 				print "D", element
-			self.database.del_old_package(del_package_id_old_list)
-								
+			self.dbquerys.del_old_package(conn,del_package_id_old_list)
+		self._CM.putConnection(conn)
+		
 	def mark_old_categories_db(self):
+		conn=self._CM.getConnection()
 		# Get categories list from the tree and db
-		categories_list_tree = self.mysettings.categories
-		categories_list_db = self.database.get_categories_db()
+		categories_list_tree = self._mysettings.categories
+		categories_list_db =self.dbquerys.get_categories_db(conn)
 		categories_old_list = []
 		# Check if don't have the categories in the tree
 		# Add it to the no active list
 		for categories_line in categories_list_db:
 			if not categories_line[0] in categories_list_tree:
-				old_c = selfdatabase.get_old_categories(categories_line[0])
+				old_c = self.dbquerys.get_old_categories(conn,categories_line[0])
 				if old_c is not None:
 					categories_old_list.append(categories_line)
 		# Delete older  categories in the db
 		if categories_old_list != []:
 			for real_old_categories in categories_old_list:
-				self.database.del_old_categories(real_old_categoriess)
+				self.dbquerys.del_old_categories(conn,real_old_categories)
 				print "D", real_old_categories
+		self._CM.putConnection(conn)
\ No newline at end of file

diff --git a/gobs/pym/package.py b/gobs/pym/package.py
index ed8d70b..89486e3 100644
--- a/gobs/pym/package.py
+++ b/gobs/pym/package.py
@@ -2,28 +2,33 @@ import portage
 from gobs.flags import gobs_use_flags
 from gobs.repoman_gobs import gobs_repoman
 from gobs.manifest import gobs_manifest
-from gobs.text import gobs_text
+from gobs.text import get_file_text, get_ebuild_text
 from gobs.old_cpv import gobs_old_cpv
 
 class gobs_package(object):
 
-	def __init__(self, mysettings, database, myportdb, gobs_settings_dict):
-		self.mysettings = mysettings
-		self.database = database
-		self.gobs_settings_dict = gobs_settings_dict
-		self.myportdb = myportdb
-		self.init_text = gobs_text()
+	def __init__(self, mysettings, CM, myportdb, gobs_settings_dict):
+		self._mysettings = mysettings
+		self._CM = CM
+		self._gobs_settings_dict = gobs_settings_dict
+		self._myportdb = myportdb
+		if CM.getName() == 'pgsql':
+		  from gobs.querys import pgsql
+		  self._dbquerys = pgsql
+		elif CM.getName() == 'mysql':
+		  from gobs.querys import mysql
 
 	def change_config(self, config_id):
 		# Change config_root  config_id = table configs.id
-		my_new_setup = self.gobs_settings_dict['host_setup_root'] +"config/" + config_id + "/"
+		my_new_setup = self._gobs_settings_dict['gobs_root'] +"config/" + config_id + "/"
 		mysettings_setup = portage.config(config_root = my_new_setup)
 		return mysettings_setup
 
 	def config_match_ebuild(self, categories, package):
+		conn=self._CM.getConnection()
 		config_cpv_listDict ={}
 		# Get a list from table configs/setups with default_config=Fales and active = True
-		config_list_all  = self.database.get_config_list()
+		config_list_all  = self._dbquerys.get_config_list(conn)
 		if config_list_all is ():
 			return config_cpv_listDict
 		for i in config_list_all:
@@ -59,12 +64,13 @@ class gobs_package(object):
 			# Clean some cache
 			myportdb_setup.close_caches()
 			portage.portdbapi.portdbapi_instances.remove(myportdb_setup)
+		self._CM.putConnection(conn)
 		return config_cpv_listDict
 
 	def get_ebuild_metadata(self, ebuild_line):
 		# Get the auxdbkeys infos for the ebuild
 		try:
-			ebuild_auxdb_list = self.myportdb.aux_get(ebuild_line, portage.auxdbkeys)
+			ebuild_auxdb_list = self._myportdb.aux_get(ebuild_line, portage.auxdbkeys)
 		except:
 			ebuild_auxdb_list = []
 		else:
@@ -77,8 +83,8 @@ class gobs_package(object):
 		attDict = {}
 		ebuild_version_tree = portage.versions.cpv_getversion(ebuild_line)
 		ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild")[0]
-		ebuild_version_text = self.init_text.get_ebuild_text(pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild")
-		init_repoman = gobs_repoman(self.mysettings, self.myportdb, self.database)
+		ebuild_version_text = get_ebuild_text(pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild")
+		init_repoman = gobs_repoman(self._mysettings, self._myportdb)
 		repoman_error = init_repoman.check_repoman(categories, package, ebuild_version_tree, config_id)
 		ebuild_version_metadata_tree = self.get_ebuild_metadata(ebuild_line)
 		# if there some error to get the metadata we add rubish to the 
@@ -121,6 +127,7 @@ class gobs_package(object):
 		return metadataDict
 
 	def add_new_ebuild_buildquery_db(self, ebuild_id_list, packageDict, config_cpv_listDict):
+		conn=self._CM.getConnection()
 		# Get the needed info from packageDict and config_cpv_listDict and put that in buildqueue
 		# Only add it if ebuild_version in packageDict and config_cpv_listDict match
 		if config_cpv_listDict != {}:
@@ -149,18 +156,46 @@ class gobs_package(object):
 						use_enable_list.append(s)
 					# Comper ebuild_version and add the ebuild_version to buildqueue
 					if portage.vercmp(v['ebuild_version_tree'], latest_ebuild_version) == 0:
-						self.database.add_new_package_buildqueue(ebuild_id, config_id, use_flags_list, use_enable_list, message)
+						self._dbquerys.add_new_package_buildqueue(conn,ebuild_id, config_id, use_flags_list, use_enable_list, message)
 						print "B",  config_id, v['categories'] + "/" + v['package'] + "-" + latest_ebuild_version, "USE:", use_enable	# B = Build config cpv use-flags
 					i = i +1
+		self._CM.putConnection(conn)
+
+	def add_new_ebuild_buildquery_db_looked(self, build_dict, config_profile):
+		conn=self._CM.getConnection()
+		myportdb = portage.portdbapi(mysettings=self._mysettings)
+		cpv = build_dict['cpv']
+		message = None
+		init_useflags = gobs_use_flags(self._mysettings, myportdb, cpv)
+		iuse_flags_list, final_use_list = init_useflags.get_flags_looked()
+		iuse = []
+		use_flags_list = []
+		use_enable_list = []
+		for iuse_line in iuse_flags_list:
+			iuse.append(init_useflags.reduce_flag(iuse_line))
+		iuse_flags_list2 = list(set(iuse))
+		use_enable = final_use_list
+		use_disable = list(set(iuse_flags_list2).difference(set(use_enable)))
+		use_flagsDict = {}
+		for x in use_enable:
+			use_flagsDict[x] = True
+		for x in use_disable:
+			use_flagsDict[x] = False
+		for u, s in  use_flagsDict.iteritems():
+			use_flags_list.append(u)
+			use_enable_list.append(s)
+		ebuild_id = self._dbquerys.get_ebuild_id_db_checksum(conn, build_dict)
+		self._dbquerys.add_new_package_buildqueue(conn, ebuild_id, config_profile, use_flags_list, use_enable_list, message)
+		self._CM.putConnection(conn)
 
 	def get_package_metadataDict(self, pkgdir, package):
 		# Make package_metadataDict
 		attDict = {}
 		package_metadataDict = {}
 		changelog_checksum_tree = portage.checksum.sha256hash(pkgdir + "/ChangeLog")
-		changelog_text_tree = self.init_text.get_file_text(pkgdir + "/ChangeLog")
+		changelog_text_tree = get_file_text(pkgdir + "/ChangeLog")
 		metadata_xml_checksum_tree = portage.checksum.sha256hash(pkgdir + "/metadata.xml")
-		metadata_xml_text_tree = self.init_text.get_file_text(pkgdir + "/metadata.xml")
+		metadata_xml_text_tree = get_file_text(pkgdir + "/metadata.xml")
 		attDict['changelog_checksum'] =  changelog_checksum_tree[0]
 		attDict['changelog_text'] =  changelog_text_tree
 		attDict['metadata_xml_checksum'] =  metadata_xml_checksum_tree[0]
@@ -169,62 +204,65 @@ class gobs_package(object):
 		return package_metadataDict
 
 	def add_new_package_db(self, categories, package):
+		conn=self._CM.getConnection()
 		# add new categories package ebuild to tables package and ebuilds
 		print "N", categories + "/" + package				# N = New Package
-		pkgdir = self.mysettings['PORTDIR'] + "/" + categories + "/" + package		# Get PORTDIR + cp
-		categories_dir = self.mysettings['PORTDIR'] + "/" + categories + "/"
+		pkgdir = self._mysettings['PORTDIR'] + "/" + categories + "/" + package		# Get PORTDIR + cp
+		categories_dir = self._mysettings['PORTDIR'] + "/" + categories + "/"
 		# Get the ebuild list for cp
-		ebuild_list_tree = self.myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None)
+		ebuild_list_tree = self._myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None)
 		config_cpv_listDict = self.config_match_ebuild(categories, package)
-		config_id  = self.database.get_default_config()[0]
+		config_id  = self._dbquerys.get_default_config(conn)
 		packageDict ={}
 		for ebuild_line in sorted(ebuild_list_tree):
 			# Make the needed packageDict
 			packageDict[ebuild_line] = self.get_packageDict(pkgdir, ebuild_line, categories, package, config_id)
 			# Add the ebuild to db
-			return_id = self.database.add_new_package_sql(packageDict)
+			return_id = self._dbquerys.add_new_package_sql(conn,packageDict)
 			ebuild_id_list = return_id[0]
 			package_id_list = return_id[1]
 			package_id = package_id_list[0]
 			# Add metadataDict to db
 			metadataDict = self.get_metadataDict(packageDict, ebuild_id_list)
-			self.database.add_new_metadata(metadataDict)
+			self._dbquerys.add_new_metadata(conn,metadataDict)
 			# Add any qa and repoman erro for the ebuild to buildlog
 			qa_error = []
-			init_manifest =  gobs_manifest(self.mysettings)
+			init_manifest =  gobs_manifest(self._mysettings)
 			manifest_error = init_manifest.digestcheck(pkgdir)
 			if manifest_error is not None:
 				qa_error.append(manifest_error)
 				print "QA:", categories + "/" + package, qa_error
-			self.database.add_qa_repoman(ebuild_id_list, qa_error, packageDict, config_id)
+			self._dbquerys.add_qa_repoman(conn,ebuild_id_list, qa_error, packageDict, config_id)
 			# Add the ebuild to the buildqueru table if needed
 			self.add_new_ebuild_buildquery_db(ebuild_id_list, packageDict, config_cpv_listDict)
 			# Add some checksum on some files
 			package_metadataDict = self.get_package_metadataDict(pkgdir, package)
-			self.database.add_new_package_metadata(package_id, package_metadataDict)
+			self._dbquerys.add_new_package_metadata(conn,package_id, package_metadataDict)
 			# Add the manifest file to db
 			manifest_checksum_tree = portage.checksum.sha256hash(pkgdir + "/Manifest")[0]
-			get_manifest_text = self.init_text.get_file_text(pkgdir + "/Manifest")
-			self.database.add_new_manifest_sql(package_id, get_manifest_text, manifest_checksum_tree)
+			get_manifest_text = get_file_text(pkgdir + "/Manifest")
+			self._dbquerys.add_new_manifest_sql(conn,package_id, get_manifest_text, manifest_checksum_tree)
+		self._CM.putConnection(conn)
 		return package_id
 
 	def update_package_db(self, categories, package, package_id):
+		conn=self._CM.getConnection()
 		# Update the categories and package with new info
-		pkgdir = self.mysettings['PORTDIR'] + "/" + categories + "/" + package		# Get PORTDIR with cp
+		pkgdir = self._mysettings['PORTDIR'] + "/" + categories + "/" + package		# Get PORTDIR with cp
 		# Get the checksum from the file in portage tree
 		manifest_checksum_tree = portage.checksum.sha256hash(pkgdir + "/Manifest")[0]
 		# Get the checksum from the db in package table
-		manifest_checksum_db = self.database.get_manifest_db(package_id)
+		manifest_checksum_db = self._dbquerys.get_manifest_db(conn,package_id)
 		# if we have the same checksum return else update the package
-		ebuild_list_tree = self.myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None)
+		ebuild_list_tree = self._myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=None)
 		if manifest_checksum_tree != manifest_checksum_db:
 			print "U", categories + "/" + package		# U = Update
 			# Get package_metadataDict and update the db with it
 			package_metadataDict = self.get_package_metadataDict(pkgdir, package)
-			self.database.update_new_package_metadata(package_id, package_metadataDict)
+			self._dbquerys.update_new_package_metadata(conn,package_id, package_metadataDict)
 			# Get config_cpv_listDict
 			config_cpv_listDict = self.config_match_ebuild(categories, package)
-			config_id  = self.database.get_default_config()
+			config_id  = self._dbquerys.get_default_config(conn)
 			packageDict ={}
 			for ebuild_line in sorted(ebuild_list_tree):
 				old_ebuild_list = []
@@ -232,7 +270,7 @@ class gobs_package(object):
 				ebuild_version_tree = portage.versions.cpv_getversion(ebuild_line)
 				# Get the checksum of the ebuild in tree and db
 				ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild")[0]
-				ebuild_version_manifest_checksum_db = self.database.get_ebuild_checksum(package_id, ebuild_version_tree)
+				ebuild_version_manifest_checksum_db = self._dbquerys.get_ebuild_checksum(conn,package_id, ebuild_version_tree)
 				# Check if the checksum have change
 				if ebuild_version_manifest_checksum_db is None or ebuild_version_checksum_tree != ebuild_version_manifest_checksum_db:
 					# Get packageDict for ebuild
@@ -243,26 +281,47 @@ class gobs_package(object):
 						print "U", categories + "/" + package + "-" + ebuild_version_tree	# U = Updated ebuild
 						# Fix so we can use add_new_package_sql(packageDict) to update the ebuilds
 						old_ebuild_list.append(ebuild_version_tree)
-						self.database.add_old_ebuild(package_id, old_ebuild_list)
-						self.database.update_active_ebuild(package_id, ebuild_version_tree)
+						self._dbquerys.add_old_ebuild(conn,package_id, old_ebuild_list)
+						self._dbquerys.update_active_ebuild(conn,package_id, ebuild_version_tree)
 			# Use packageDictand and metadataDict to update the db
-			return_id = self.database.add_new_package_sql(packageDict)
+			return_id = self._dbquerys.add_new_package_sql(conn,packageDict)
 			ebuild_id_list = return_id[0]
 			metadataDict = self.get_metadataDict(packageDict, ebuild_id_list)
-			self.database.add_new_metadata(metadataDict)
+			self._dbquerys.add_new_metadata(conn,metadataDict)
 			# Get the text in Manifest and update it
-			get_manifest_text = self.init_text.get_file_text(pkgdir + "/Manifest")
-			self.database.update_manifest_sql(package_id, get_manifest_text, manifest_checksum_tree)
+			get_manifest_text = get_file_text(pkgdir + "/Manifest")
+			self._dbquerys.update_manifest_sql(conn,package_id, get_manifest_text, manifest_checksum_tree)
 			# Add any qa and repoman erros to buildlog
 			qa_error = []
-			init_manifest =  gobs_manifest(self.mysettings)
+			init_manifest =  gobs_manifest(self._mysettings)
 			manifest_error = init_manifest.digestcheck(pkgdir)
 			if manifest_error is not None:
 				qa_error.append(manifest_error)
 				print "QA:", categories + "/" + package, qa_error
-			self.database.add_qa_repoman(ebuild_id_list, qa_error, packageDict, config_id)
+			self._dbquerys.add_qa_repoman(conn,ebuild_id_list, qa_error, packageDict, config_id)
 			# Add the ebuild to the buildqueru table if needed
 			self.add_new_ebuild_buildquery_db(ebuild_id_list, packageDict, config_cpv_listDict)
 			# Mark or remove any old ebuilds
-			init_old_cpv = gobs_old_cpv(self.database, self.myportdb, self.mysettings)
+			init_old_cpv = gobs_old_cpv(self._CM, self._myportdb, self._mysettings)
 			init_old_cpv.mark_old_ebuild_db(categories, package, package_id)
+		self._CM.putConnection(conn)
+
+	def update_ebuild_db(self, build_dict):
+		conn=self._CM.getConnection()
+		config_id = build_dict['config_profile']
+		categories = build_dict['categories']
+		package = build_dict['package']
+		package_id = build_dict['package_id']
+		cpv = build_dict['cpv']
+		ebuild_version_tree = build_dict['ebuild_version']
+		pkgdir = self._mysettings['PORTDIR'] + "/" + categories + "/" + package		# Get PORTDIR with cp
+		packageDict ={}
+		ebuild_version_manifest_checksum_db = self._dbquerys.get_ebuild_checksum(conn,package_id, ebuild_version_tree)
+		packageDict[cpv] = self.get_packageDict(pkgdir, cpv, categories, package, config_id)
+		old_ebuild_list = []
+		if ebuild_version_manifest_checksum_db is not None:
+			old_ebuild_list.append(ebuild_version_tree)
+			self._dbquerys.add_old_ebuild(conn,package_id, old_ebuild_list)
+			self._dbquerys.update_active_ebuild(conn,package_id, ebuild_version_tree)
+		return_id = self._dbquerys.add_new_package_sql(conn,packageDict)
+		self._CM.putConnection(conn)
\ No newline at end of file

diff --git a/gobs/pym/querys/__init__.py b/gobs/pym/querys/__init__.py
new file mode 100644
index 0000000..8d1c8b6
--- /dev/null
+++ b/gobs/pym/querys/__init__.py
@@ -0,0 +1 @@
+ 

diff --git a/gobs/pym/querys/mysql.py b/gobs/pym/querys/mysql.py
new file mode 100644
index 0000000..436fa30
--- /dev/null
+++ b/gobs/pym/querys/mysql.py
@@ -0,0 +1 @@
+#should contain the same functions as in pgsql.py but for mysql 

diff --git a/gobs/pym/querys/pgsql.py b/gobs/pym/querys/pgsql.py
new file mode 100644
index 0000000..ce22032
--- /dev/null
+++ b/gobs/pym/querys/pgsql.py
@@ -0,0 +1,586 @@
+#every function takes a connection as a parameter that is provided by the CM
+def get_default_config(connection):
+	cursor = connection.cursor()
+	sqlQ = 'SELECT id FROM configs WHERE default_config = True'
+	cursor.execute(sqlQ)
+	return cursor.fetchone()
+
+def get_profile_checksum(connection, config_profile):
+    cursor = connection.cursor()
+    sqlQ = "SELECT make_conf_checksum FROM configs WHERE active = 'True' AND id = %s AND updateing = 'False'"
+    cursor.execute(sqlQ, (config_profile,))
+    return cursor.fetchone()
+  
+def get_package_to_build(connection, config_profile, queue_id, new):
+  cursor =connection.cursor()
+  print "queue_id config_profile", queue_id, config_profile
+  # get what to build
+  if new is True:
+    sqlQ1 = "SELECT ebuild_id, post_message FROM buildqueue WHERE config = %s AND queue_id = %s"
+  else:
+    sqlQ1 = "SELECT ebuild_id, post_message FROM buildqueue WHERE config = %s AND extract(epoch from (NOW()) - timestamp) > 7200 AND queue_id = %s"
+  # get use flags to use
+  sqlQ2 = "SELECT useflag, enabled FROM ebuildqueuedwithuses WHERE queue_id = %s"
+  # get ebuild version (v) and package id
+  sqlQ3 = 'SELECT ebuild_version, package_id, ebuild_checksum FROM ebuilds WHERE id = %s'
+  # get categoriy and package (c, p)
+  sqlQ4 ='SELECT category, package_name FROM packages WHERE package_id = %s'
+  build_dict = {}
+  build_useflags_dict = {}
+  cursor.execute(sqlQ1, (config_profile, queue_id))
+  entries = cursor.fetchone()
+  print "sqlQ1", entries
+  if entries is None:
+    build_dict['ebuild_id'] = None
+    build_dict['post_message'] = None
+    return build_dict
+  build_dict['queue_id'] = queue_id
+  ebuild_id = entries[0]
+  build_dict['ebuild_id'] = ebuild_id
+  build_dict['post_message'] = entries[1]
+  cursor.execute(sqlQ2, (queue_id,))
+  entries = cursor.fetchall()
+  print 'build_useflags', entries
+  if entries == []:
+    build_useflags = None
+  else:
+    build_useflags = entries
+  cursor.execute(sqlQ3, (ebuild_id,))
+  entries = cursor.fetchall()[0]
+  build_dict['ebuild_version'] = entries[0]
+  build_dict['package_id'] = entries[1]
+  build_dict['ebuild_checksum'] = entries[2]
+  cursor.execute(sqlQ4, (entries[1],))
+  entries = cursor.fetchall()[0]
+  build_dict['categories'] = entries[0]
+  build_dict['package'] = entries[1]
+  if build_useflags is None:
+    build_dict['build_useflags'] = None
+  else:
+    for x in build_useflags:
+      build_useflags_dict[x[0]] = x[1]
+      build_dict['build_useflags'] = build_useflags_dict
+  print "build_dict", build_dict
+  return build_dict
+
+def check_revision(connection, build_dict, config_profile, ebuild_version_checksum_tree):
+  cursor = connection.cursor()
+  build_useflags = []
+  sqlQ1 = 'SELECT id FROM ebuilds WHERE ebuild_version = %s AND ebuild_checksum = %s AND package_id = %s'
+  sqlQ2 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s AND config = %s'
+  sqlQ3 = "SELECT useflag FROM ebuildqueuedwithuses WHERE queue_id = %s AND enabled = 'True'"
+  cursor.execute(sqlQ1, (build_dict['ebuild_version'], ebuild_version_checksum_tree, build_dict['package_id']))
+  ebuild_id = cursor.fetchone()
+  print "ebuild_id", ebuild_id
+  if ebuild_id is None:
+    return None
+  cursor.execute(sqlQ2, (ebuild_id[0], config_profile))
+  queue_id_list = cursor.fetchall()
+  print 'queue_id_list',  queue_id_list 
+  if queue_id_list == []:
+    return None
+  for queue_id in queue_id_list[0]:
+    print 'queue_id', queue_id
+    cursor.execute(sqlQ3, (queue_id,))
+    entries = cursor.fetchall()
+    if entries == []:
+      build_useflags = None
+    else:
+      for use_line in sorted(entries):
+	      build_useflags.append(unicode(use_line[0]))
+    print "build_useflags build_dict['build_useflags']", build_useflags, build_dict['build_useflags']
+    if build_useflags == build_dict['build_useflags']:
+      print 'queue_id', queue_id
+      return queue_id
+  return None
+
+def get_config_list(connection):
+  cursor = connection.cursor()
+  sqlQ = 'SELECT id FROM configs WHERE default_config = False AND active = True'
+  cursor.execute(sqlQ)
+  return cursor.fetchall()
+
+def get_config_list_all(connection):
+  cursor = connection.cursor()
+  sqlQ = 'SELECT id FROM configs'
+  cursor.execute(sqlQ)
+  return cursor.fetchall()
+
+def update__make_conf(connection, configsDict):
+  cursor = connection.cursor()
+  sqlQ = 'UPDATE configs SET make_conf_checksum = %s, make_conf_text = %s, active = %s, config_error = %s WHERE id = %s'
+  for k, v in configsDict.iteritems():
+    params = [v['make_conf_checksum_tree'], v['make_conf_text'], v['active'], v['config_error'], k]
+    cursor.execute(sqlQ, params)
+  connection.commit()
+
+def have_package_db(connection, categories, package):
+  cursor = connection.cursor()
+  sqlQ ='SELECT package_id FROM packages WHERE category = %s AND package_name = %s'
+  params = categories, package
+  cursor.execute(sqlQ, params)
+  return cursor.fetchone()
+  
+def have_activ_ebuild_id(connection, ebuild_id):
+	cursor = connection.cursor()
+	sqlQ = 'SELECT ebuild_checksum FROM ebuilds WHERE id = %s AND active = TRUE'
+	cursor.execute(sqlQ, (ebuild_id,))
+	entries = cursor.fetchone()
+	if entries is None:
+		return None
+	# If entries is not None we need [0]
+	return entries[0]
+
+def get_categories_db(connection):
+  cursor = connection.cursor()
+  sqlQ =' SELECT category FROM categories'
+  cursor.execute(sqlQ)
+  return cursor.fetchall()
+
+def get_categories_checksum_db(connection, categories):
+  cursor = connection.cursor()
+  sqlQ =' SELECT metadata_xml_checksum FROM categories_meta WHERE category = %s'
+  cursor.execute(sqlQ, (categories,))
+  return cursor.fetchone()
+
+def add_new_categories_meta_sql(connection, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
+  cursor = connection.cursor()
+  sqlQ = 'INSERT INTO categories_meta (category, metadata_xml_checksum, metadata_xml_text) VALUES  ( %s, %s, %s )'
+  params = categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree
+  cursor.execute(sqlQ, params)
+  connection.commit()
+
+def update_categories_meta_sql(connection, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
+  cursor = connection.cursor()
+  sqlQ ='UPDATE categories_meta SET metadata_xml_checksum = %s, metadata_xml_text = %s WHERE category = %s'
+  params = (categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree, categories)
+  cursor.execute(sqlQ, params)
+  connection.commit()
+
+def add_new_manifest_sql(connection, package_id, get_manifest_text, manifest_checksum_tree):
+  cursor = connection.cursor()
+  sqlQ = 'INSERT INTO manifest (package_id, manifest, checksum) VALUES  ( %s, %s, %s )'
+  params = package_id, get_manifest_text, manifest_checksum_tree
+  cursor.execute(sqlQ, params)
+  connection.commit()
+
+def add_new_package_metadata(connection, package_id, package_metadataDict):
+  cursor = connection.cursor()
+  sqlQ = 'SELECT changelog_checksum FROM packages_meta WHERE package_id = %s'
+  cursor.execute(sqlQ, (package_id,))
+  if cursor.fetchone() is None:
+    sqlQ = 'INSERT INTO packages_meta (package_id, changelog_text, changelog_checksum, metadata_text, metadata_checksum) VALUES  ( %s, %s, %s, %s, %s )'
+    for k, v in package_metadataDict.iteritems():
+      params = package_id, v['changelog_text'], v['changelog_checksum'], v['metadata_xml_text'], v['metadata_xml_checksum']
+      cursor.execute(sqlQ, params)
+    connection.commit()
+
+def update_new_package_metadata(connection, package_id, package_metadataDict):
+  cursor = connection.cursor()
+  sqlQ = 'SELECT changelog_checksum, metadata_checksum FROM packages_meta WHERE package_id = %s'
+  cursor.execute(sqlQ, (package_id,))
+  entries = cursor.fetchone()
+  if entries is None:
+    changelog_checksum_db = None
+    metadata_checksum_db = None
+  else:
+    changelog_checksum_db = entries[0]
+    metadata_checksum_db = entries[1]
+  for k, v in package_metadataDict.iteritems():
+    if changelog_checksum_db != v['changelog_checksum']:
+      sqlQ = 'UPDATE packages_meta SET changelog_text = %s, changelog_checksum = %s WHERE package_id = %s'
+      params = v['changelog_text'], v['changelog_checksum'], package_id
+      cursor.execute(sqlQ, params)
+    if metadata_checksum_db != v['metadata_xml_checksum']:
+      sqlQ = 'UPDATE packages_meta SET metadata_text = %s, metadata_checksum = %s WHERE package_id = %s'
+      params = v['metadata_xml_text'], v['metadata_xml_checksum'], package_id
+      cursor.execute(sqlQ, params)
+  connection.commit()
+
+def get_manifest_db(connection, package_id):
+  cursor = connection.cursor()
+  sqlQ = 'SELECT checksum FROM manifest WHERE package_id = %s'
+  cursor.execute(sqlQ, (package_id,))
+  entries = cursor.fetchone()
+  if entries is None:
+	  return None
+  # If entries is not None we need [0]
+  return entries[0]
+
+def update_manifest_sql(connection, package_id, get_manifest_text, manifest_checksum_tree):
+  cursor = connection.cursor()
+  sqlQ = 'UPDATE manifest SET checksum = %s, manifest = %s WHERE package_id = %s'
+  params = (manifest_checksum_tree, get_manifest_text, package_id)
+  cursor.execute(sqlQ, params)
+  connection.commit()
+
+def add_new_metadata(connection, metadataDict):
+  cursor = connection.cursor()
+  for k, v in metadataDict.iteritems():
+    #moved the cursor out side of the loop
+    sqlQ = 'SELECT updaterestrictions( %s, %s )'
+    params = k, v['restrictions']
+    cursor.execute(sqlQ, params)
+    sqlQ = 'SELECT updatekeywords( %s, %s )'
+    params = k, v['keyword']
+    cursor.execute(sqlQ, params)
+    sqlQ = 'SELECT updateiuse( %s, %s )'
+    params = k, v['iuse']
+    cursor.execute(sqlQ, params)
+  connection.commit()
+
+def add_new_package_sql(connection, packageDict):
+  #lets have a new cursor for each metod as per best practice
+  cursor = connection.cursor()
+  sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
+  ebuild_id_list = []
+  package_id_list = []
+  for k, v in packageDict.iteritems():
+    params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
+    v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
+    v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
+    v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
+    v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
+    v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
+    v['ebuild_version_metadata_tree'][16]]
+    cursor.execute(sqlQ, params)
+    mid = cursor.fetchone()
+    mid=mid[0]
+    ebuild_id_list.append(mid[1])
+    package_id_list.append(mid[0])
+  connection.commit()
+  # add_new_metadata(metadataDict)
+  return ebuild_id_list, package_id_list
+
+def add_new_ebuild_sql(connection, packageDict, new_ebuild_list):
+  #lets have a new cursor for each metod as per best practice
+  cursor = connection.cursor()
+  sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
+  ebuild_id_list = []
+  package_id_list = []
+  for k, v in packageDict.iteritems():
+    for x in new_ebuild_list:
+      if x == v['ebuild_version_tree']:
+	params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
+	v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
+	v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
+	v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
+	v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
+	v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
+	v['ebuild_version_metadata_tree'][16]]
+	cursor.execute(sqlQ, params)
+	mid = cursor.fetchone()
+	mid=mid[0]
+	ebuild_id_list.append(mid[1])
+	package_id_list.append(mid[0])
+  connection.commit()
+  # add_new_metadata(metadataDict)
+  return ebuild_id_list, package_id_list
+
+def update_active_ebuild(connection, package_id, ebuild_version_tree):
+  cursor = connection.cursor()
+  sqlQ ="UPDATE ebuilds SET active = 'False', timestamp = now() WHERE package_id = %s AND ebuild_version = %s AND active = 'True'"
+  cursor.execute(sqlQ, (package_id, ebuild_version_tree))
+  connection.commit()
+
+def get_ebuild_id_db(connection, categories, package, ebuild_version_tree):
+	cursor = connection.cursor()
+	sqlQ ='SELECT id FROM packages WHERE category = %s AND ebuild_name = %s AND ebuild_version = %s'
+	cursor.execute(sqlQ, (categories, package, ebuild_version_tree))
+	entries = cursor.fetchone()
+	return entries
+
+def get_ebuild_id_db_checksum(connection, build_dict):
+	cursor = connection.cursor()
+	sqlQ = 'SELECT id FROM ebuilds WHERE ebuild_version = %s AND ebuild_checksum = %s AND package_id = %s'
+	cursor.execute(sqlQ, (build_dict['ebuild_version'], build_dict['checksum'], build_dict['package_id']))
+	ebuild_id = cursor.fetchone()
+	print 'ebuild_id', ebuild_id
+	if ebuild_id is None:
+		return None
+	return ebuild_id
+
+def get_cpv_from_ebuild_id(connection, ebuild_id):
+	cursor = connection.cursor()
+	#wasent used
+	#sqlQ = 'SELECT package_id FROM ebuild WHERE id = %s'
+	sqlQ='SELECT category, ebuild_name, ebuild_version FROM packages WHERE id = %s'
+	cursor.execute(sqlQ, (ebuild_id,))
+	entries = cursor.fetchone()
+	return entries
+
def get_cp_from_package_id(connection, package_id):
	"""Return the single 'category/package_name' row for package_id (or None)."""
	query = "SELECT ARRAY_TO_STRING(ARRAY[category, package_name] , '/') AS cp FROM packages WHERE package_id = %s"
	cur = connection.cursor()
	cur.execute(query, (package_id,))
	return cur.fetchone()
+
def get_keyword_id_db(connection, arch, stable):
	"""Return the keywords-table id row for (arch, stable), or None if absent."""
	cur = connection.cursor()
	cur.execute('SELECT id_keyword FROM keywords WHERE ARCH = %s AND stable = %s', (arch, stable))
	# Only the first matching row is returned (the original author questioned
	# this single-row behaviour too).
	return cur.fetchone()
+	
def add_new_keywords(connection, ebuild_id, keyword_id):
	"""Link an ebuild to a keyword in keywordsToEbuild and commit."""
	cur = connection.cursor()
	cur.execute('INSERT INTO keywordsToEbuild (ebuild_id, id_keyword) VALUES  ( %s, %s )',
		(ebuild_id, keyword_id))
	connection.commit()
+		
def have_package_buildqueue(connection, ebuild_id, config_id):
	"""Return the queued useflags row for ebuild_id[0]/config_id, or None.

	ebuild_id is expected to be a sequence; only its first element is used.
	"""
	cur = connection.cursor()
	cur.execute('SELECT useflags FROM buildqueue WHERE  ebuild_id = %s  AND config_id = %s',
		(ebuild_id[0], config_id))
	return cur.fetchone()
+
def add_new_package_buildqueue(connection, ebuild_id, config_id, iuse_flags_list, use_enable, message):
	"""Queue a build via the insert_buildqueue() stored procedure and commit.

	An empty/falsy iuse_flags_list is normalised to None together with
	use_enable before being handed to the database.
	"""
	cur = connection.cursor()
	if not iuse_flags_list:
		iuse_flags_list = None
		use_enable = None
	cur.execute("SELECT insert_buildqueue( %s, %s, %s, %s, %s )",
		(ebuild_id, unicode(config_id), iuse_flags_list, use_enable, message))
	connection.commit()
+  
def get_ebuild_checksum(connection, package_id, ebuild_version_tree):
	"""Return the checksum string of the active ebuild for (package_id, version),
	or None when no active row exists.

	Fix vs. the original: the body mixed tabs and spaces inside the if-block,
	which is a TabError under Python 3 (and python -tt); indentation is now
	uniform.
	"""
	cursor = connection.cursor()
	sqlQ = 'SELECT ebuild_checksum FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = TRUE'
	cursor.execute(sqlQ, (package_id, ebuild_version_tree))
	entries = cursor.fetchone()
	if entries is None:
		return None
	# A matching row is a one-element tuple; unpack the checksum itself.
	return entries[0]
+
def cp_all_db(connection):
	"""Return every package_id row from the packages table."""
	cur = connection.cursor()
	cur.execute("SELECT package_id FROM packages")
	return cur.fetchall()
+
def add_old_package(connection, old_package_list):
	"""Deactivate all still-active ebuilds of each listed package.

	old_package_list holds row tuples whose first element is the package id.
	Returns the package ids that actually had an active ebuild flipped.
	Commits once at the end.
	"""
	query = "UPDATE ebuilds SET active = 'FALSE', timestamp = NOW() WHERE package_id = %s AND active = 'TRUE' RETURNING package_id"
	cur = connection.cursor()
	marked = []
	for row in old_package_list:
		cur.execute(query, (row[0],))
		hit = cur.fetchone()
		if hit is not None:
			marked.append(hit[0])
	connection.commit()
	return marked
+  
def get_old_categories(connection, categories_line):
	"""Return one package_name row for the given category, or None.

	Fix vs. the original: 'cursor.execute(sqlQ (categories_line))' invoked
	the SQL string as a function (TypeError at runtime) instead of passing
	the category as a query parameter.
	"""
	cursor = connection.cursor()
	sqlQ = "SELECT package_name FROM packages WHERE category = %s"
	cursor.execute(sqlQ, (categories_line,))
	return cursor.fetchone()
+
def del_old_categories(connection, real_old_categoriess):
	"""Delete a category from categories_meta and categories, then commit.

	Fixes vs. the original:
	- the body referenced an undefined name 'real_old_categories' (NameError;
	  the parameter is spelled with a double s);
	- both execute() calls invoked the SQL string as a function instead of
	  passing the category as a parameter;
	- sqlQ2 read 'DELETE FROM categories categories_meta WHERE ...', which is
	  malformed for deleting from the categories table.
	"""
	cursor = connection.cursor()
	sqlQ1 = 'DELETE FROM categories_meta WHERE category = %s'
	sqlQ2 = 'DELETE FROM categories WHERE category = %s'
	cursor.execute(sqlQ1, (real_old_categoriess,))
	cursor.execute(sqlQ2, (real_old_categoriess,))
	connection.commit()
+
def add_old_ebuild(connection, package_id, old_ebuild_list):
	"""Deactivate old ebuild versions and drop their pending buildqueue rows.

	For every (version,) row in old_ebuild_list: find the still-active ebuild
	ids for package_id, delete any queue entries (and their use-flag rows)
	referencing them, then mark the version inactive.  Commits once.

	Fixes vs. the original: consistent indentation (the original mixed tabs
	and spaces, a TabError on Python 3) and explicit one-element parameter
	tuples for the single-placeholder statements.
	"""
	cursor = connection.cursor()
	sqlQ1 = "UPDATE ebuilds SET active = 'FALSE' WHERE package_id = %s AND ebuild_version = %s"
	sqlQ2 = "SELECT id FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = 'TRUE'"
	sqlQ3 = "SELECT queue_id FROM buildqueue WHERE ebuild_id = %s"
	sqlQ4 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
	sqlQ5 = 'DELETE FROM buildqueue WHERE queue_id = %s'
	for old_ebuild in old_ebuild_list:
		cursor.execute(sqlQ2, (package_id, old_ebuild[0]))
		for ebuild_id in cursor.fetchall():
			cursor.execute(sqlQ3, (ebuild_id[0],))
			for queue_id in cursor.fetchall():
				cursor.execute(sqlQ4, (queue_id[0],))
				cursor.execute(sqlQ5, (queue_id[0],))
		cursor.execute(sqlQ1, (package_id, old_ebuild[0]))
	connection.commit()
+  
def cp_all_old_db(connection, old_package_id_list):
	"""Filter old_package_id_list down to packages with no ebuild deactivated
	within the last 60 days.

	Each element is passed straight to execute() as the parameter sequence,
	exactly as in the original (callers supply one-element row tuples).

	Fix vs. the original: the constant SQL string was rebuilt on every loop
	iteration; it is now hoisted out of the loop.
	"""
	cursor = connection.cursor()
	sqlQ = "SELECT package_id FROM ebuilds WHERE package_id = %s AND active = 'FALSE' AND date_part('days', NOW() - timestamp) < 60"
	old_package_list = []
	for old_package in old_package_id_list:
		cursor.execute(sqlQ, old_package)
		if cursor.fetchone() is None:
			old_package_list.append(old_package)
	return old_package_list
+  
def del_old_ebuild(connection, ebuild_old_list_db):
	"""Fully purge a list of ebuild id rows: build logs with their QA/repoman
	problems and use-flag rows, keyword/iuse/restriction links, buildqueue
	entries, and finally the ebuild rows themselves.  Commits once.

	Fixes vs. the original: consistent indentation (the original mixed tabs
	and spaces, a TabError on Python 3) and uniform one-element parameter
	tuples for execute().
	"""
	cursor = connection.cursor()
	sqlQ1 = 'SELECT build_id FROM buildlog WHERE ebuild_id = %s'
	sqlQ2 = 'DELETE FROM qa_problems WHERE build_id = %s'
	sqlQ3 = 'DELETE FROM repoman_problems WHERE build_id = %s'
	sqlQ4 = 'DELETE FROM ebuildbuildwithuses WHERE build_id = %s'
	sqlQ5 = 'DELETE FROM ebuildhaveskeywords WHERE ebuild_id = %s'
	sqlQ6 = 'DELETE FROM ebuildhavesiuses WHERE ebuild_id = %s'
	sqlQ7 = 'DELETE FROM ebuildhavesrestrictions WHERE ebuild_id = %s'
	sqlQ8 = 'DELETE FROM buildlog WHERE ebuild_id = %s'
	sqlQ9 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s'
	sqlQ10 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
	sqlQ11 = 'DELETE FROM buildqueue WHERE ebuild_id  = %s'
	sqlQ12 = 'DELETE FROM ebuilds WHERE id  = %s'
	for ebuild_id in ebuild_old_list_db:
		key = (ebuild_id[0],)
		cursor.execute(sqlQ1, key)
		for build_id in cursor.fetchall():
			cursor.execute(sqlQ2, (build_id[0],))
			cursor.execute(sqlQ3, (build_id[0],))
			cursor.execute(sqlQ4, (build_id[0],))
		cursor.execute(sqlQ9, key)
		for queue_id in cursor.fetchall():
			cursor.execute(sqlQ10, (queue_id[0],))
		cursor.execute(sqlQ5, key)
		cursor.execute(sqlQ6, key)
		cursor.execute(sqlQ7, key)
		cursor.execute(sqlQ8, key)
		cursor.execute(sqlQ11, key)
		cursor.execute(sqlQ12, key)
	connection.commit()
+  
def del_old_package(connection, package_id_list):
	"""Delete packages and everything hanging off them.

	For each package id: purge its ebuilds (via del_old_ebuild), then remove
	the ebuild, manifest, packages_meta and packages rows.  Commits once.

	Fixes vs. the original:
	- 'self.del_old_ebuild(...)' raised NameError (this is a module-level
	  function, not a method) and also dropped the connection argument;
	- the first execute() passed package_id bare instead of as a one-element
	  tuple, unlike the other four statements.
	"""
	cursor = connection.cursor()
	sqlQ1 = 'SELECT id FROM ebuilds WHERE package_id = %s'
	sqlQ2 = 'DELETE FROM ebuilds WHERE package_id = %s'
	sqlQ3 = 'DELETE FROM manifest WHERE package_id = %s'
	sqlQ4 = 'DELETE FROM packages_meta WHERE package_id = %s'
	sqlQ5 = 'DELETE FROM packages WHERE package_id = %s'
	for package_id in package_id_list:
		cursor.execute(sqlQ1, (package_id,))
		ebuild_id_list = cursor.fetchall()
		del_old_ebuild(connection, ebuild_id_list)
		cursor.execute(sqlQ2, (package_id,))
		cursor.execute(sqlQ3, (package_id,))
		cursor.execute(sqlQ4, (package_id,))
		cursor.execute(sqlQ5, (package_id,))
	connection.commit()
+
def cp_list_db(connection, package_id):
	"""Return all active ebuild_version rows for a package.

	Fix vs. the original: '(package_id)' is just a parenthesised scalar, not
	a tuple; the parameter is now a proper one-element sequence as DB-API
	drivers expect.
	"""
	cursor = connection.cursor()
	sqlQ = "SELECT ebuild_version FROM ebuilds WHERE active = 'TRUE' AND package_id = %s"
	cursor.execute(sqlQ, (package_id,))
	return cursor.fetchall()
+
def cp_list_old_db(connection, package_id):
	"""Return (id, ebuild_version) rows inactive for more than 60 days.

	Fix vs. the original: package_id was passed bare to execute(); it is now
	wrapped in a one-element tuple to match the single %s placeholder.
	"""
	cursor = connection.cursor()
	sqlQ = "SELECT id, ebuild_version FROM ebuilds WHERE active = 'FALSE' AND package_id = %s AND date_part('days', NOW() - timestamp) > 60"
	cursor.execute(sqlQ, (package_id,))
	return cursor.fetchall()
+
def move_queru_buildlog(connection, queue_id, build_error, summary_error, logfilename, build_log_dict):
	"""Move a queue entry into the build log via the make_buildlog() stored
	procedure, commit, and return its result row.

	build_log_dict must provide 'repoman_error_list' and 'qa_error_list'.

	Fix vs. the original: the leftover debug prints were removed.
	"""
	cursor = connection.cursor()
	repoman_error_list = build_log_dict['repoman_error_list']
	qa_error_list = build_log_dict['qa_error_list']
	sqlQ = 'SELECT make_buildlog( %s, %s, %s, %s, %s, %s)'
	cursor.execute(sqlQ, (queue_id, summary_error, build_error, logfilename, qa_error_list, repoman_error_list))
	entries = cursor.fetchone()
	connection.commit()
	return entries
+
def add_qa_repoman(connection, ebuild_id_list, qa_error, packageDict, config_id):
	"""Record repoman/QA failures for each ebuild in packageDict.

	ebuild_id_list[i] pairs positionally with the i-th packageDict entry.
	When a package has repoman errors (v['ebuild_error']) and/or qa_error
	entries, a buildlog row is created and the individual problems inserted.
	Commits once at the end.

	Fixes vs. the original:
	- the summary classification used 'or' where 'and' was needed, so the
	  "QA" and "QA:Repoman" branches were unreachable;
	- the build_id returned by fetchone() is a row tuple; its first element
	  is now used as the foreign key;
	- dict.items() replaces the Python-2-only iteritems() (works on both).
	"""
	cursor = connection.cursor()
	sqlQ = "INSERT INTO buildlog (ebuild_id, config, error_summary, timestamp, hash ) VALUES  ( %s, %s, %s, now(), '1' ) RETURNING build_id"
	sqlQ_repoman = 'INSERT INTO repoman_problems (problem, build_id ) VALUES ( %s, %s )'
	sqlQ_qa = 'INSERT INTO qa_problems (problem, build_id ) VALUES ( %s, %s )'
	for ebuild_id, (k, v) in zip(ebuild_id_list, packageDict.items()):
		if v['ebuild_error'] == [] and qa_error == []:
			continue
		if v['ebuild_error'] != [] and qa_error == []:
			summary = "Repoman"
		elif v['ebuild_error'] == [] and qa_error != []:
			summary = "QA"
		else:
			summary = "QA:Repoman"
		cursor.execute(sqlQ, (ebuild_id, config_id, summary))
		build_id = cursor.fetchone()[0]
		for problem in v['ebuild_error']:
			cursor.execute(sqlQ_repoman, (problem, build_id))
		for problem in qa_error:
			cursor.execute(sqlQ_qa, (problem, build_id))
	connection.commit()
+
def update_qa_repoman(connection, build_id, build_log_dict):
	"""Insert repoman and QA problem rows for an existing build_id and commit.

	Fix vs. the original: both loops executed an undefined name 'sqlQ'
	(NameError at runtime); the intended statements are sqlQ1/sqlQ2.  The
	'!= []' guards were dropped -- iterating an empty list is a no-op.
	"""
	cursor = connection.cursor()
	sqlQ1 = 'INSERT INTO repoman_problems (problem, build_id ) VALUES ( %s, %s )'
	sqlQ2 = 'INSERT INTO qa_problems (problem, build_id ) VALUES ( %s, %s )'
	for x in build_log_dict['repoman_error_list']:
		cursor.execute(sqlQ1, (x, build_id))
	for x in build_log_dict['qa_error_list']:
		cursor.execute(sqlQ2, (x, build_id))
	connection.commit()
+
def get_arch_db(connection):
	"""Return the 'amd64' keyword row if present in the keywords table, else None."""
	cur = connection.cursor()
	cur.execute("SELECT keyword FROM keywords WHERE keyword = 'amd64'")
	return cur.fetchone()
+
def add_new_arch_db(connection, arch_list):
	"""Insert each arch in arch_list as a keywords-table row and commit."""
	query = 'INSERT INTO keywords (keyword) VALUES  ( %s )'
	cur = connection.cursor()
	for arch in arch_list:
		cur.execute(query, (arch,))
	connection.commit()
+
def check_fail_times(connection, queue_id, what_error):
	"""Return the stored error-count row for (queue_id, what_error), or None."""
	cur = connection.cursor()
	cur.execute('SELECT errors FROM temp_errors_queue_qa WHERE queue_id = %s AND what_error = %s',
		(queue_id, what_error))
	return cur.fetchone()
+
def add_fail_times(connection, queue_id, what_error):
	"""Record the first occurrence (count = 1) of an error kind for a queue entry."""
	cur = connection.cursor()
	cur.execute('INSERT INTO temp_errors_queue_qa (queue_id, what_error, errors) VALUES ( %s, %s, %s)',
		(queue_id, what_error, 1))
	connection.commit()
+
def update_fail_times(connection, queue_id, what_error, fail_times):
	"""Bump the stored error count for (queue_id, what_error) to fail_times + 1."""
	cur = connection.cursor()
	cur.execute('UPDATE temp_errors_queue_qa SET errors = %s WHERE queue_id = %s AND what_error = %s',
		(fail_times + 1, queue_id, what_error))
	connection.commit()
def del_old_queue(connection, queue_id):
	"""Remove a buildqueue entry together with its use-flag and error rows."""
	cur = connection.cursor()
	for query in (
			'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s',
			'DELETE FROM temp_errors_queue_qa WHERE queue_id  = %s',
			'DELETE FROM buildqueue WHERE queue_id  = %s'):
		cur.execute(query, (queue_id,))
	connection.commit()
+
def make_conf_error(connection,config_profile):
  # Stub: intentionally does nothing yet.  Presumably a placeholder for
  # recording make.conf errors for a config profile -- TODO confirm intent.
  pass

diff --git a/gobs/pym/readconf.py b/gobs/pym/readconf.py
index 848a3e6..c017561 100644
--- a/gobs/pym/readconf.py
+++ b/gobs/pym/readconf.py
@@ -33,6 +33,7 @@ class get_conf_settings(object):
 			# Buildhost setup (host/setup on guest)
 			if element[0] == 'GOBSCONFIG':
 				get_gobs_config = element[1]
+			
 			open_conffile.close()
 		gobs_settings_dict = {}
 		gobs_settings_dict['sql_backend'] = get_sql_backend.rstrip('\n')
@@ -40,6 +41,6 @@ class get_conf_settings(object):
 		gobs_settings_dict['sql_host'] = get_sql_host.rstrip('\n')
 		gobs_settings_dict['sql_user'] = get_sql_user.rstrip('\n')
 		gobs_settings_dict['sql_passwd'] = get_sql_passwd.rstrip('\n')
-		gobs_settings_dict['host_setup_root'] = get_gobs_root.rstrip('\n')
-		gobs_settings_dict['guest_setup_root'] = get_build_config.rstrip('\n')
+		gobs_settings_dict['gobs_root'] = get_gobs_root.rstrip('\n')
+		gobs_settings_dict['gobs_config'] = get_gobs_config.rstrip('\n')
 		return gobs_settings_dict

diff --git a/gobs/pym/repoman_gobs.py b/gobs/pym/repoman_gobs.py
index c495d80..ef10f9c 100644
--- a/gobs/pym/repoman_gobs.py
+++ b/gobs/pym/repoman_gobs.py
@@ -11,26 +11,25 @@ import codecs
 
 class gobs_repoman(object):
 	
-	def __init__(self, mysettings, myportdb, database):
-		self.mysettings = mysettings
-		self.myportdb = myportdb
-		self.database = database
+	def __init__(self, mysettings, myportdb):
+		self._mysettings = mysettings
+		self._myportdb = myportdb
 
 	def check_repoman(self, categories, package, ebuild_version_tree, config_id):
 		# We run repoman run_checks on the ebuild
-		pkgdir = self.mysettings['PORTDIR'] + "/" + categories + "/" + package
+		pkgdir = self._mysettings['PORTDIR'] + "/" + categories + "/" + package
 		full_path = pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild"
 		cpv = categories + "/" + package + "-" + ebuild_version_tree
 		root = '/'
 		trees = {
-		root : {'porttree' : portage.portagetree(root, settings=self.mysettings)}
+		root : {'porttree' : portage.portagetree(root, settings=self._mysettings)}
 		}
-		root_config = RootConfig(self.mysettings, trees[root], None)
+		root_config = RootConfig(self._mysettings, trees[root], None)
 		allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
 		allvars.update(Package.metadata_keys)
 		allvars = sorted(allvars)
-		myaux = dict(zip(allvars, self.myportdb.aux_get(cpv, allvars)))
-		pkg = Package(cpv=cpv, metadata=myaux, root_config=root_config)
+		myaux = dict(zip(allvars, self._myportdb.aux_get(cpv, allvars)))
+		pkg = Package(cpv=cpv, metadata=myaux, root_config=root_config, type_name='ebuild')
 		fails = []
 		try:
 			# All ebuilds should have utf_8 encoding.

diff --git a/gobs/pym/text.py b/gobs/pym/text.py
index c3d53fc..9f5bb4e 100644
--- a/gobs/pym/text.py
+++ b/gobs/pym/text.py
@@ -3,39 +3,52 @@ import re
 import os
 import errno
 
-class gobs_text(object):
def get_file_text(filename):
	"""Return the utf-8-decoded contents of filename.

	On a missing file the tuple ("No file", filename) is returned; other
	IOErrors propagate.

	Fixes vs. the original: the missing-file return sat unreachably after a
	bare 'raise', so ENOENT fell through to a NameError on the unbound
	'textfile'; the handle is now closed even if reading fails; codecs.open
	replaces the manual per-line unicode() decoding (same result on Python 2,
	and also works on Python 3).
	"""
	import codecs  # local import keeps the module's import block untouched
	try:
		textfile = codecs.open(filename, encoding='utf-8')
	except IOError as oe:
		if oe.errno not in (errno.ENOENT, ):
			raise
		return "No file", filename
	try:
		return textfile.read()
	finally:
		textfile.close()
 
-	def  get_file_text(self, filename):
-		# Return the filename contents
-		try:
-			textfile = open(filename)
-		except IOError, oe:
-			if oe.errno not in (errno.ENOENT, ):
-				raise
-				return "No file", filename
-		text = ""
-		for line in textfile:
-			text += unicode(line, 'utf-8')
-		textfile.close()
-		return text
-				
-	def  get_ebuild_text(self, filename):
-		"""Return the ebuild contents"""
-		try:
-			ebuildfile = open(filename)
-		except IOError, oe:
-			if oe.errno not in (errno.ENOENT, ):
-				raise
-				return "No Ebuild file there"
-		text = ""
-		dataLines = ebuildfile.readlines()
-		for i in dataLines:
-			text = text + i + " "
-		line2 = dataLines[2]
-		field = line2.split(" ")
-		ebuildfile.close()
-		try:
-			cvs_revision = field[3]
-		except:
-			cvs_revision = ''
-		return text, cvs_revision
def get_ebuild_text(filename):
	"""Return (text, cvs_revision) for an ebuild file.

	text is every line of the file with a space appended after each line;
	cvs_revision is the 4th whitespace-separated field of the file's 3rd
	line (the CVS $Header$ line), or '' when that field is absent.

	Fixes vs. the original: on a missing file the "No Ebuild file there"
	return sat unreachably after a bare 'raise', so ENOENT fell through to a
	NameError on the unbound 'ebuildfile' -- that string is now actually
	returned (other IOErrors still propagate); the file handle is closed via
	finally; the bare 'except:' is narrowed to IndexError, the only error
	the field lookup can raise.
	"""
	try:
		ebuildfile = open(filename)
	except IOError as oe:
		if oe.errno not in (errno.ENOENT, ):
			raise
		return "No Ebuild file there"
	try:
		dataLines = ebuildfile.readlines()
	finally:
		ebuildfile.close()
	text = ""
	for line in dataLines:
		text = text + line + " "
	# The CVS revision lives in the third line, e.g.
	# "# $Header: .../foo.ebuild,v 1.1 2011/01/01 00:00:00 zorry Exp $"
	field = dataLines[2].split(" ")
	try:
		cvs_revision = field[3]
	except IndexError:
		cvs_revision = ''
	return text, cvs_revision
+
def get_log_text_list(filename):
	"""Return the log file's lines as a list, or None when the file is missing.

	Other IOErrors propagate.

	Fixes vs. the original: the debug print was removed, the file handle is
	now closed (it previously leaked), and readlines() replaces the manual
	append loop.
	"""
	try:
		logfile = open(filename)
	except IOError as oe:
		if oe.errno not in (errno.ENOENT, ):
			raise
		return None
	try:
		return logfile.readlines()
	finally:
		logfile.close()



^ permalink raw reply related	[flat|nested] only message in thread

only message in thread, other threads:[~2011-07-10 15:31 UTC | newest]

Thread overview: (only message) (download: mbox.gz follow: Atom feed
-- links below jump to the message on this page --
2011-07-10 15:31 [gentoo-commits] dev/zorry:master commit in: gobs/pym/, gobs/pym/querys/ Magnus Granberg

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox