* [gentoo-commits] dev/zorry:master commit in: buildhost/updatedb/, gobs/bin/, ebuild/dev-python/gobs/, buildhost/portage_hook/, ...
@ 2011-08-28 12:34 Magnus Granberg
From: Magnus Granberg @ 2011-08-28 12:34 UTC
To: gentoo-commits
commit: c17b53599b51f4131e8d723872fe009580340c28
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sun Aug 28 12:29:33 2011 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sun Aug 28 12:29:33 2011 +0000
URL: http://git.overlays.gentoo.org/gitweb/?p=dev/zorry.git;a=commit;h=c17b5359
adding git support
---
buildhost/README.txt | 38 +--
buildhost/portage_hook/buildhost_hook.py | 253 +++++++----
buildhost/portage_hook/sqlbackend.py | 47 ++-
buildhost/updatedb/core.py | 6 +-
ebuild/dev-python/gobs/gobs-9999.ebuild | 6 +-
gobs/bin/{portage_hooks => gobs_portage_hooks} | 2 +-
gobs/bin/gobs_updatedb | 22 +-
gobs/pym/build_log.py | 290 ++++++++++++
gobs/pym/check_setup.py | 6 +-
gobs/pym/initsql.py | 41 --
gobs/pym/package.py | 5 +-
gobs/pym/pgsqlbackend.py | 418 -----------------
gobs/pym/querys/__init__.py | 1 -
gobs/pym/querys/mysql.py | 1 -
gobs/pym/querys/pgsql.py | 596 ------------------------
gobs/pym/readconf.py | 6 +-
16 files changed, 528 insertions(+), 1210 deletions(-)
diff --git a/buildhost/README.txt b/buildhost/README.txt
index 4251940..3656437 100644
--- a/buildhost/README.txt
+++ b/buildhost/README.txt
@@ -1,30 +1,8 @@
-This buildhost Python code will try to solve some problems we have:
-
-1. No central unit that gathers all logs and build info on packages.
-2. No frontend to the tinderboxes.
-3. No binpackages for free.
-4. No automatic bug committing.
-
-Dirs
-/etc/buildhost # config dir for buildhost.
-/etc/buildhost/buildhost.conf # config file for buildhost
-/fooo/ # dir for the configs for the buildhosts (/etc/make.conf /etc/make.profile)
-
-/updatedb # app to generate the build queue and fill the db with package info.
-/buildpackage # builds the packages in the build queue.
-/frontend # frontend for the buildhosts
-
-Updatedb scans the portage tree for new packages, adds the package info and adds it to
-the build queue. It starts by reading the config file for SQL settings. The next step is to init the SQL
-backend; only MySQL is supported now, but we use the Python DB-API 2.0 so we could
-add more SQL backends with some code in sqlbackend.py. When that is done we set up the default
-config to use when we ask portage for info on ebuilds.
-We check the portage dir for new packages/ebuilds and add package/ebuild info for the
-package/ebuild. When that is done we add it to the build queue with the default USE flags to test
-with different configs.
-
-Buildpackage is placed on all buildhosts. It starts by checking whether it has any package in the build queue and
-builds it with all deps and settings it has on that buildhost. When it is done it checks for QA and
-repoman errors and adds them to the build info of that package.
-
-Frontend will show info on the packages like USE flags, logs, QA, repoman and build errors for all buildhosts.
+1. Set up the backend
+Set up gobs.conf for the database.
+Change GOBSROOT to point to the git repo that holds your configs for the profiles/setups.
+Change GOBSCONFIG to the profile/setup to use on the guest.
+On the guest, make a symlink from /etc/portage to the profile/setup you will use on the guest.
+Import the SQL dump into your database.
+The portage/base/make.conf should be in the base profile/setup.
+The portage/all/bashrc should be in all the guest profiles/setups.
\ No newline at end of file
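
For illustration, a minimal sketch of how the guest-side settings described in the new README could be read. It assumes gobs.conf uses simple KEY=value lines like the buildhost.conf parser elsewhere in this commit; the /etc/gobs/gobs.conf path, the read_gobs_conf() helper and the literal GOBSROOT/GOBSCONFIG key names are illustrative assumptions, not the actual gobs.readconf implementation.

    # Hypothetical sketch: parse KEY=value pairs from gobs.conf and build the
    # guest config root from GOBSROOT and GOBSCONFIG as described above.
    def read_gobs_conf(configfile="/etc/gobs/gobs.conf"):
        conf = {}
        for line in open(configfile):
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            key, _, value = line.partition("=")
            conf[key.strip()] = value.strip()
        return conf

    conf = read_gobs_conf()
    # GOBSROOT: git repo holding the profiles/setups; GOBSCONFIG: profile/setup for this guest
    guest_config_root = conf["GOBSROOT"] + "/" + conf["GOBSCONFIG"] + "/"
    # On the guest, /etc/portage should be a symlink into this profile/setup.
    print "Guest config root:", guest_config_root
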
diff --git a/buildhost/portage_hook/buildhost_hook.py b/buildhost/portage_hook/buildhost_hook.py
index df24e12..032638a 100644
--- a/buildhost/portage_hook/buildhost_hook.py
+++ b/buildhost/portage_hook/buildhost_hook.py
@@ -7,57 +7,20 @@ import sys
from StringIO import StringIO
import os
import re
+import codecs
import portage
+from portage import config
+from portage import os, _encodings, _unicode_decode
+from portage import _unicode_encode
+from repoman.checks import run_checks
+from _emerge.RootConfig import RootConfig
+from _emerge.Package import Package
import _emerge
import hashlib
import errno
import time
-from xml.etree.ElementTree import ElementTree
-
-class get_conf_settings(object):
- # open the /etc/buildhost/buildhost.conf file and get the needed
- # settings for the database
- def get_conf_settings_from_file():
- configfile = "/etc/buildhost/buildhost.conf"
- open_conffile = open(configfile, 'r')
- dataLines = open_conffile.readlines()
- for line in dataLines:
- element = line.split('=')
- if element[0] == 'SQLBACKEND': # Databas backend (mysql)
- GetSqlBackend = element[1]
- if element[0] == 'SQLDB': # Database
- GetSqlDB = element[1]
- if element[0] == 'SQLHOST': # Host
- GetSqlHost = element[1]
- if element[0] == 'SQLUSER': # User
- GetSqlUser = element[1]
- if element[0] == 'SQLPASSWD': # Password
- GetSqlPasswd = element[1]
- if element[0] == 'BUILDCONFIG': # Buildhost root
- GetBuildConfig = element[1]
- open_conffile.close()
- return GetSqlBackend.rstrip('\n'), GetSqlDB.rstrip('\n'), GetSqlHost.rstrip('\n'), \
- GetSqlUser.rstrip('\n'), GetSqlPasswd.rstrip('\n'), GetBuildConfig.rstrip('\n')
-
- Get_BuildHost_settings = get_conf_settings_from_file()
- SetSqlBackend = Get_BuildHost_settings[0]
- SetSqlDB = Get_BuildHost_settings[1]
- SetSqlHost = Get_BuildHost_settings[2]
- SetSqlUser = Get_BuildHost_settings[3]
- SetSqlPasswd = Get_BuildHost_settings[4]
- SetBuildConfig = Get_BuildHost_settings[5]
-
-def init_sql_backend():
- from sqlbackend import DatabaseConfig # import DatebaseConfig
- database = None
-
- # Setup the database backent to use mysql
- if DatabaseConfig.mode == 'pgsql':
- from sqlbackend import PgSQLPackageDB
- database = PgSQLPackageDB(DatabaseConfig.settings['sql_pgsql'])
- return database
-def get_iuse(portdb, cpv):
+def get_iuse(myportdb, cpv):
"""Gets the current IUSE flags from the tree
To be used when a gentoolkit package object is not needed
@@ -66,7 +29,7 @@ def get_iuse(portdb, cpv):
@rtype list
@returns [] or the list of IUSE flags
"""
- return portdb.aux_get(cpv, ["IUSE"])[0].split()
+ return myportdb.aux_get(cpv, ["IUSE"])[0].split()
def reduce_flag(flag):
"""Absolute value function for a USE flag
@@ -96,7 +59,7 @@ def reduce_flags(the_list):
return r
-def filter_flags(settings, use, use_expand_hidden, usemasked, useforced):
+def filter_flags(mysettings, use, use_expand_hidden, usemasked, useforced):
"""Filter function to remove hidden or otherwise not normally
visible USE flags from a list.
@@ -119,7 +82,7 @@ def filter_flags(settings, use, use_expand_hidden, usemasked, useforced):
if f in x:
use.remove(x)
# clean out any arch's
- archlist = settings["PORTAGE_ARCHLIST"].split()
+ archlist = mysettings["PORTAGE_ARCHLIST"].split()
for a in use[:]:
if a in archlist:
use.remove(a)
@@ -130,7 +93,7 @@ def filter_flags(settings, use, use_expand_hidden, usemasked, useforced):
use.remove(a)
return use
-def get_all_cpv_use(settings, portdb, cpv):
+def get_all_cpv_use(mysettings, myportdb, cpv):
"""Uses portage to determine final USE flags and settings for an emerge
@type cpv: string
@@ -138,24 +101,13 @@ def get_all_cpv_use(settings, portdb, cpv):
@rtype: lists
@return use, use_expand_hidden, usemask, useforce
"""
- use = None
- settings.unlock()
- try:
- settings.setcpv(cpv, use_cache=None, mydb=portdb)
- use = settings['PORTAGE_USE'].split()
- use_expand_hidden = settings["USE_EXPAND_HIDDEN"].split()
- usemask = list(settings.usemask)
- useforce = list(settings.useforce)
- except KeyError:
- settings.reset()
- settings.lock()
- return [], [], [], []
- # reset cpv filter
- settings.reset()
- settings.lock()
+ use = mysettings['PORTAGE_USE'].split()
+ use_expand_hidden = mysettings["USE_EXPAND_HIDDEN"].split()
+ usemask = list(mysettings.usemask)
+ useforce = list(mysettings.useforce)
return use, use_expand_hidden, usemask, useforce
-def get_flags(settings, portdb, cpv):
+def get_flags(mysettings, myportdb, cpv):
"""Retrieves all information needed to filter out hidden, masked, etc.
USE flags for a given package.
@@ -167,14 +119,15 @@ def get_flags(settings, portdb, cpv):
@rtype: list or list, list
@return IUSE or IUSE, final_flags
"""
- final_use, use_expand_hidden, usemasked, useforced = get_all_cpv_use(settings, portdb, cpv)
- iuse_flags = filter_flags(settings, get_iuse(portdb, cpv), use_expand_hidden, usemasked, useforced)
+ final_use, use_expand_hidden, usemasked, useforced = get_all_cpv_use(mysettings, myportdb, cpv)
+ iuse_flags = filter_flags(mysettings, get_iuse(myportdb, cpv), use_expand_hidden, usemasked, useforced)
#flags = filter_flags(use_flags, use_expand_hidden, usemasked, useforced)
- final_flags = filter_flags(settings, final_use, use_expand_hidden, usemasked, useforced)
+ final_flags = filter_flags(mysettings, final_use, use_expand_hidden, usemasked, useforced)
return iuse_flags, final_flags
-def get_log_text(filename):
+def get_log_text_list(filename):
"""Return the log contents as a list"""
+ print "filename", filename
try:
logfile = open(filename)
except IOError, oe:
@@ -187,12 +140,14 @@ def get_log_text(filename):
text.append(i)
return text
-def add_new_ebuild_buildquery_db(database, settings, build_dict, ebuild_version_checksum_tree, config_profile):
- portdb = portage.portdbapi(mysettings=settings)
- message = None
+def add_new_ebuild_buildquery_db(database, mysettings, build_dict, ebuild_version_checksum_tree, config_profile):
+ myportdb = portage.portdbapi(mysettings=mysettings)
cpv = build_dict['cpv']
- iuse_flags_list, final_use_list = get_flags(settings, portdb, cpv)
+ message = None
+ iuse_flags_list, final_use_list = get_flags(mysettings, myportdb, cpv)
iuse = []
+ use_flags_list = []
+ use_enable_list = []
for iuse_line in iuse_flags_list:
iuse.append(reduce_flag(iuse_line))
iuse_flags_list2 = list(set(iuse))
@@ -208,37 +163,153 @@ def add_new_ebuild_buildquery_db(database, settings, build_dict, ebuild_version_
use_enable_list.append(s)
ebuild_id = database.get_ebuild_id_db(build_dict, ebuild_version_checksum_tree)
database.add_new_package_buildqueue(ebuild_id, config_profile, use_flags_list, use_enable_list, message)
- return
-def get_buillog_info(settings, mode):
+
+def check_repoman(settings, portdb, categories, package, ebuild_version_tree, config_id):
+ # We run repoman run_checks on the ebuild
+ pkgdir = settings['PORTDIR'] + "/" + categories + "/" + package
+ full_path = pkgdir + "/" + package + "-" + ebuild_version_tree + ".ebuild"
+ cpv = categories + "/" + package + "-" + ebuild_version_tree
+ root = '/'
+ trees = {
+ root : {'porttree' : portage.portagetree(root, settings=settings)}
+ }
+ root_config = RootConfig(settings, trees[root], None)
+ allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
+ allvars.update(Package.metadata_keys)
+ allvars = sorted(allvars)
+ myaux = dict(zip(allvars, portdb.aux_get(cpv, allvars)))
+ pkg = Package(cpv=cpv, metadata=myaux, root_config=root_config)
+ fails = []
+ try:
+ # All ebuilds should have utf_8 encoding.
+ f = codecs.open(_unicode_encode(full_path,
+ encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['repo.content'])
+ try:
+ for check_name, e in run_checks(f, pkg):
+ fails.append(check_name + ": " + e)
+ finally:
+ f.close()
+ except UnicodeDecodeError:
+ # A file.UTF8 failure will have already been recorded above.
+ pass
+ # fails will have a list with repoman errors
+ return fails
+
+def get_buildlog_info(settings, mode, build_dict, config_profile):
logfile_text = get_log_text(settings.get("PORTAGE_LOG_FILE"))
-def get_build_dict_db(settings, database, config_profile):
- categories = settings['CATEGORY']
- package = settings['PN']
- ebuild_version = settings['PVR']
+ # FIXME to support more errors and stuff
+ # Missing QA loging
+ i = 0
+ error_log_list = []
+ qa_error_list = []
+ sum_build_log_list = []
+ for textline in logfile_text:
+ if re.search(" * Package:", textline):
+ error_log_list.append(textline)
+ if re.search(" * Repository:", textline):
+ error_log_list.append(textline)
+ if re.search(" * Maintainer:", textline):
+ error_log_list.append(textline)
+ if re.search(" * USE:", textline):
+ error_log_list.append(textline)
+ if re.search(" * FEATURES:", textline):
+ error_log_list.append(textline)
+ if re.search("Error 1", textline):
+ x = i - 20
+ error_log_list.append(".....\n")
+ while x != i + 3:
+ error_log_list.append(logfile_text[x])
+ x = x +1
+ if re.search(" * ERROR:", textline):
+ x = i
+ endline= True
+ field = textline.split(" ")
+ sum_build_log_list.append("fail")
+ error_log_list.append(".....\n")
+ while endline:
+ try:
+ error_log_list.append(logfile_text[x])
+ except:
+ endline = False
+ else:
+ x = x +1
+ i = i + 1
+ print "build_error:\n", error_log_list
+ print "sum:", sum_build_log_list
+ build_log_dict = {}
+ build_log_dict['error_log_list:'] = error_log_list
+ # Run repoman check_repoman()
+ portdb = portage.portdbapi(mysettings=settings)
+ categories = build_dict['categories']
+ package = build_dict['package']
+ ebuild_version = build_dict['ebuild_version']
+ repoman_error_list = check_repoman(settings, portdb, categories, package, ebuild_version, config_profile)
+ print repoman_error_list
+ if repoman_error_list != []:
+ build_log_dict['repoman_error_list'] = repoman_error_list
+ sum_build_log_list.append("repoman")
+ else:
+ build_log_dict['repoman_error_list'] = None
+ if qa_error_list != []:
+ build_log_dict['qa_error_list'] = qa_error_list
+ sum_build_log_list.append("qa")
+ else:
+ build_log_dict['qa_error_list'] = None
+ build_log_dict['summary_error_list'] = sum_build_log_list
+ return build_log_dict
+
+def add_buildlog_db(settings, database, build_dict, build_log_dict, config_profile):
+ ebuild_id = build_dict['ebuild_id']
+ queue_id = build_dict['queue_id']
+ sum_build_log_list = build_log_dict['summary_error_list']
+ error_log_list = build_log_dict['error_log_list:']
+ build_error = ""
+ if error_log_list != []:
+ for log_line in error_log_list:
+ build_error = build_error + log_line
+ summary_error = ""
+ if sum_build_log_list != []:
+ for sum_log_line in sum_build_log_list:
+ summary_error = summary_error + " " + sum_log_line
+ logfilename = re.sub("\/var\/log\/portage\/", "", settings.get("PORTAGE_LOG_FILE"))
+ build_id = database.move_queru_buildlog(queue_id, build_error, summary_error, logfilename, build_log_dict)
+ print "build_id", build_id
+
+def get_build_dict_db(mysettings, database, config_profile):
+ categories = mysettings['CATEGORY']
+ package = mysettings['PN']
+ ebuild_version = mysettings['PVR']
cpv = categories + "/" + package + "-" + ebuild_version
+ print "cpv", cpv
package_id = database.have_package_db(categories, package)
+ print "package_id", package_id
build_dict = {}
+ mybuild_dict = {}
build_dict['ebuild_version'] = ebuild_version
build_dict['package_id'] = package_id
build_dict['cpv'] = cpv
- build_dict['build_useflags'] = settings.get('PORTAGE_BUILT_USE')
- pkgdir = os.path.join(settings['PORTDIR'], categories + "/" + package)
+ if mysettings.get('PORTAGE_BUILT_USE') is None:
+ build_dict['build_useflags'] = []
+ else:
+ build_dict['build_useflags'] = mysettings.get('PORTAGE_BUILT_USE')
+ pkgdir = os.path.join(mysettings['PORTDIR'], categories + "/" + package)
ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir+ "/" + package + "-" + ebuild_version + ".ebuild")[0]
queue_id = database.check_revision(build_dict, config_profile, ebuild_version_checksum_tree)
+ print "queue_id", queue_id
if queue_id is None:
- add_new_ebuild_buildquery_db(database, settings, build_dict, ebuild_version_checksum_tree, config_profile)
+ add_new_ebuild_buildquery_db(database, mysettings, build_dict, ebuild_version_checksum_tree, config_profile)
queue_id = database.check_revision(build_dict, config_profile, ebuild_version_checksum_tree)
- build_dict = database.get_package_to_build(config_profile, queue_id)
- return build_dict
-
-def main_hook(settings, mode):
+ mybuild_dict = database.get_package_to_build(config_profile, queue_id, new = True)
+ return mybuild_dict
+
+def main_hook(mysettings, mode):
# Main
get_conf_settings.__init__ # Init settings from the configfile
database = init_sql_backend() # Init the Database
config_profile = get_conf_settings.SetBuildConfig
- build_dict = get_build_dict_db(settings, database, config_profile)
- build_log_dict = get_buildlog_info(settings, mode)
- print settings.get("PORTAGE_LOG_FILE")
- print settings['PORTAGE_USE'], settings['PORTAGE_BUILDDIR']
- print build_dict
+ build_dict = get_build_dict_db(mysettings, database, config_profile)
+ build_log_dict = get_buildlog_info(mysettings, mode, build_dict, config_profile)
+ add_buildlog_db(mysettings, database, build_dict, build_log_dict, config_profile)
+ return
\ No newline at end of file
diff --git a/buildhost/portage_hook/sqlbackend.py b/buildhost/portage_hook/sqlbackend.py
index 399aaa5..89240fc 100644
--- a/buildhost/portage_hook/sqlbackend.py
+++ b/buildhost/portage_hook/sqlbackend.py
@@ -48,11 +48,14 @@ class SQLPackageDatabase(object):
cursor.execute(sqlQ, (config_profile,))
return cursor.fetchone()
- def get_package_to_build(self, config_profile, queue_id):
+ def get_package_to_build(self, config_profile, queue_id, new):
cursor = self.conn.cursor()
print queue_id, config_profile
# get what to build
- sqlQ1 = "SELECT ebuild_id, post_message FROM buildqueue WHERE config = %s AND extract(epoch from (NOW()) - timestamp) > 7200 AND queue_id = %s"
+ if new is True:
+ sqlQ1 = "SELECT ebuild_id, post_message FROM buildqueue WHERE config = %s AND queue_id = %s"
+ else:
+ sqlQ1 = "SELECT ebuild_id, post_message FROM buildqueue WHERE config = %s AND extract(epoch from (NOW()) - timestamp) > 7200 AND queue_id = %s"
# get use flags to use
sqlQ2 = "SELECT useflag, enabled FROM ebuildqueuedwithuses WHERE queue_id = %s"
# get ebuild version (v) and package id
@@ -63,7 +66,7 @@ class SQLPackageDatabase(object):
build_useflags_dict = {}
cursor.execute(sqlQ1, (config_profile, queue_id))
entries = cursor.fetchone()
- print entries
+ print "ebuild_id post_message", entries
if entries is None:
build_dict['ebuild_id'] = None
build_dict['post_message'] = None
@@ -85,6 +88,7 @@ class SQLPackageDatabase(object):
build_dict['ebuild_checksum'] = entries[2]
cursor.execute(sqlQ4, (entries[1],))
entries = cursor.fetchall()[0]
+ print "cp", entries
build_dict['categories'] = entries[0]
build_dict['package'] = entries[1]
if build_useflags is None:
@@ -98,31 +102,55 @@ class SQLPackageDatabase(object):
def check_revision(self, build_dict, config_profile, ebuild_version_checksum_tree):
cursor = self.conn.cursor()
+ sqlQ = 'SELECT id, ebuild_checksum FROM ebuilds WHERE ebuild_version = %s AND package_id = %s'
sqlQ1 = 'SELECT id FROM ebuilds WHERE ebuild_version = %s AND ebuild_checksum = %s AND package_id = %s'
sqlQ2 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s AND config = %s'
sqlQ3 = "SELECT useflag FROM ebuildqueuedwithuses WHERE queue_id = %s AND enabled = 'True'"
+ print build_dict['ebuild_version'], ebuild_version_checksum_tree, build_dict['package_id']
+ cursor.execute(sqlQ, (build_dict['ebuild_version'], build_dict['package_id']))
+ print cursor.fetchall()
cursor.execute(sqlQ1, (build_dict['ebuild_version'], ebuild_version_checksum_tree, build_dict['package_id']))
ebuild_id = cursor.fetchone()[0]
- print ebuild_id
+ print "ebuild_id", ebuild_id
if ebuild_id is None:
return None
cursor.execute(sqlQ2, (ebuild_id, config_profile))
queue_id_list = cursor.fetchall()
+ print "queue_id_list", queue_id_list
if queue_id_list is None:
return None
for queue_id in queue_id_list:
- cursor.execute(sqlQ3, (queue_id,))
+ print "queue_id", queue_id[0]
+ cursor.execute(sqlQ3, (queue_id[0],))
entries = cursor.fetchall()
if entries == []:
- build_useflags = None
+ build_useflags = []
else:
build_useflags = entries
print build_useflags, build_dict['build_useflags']
- if build_useflags is build_dict['build_useflags']:
+ if build_useflags == build_dict['build_useflags']:
print queue_id[0]
return queue_id[0]
return None
+ def have_cpv_buildlog_db(self, ebuild_id):
+ cursor = self.conn.cursor()
+ sqlQ = 'SELECT build_id FROM buildlog WHERE ebuild_id = %s'
+ cursor.execute(sqlQ, (ebuild_id,))
+ return cursor.fetchall()
+
+ def move_queru_buildlog(self, queue_id, build_error, summary_error, logfilename, build_log_dict):
+ cursor = self.conn.cursor()
+ sum_build_log_list = build_log_dict['summary_error_list']
+ repoman_error_list = build_log_dict['repoman_error_list']
+ qa_error_list = build_log_dict['qa_error_list']
+ sqlQ = 'SELECT make_buildlog( %s, %s, %s, %s, %s, %s)'
+ cursor.execute(sqlQ, (queue_id, summary_error, build_error, logfilename, qa_error_list, repoman_error_list))
+ entries = cursor.fetchone()
+ self.conn.commit()
+ return entries
+
+
def get_config_list_all(self):
cursor = self.conn.cursor()
sqlQ = 'SELECT id FROM configs'
@@ -140,11 +168,10 @@ class SQLPackageDatabase(object):
def have_package_db(self, categories, package):
cursor = self.conn.cursor()
-
sqlQ ='SELECT package_id FROM packages WHERE category = %s AND package_name = %s'
params = categories, package
cursor.execute(sqlQ, params)
- return cursor.fetchone()
+ return cursor.fetchone()[0]
def get_categories_db(self):
cursor = self.conn.cursor()
@@ -289,7 +316,7 @@ class SQLPackageDatabase(object):
cursor.execute(sqlQ, (package_id, ebuild_version_tree))
self.conn.commit()
- def get_ebuild_id_db(build_dict, ebuild_version_checksum_tree):
+ def get_ebuild_id_db(self, build_dict, ebuild_version_checksum_tree):
cursor = self.conn.cursor()
sqlQ = 'SELECT id FROM ebuilds WHERE ebuild_version = %s AND ebuild_checksum = %s AND package_id = %s'
cursor.execute(sqlQ, (build_dict['ebuild_version'], ebuild_version_checksum_tree, build_dict['package_id']))
diff --git a/buildhost/updatedb/core.py b/buildhost/updatedb/core.py
index 34be0e7..aeaa865 100644
--- a/buildhost/updatedb/core.py
+++ b/buildhost/updatedb/core.py
@@ -748,9 +748,10 @@ def update_arch_db(database):
arch_list.append("-*")
database.add_new_arch_db(arch_list)
-def update_package_list(settings, portdb, database):
+def update_package_list(mysettings, database):
print "Checking categories, package, ebuilds"
package_id_list_tree = []
+ portdb = portage.portdbapi(mysettings=mysettings)
# Will run some update checks and update package if needed
# Get categories/package list from portage
package_list_tree = portdb.cp_all()
@@ -783,9 +784,8 @@ def main():
get_conf_settings.__init__ # Init settings from the configfile
database = init_sql_backend() # Init the Database
settings = init_portage_settings(database) # Init settings for the default config
- portdb = portage.portdbapi(mysettings=settings) # Set portdb = portdbapi mysettings
update_arch_db(database) # Update the arch db if needed
- update_package_list(settings, portdb, database) # Update the package db
+ update_package_list(settings, database) # Update the package db
if __name__ == "__main__":
main()
diff --git a/ebuild/dev-python/gobs/gobs-9999.ebuild b/ebuild/dev-python/gobs/gobs-9999.ebuild
index e7c8c1c..e4cfeff 100644
--- a/ebuild/dev-python/gobs/gobs-9999.ebuild
+++ b/ebuild/dev-python/gobs/gobs-9999.ebuild
@@ -14,9 +14,11 @@ SRC_URI=""
LICENSE="GPL-2"
KEYWORDS="~amd64"
SLOT="0"
-IUSE=""
+IUSE="+postgresql"
-RDEPEND="sys-apps/portage"
+RDEPEND="sys-apps/portage
+ >=dev-python/git-python-0.3.2_rc1
+ postgresql? ( dev-python/psycopg )"
DEPEND="${RDEPEND}
dev-python/setuptools"
diff --git a/gobs/bin/portage_hooks b/gobs/bin/gobs_portage_hooks
similarity index 97%
rename from gobs/bin/portage_hooks
rename to gobs/bin/gobs_portage_hooks
index 41321df..aa7392b 100755
--- a/gobs/bin/portage_hooks
+++ b/gobs/bin/gobs_portage_hooks
@@ -26,7 +26,7 @@ def get_build_dict_db(mysettings, config_profile, gobs_settings_dict):
package = os.environ['PN']
ebuild_version = os.environ['PVR']
cpv = categories + "/" + package + "-" + ebuild_version
- init_package = gobs_package(mysettings, CM, myportdb, gobs_settings_dict)
+ init_package = gobs_package(mysettings, myportdb, gobs_settings_dict)
print("cpv", cpv)
package_id = have_package_db(conn, categories, package)
# print("package_id %s" % package_id, file=sys.stdout)
diff --git a/gobs/bin/gobs_updatedb b/gobs/bin/gobs_updatedb
index 36b26bd..f140e5f 100755
--- a/gobs/bin/gobs_updatedb
+++ b/gobs/bin/gobs_updatedb
@@ -8,6 +8,7 @@
import sys
import os
from threading import Thread
+from git import *
# Get the options from the config file set in gobs.readconf
from gobs.readconf import get_conf_settings
@@ -28,7 +29,7 @@ from gobs.old_cpv import gobs_old_cpv
from gobs.categories import gobs_categories
import portage
-def init_portage_settings(gobs_settings_dict):
+def init_portage_settings():
""" Get the BASE Setup/Config for portage.settings
@type: module
@@ -39,19 +40,26 @@ def init_portage_settings(gobs_settings_dict):
@returns new settings
"""
# check config setup
+ #git stuff
+ repo = Repo("/var/lib/gobs/" + gobs_settings_dict['gobs_gitreponame'] + "/")
+ repo_remote = repo.remotes.origin
+ repo_remote.pull()
+ master = repo.head.reference
+ print master.log()
+
conn=CM.getConnection()
- check_make_conf(conn, gobs_settings_dict)
+ check_make_conf
print "Check configs done"
# Get default config from the configs table and default_config=1
config_id = get_default_config(conn) # HostConfigDir = table configs id
CM.putConnection(conn);
- default_config_root = gobs_settings_dict['gobs_root'] +"config/" + config_id[0] + "/"
+ default_config_root = "/var/lib/gobs/" + gobs_settings_dict['gobs_gitreponame'] + "/" + config_id[0] + "/"
# Set config_root (PORTAGE_CONFIGROOT) to default_config_root
mysettings = portage.config(config_root = default_config_root)
print "Setting default config to:", config_id[0]
return mysettings
-def update_cpv_db(mysettings, gobs_settings_dict):
+def update_cpv_db(mysettings):
"""Code to update the cpv in the database.
@type:settings
@parms: portage.settings
@@ -65,7 +73,7 @@ def update_cpv_db(mysettings, gobs_settings_dict):
myportdb = portage.portdbapi(mysettings=mysettings)
init_categories = gobs_categories(mysettings)
init_old_cpv = gobs_old_cpv(myportdb, mysettings)
- init_package = gobs_package(mysettings, myportdb, gobs_settings_dict)
+ init_package = gobs_package(mysettings, myportdb)
package_id_list_tree = []
# Will run some update checks and update package if needed
# Get categories/package list from portage
@@ -101,11 +109,11 @@ def update_cpv_db(mysettings, gobs_settings_dict):
def main():
# Main
# Init settings for the default config
- mysettings = init_portage_settings(gobs_settings_dict)
+ mysettings = init_portage_settings()
init_arch = gobs_arch()
init_arch.update_arch_db()
# Update the cpv db
- update_cpv_db(mysettings, gobs_settings_dict)
+ update_cpv_db(mysettings)
CM.closeAllConnections()
if __name__ == "__main__":
diff --git a/gobs/pym/build_log.py b/gobs/pym/build_log.py
index ccd8225..6539b37 100644
--- a/gobs/pym/build_log.py
+++ b/gobs/pym/build_log.py
@@ -147,6 +147,296 @@ class gobs_buildlog(object):
build_log_dict['error_log_list'] = error_log_list
build_log_dict['summary_error_list'] = sum_build_log_list
return build_log_dict
+
+ # Copy of the portage action_info but fixed so it post info to a list.
+ def action_info(self, trees, myopts, myfiles):
+ msg = []
+ root_config = trees[self._mysettings['ROOT']]['root_config']
+
+ msg.append(getportageversion(self._mysettings["PORTDIR"], self._mysettings["ROOT"],
+ self._mysettings.profile_path, self._mysettings["CHOST"],
+ trees[self._mysettings["ROOT"]]["vartree"].dbapi))
+
+ header_width = 65
+ header_title = "System Settings"
+ if myfiles:
+ msg.append(header_width * "=")
+ msg.append(header_title.rjust(int(header_width/2 + len(header_title)/2)))
+ msg.append(header_width * "=")
+ msg.append("System uname: "+platform.platform(aliased=1))
+
+ lastSync = portage.grabfile(os.path.join(
+ self._mysettings["PORTDIR"], "metadata", "timestamp.chk"))
+ msg.append("Timestamp of tree:", end=' ')
+ if lastSync:
+ msg.append(lastSync[0])
+ else:
+ msg.append("Unknown")
+
+ output=subprocess_getstatusoutput("distcc --version")
+ if not output[0]:
+ msg.append(str(output[1].split("\n",1)[0]), end=' ')
+ if "distcc" in self._mysettings.features:
+ msg.append("[enabled]")
+ else:
+ msg.append("[disabled]")
+
+ output=subprocess_getstatusoutput("ccache -V")
+ if not output[0]:
+ msg.append(str(output[1].split("\n",1)[0]), end=' ')
+ if "ccache" in self._mysettings.features:
+ msg.append("[enabled]")
+ else:
+ msg.append("[disabled]")
+
+ myvars = ["sys-devel/autoconf", "sys-devel/automake", "virtual/os-headers",
+ "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"]
+ myvars += portage.util.grabfile(self._mysettings["PORTDIR"]+"/profiles/info_pkgs")
+ atoms = []
+ vardb = trees["/"]["vartree"].dbapi
+ for x in myvars:
+ try:
+ x = Atom(x)
+ except InvalidAtom:
+ writemsg_stdout("%-20s %s\n" % (x+":", "[NOT VALID]"),
+ noiselevel=-1)
+ else:
+ for atom in expand_new_virt(vardb, x):
+ if not atom.blocker:
+ atoms.append((x, atom))
+
+ myvars = sorted(set(atoms))
+
+ portdb = trees["/"]["porttree"].dbapi
+ main_repo = portdb.getRepositoryName(portdb.porttree_root)
+ cp_map = {}
+ cp_max_len = 0
+
+ for orig_atom, x in myvars:
+ pkg_matches = vardb.match(x)
+
+ versions = []
+ for cpv in pkg_matches:
+ matched_cp = portage.versions.cpv_getkey(cpv)
+ ver = portage.versions.cpv_getversion(cpv)
+ ver_map = cp_map.setdefault(matched_cp, {})
+ prev_match = ver_map.get(ver)
+ if prev_match is not None:
+ if prev_match.provide_suffix:
+ # prefer duplicate matches that include
+ # additional virtual provider info
+ continue
+
+ if len(matched_cp) > cp_max_len:
+ cp_max_len = len(matched_cp)
+ repo = vardb.aux_get(cpv, ["repository"])[0]
+ if repo == main_repo:
+ repo_suffix = ""
+ elif not repo:
+ repo_suffix = "::<unknown repository>"
+ else:
+ repo_suffix = "::" + repo
+
+ if matched_cp == orig_atom.cp:
+ provide_suffix = ""
+ else:
+ provide_suffix = " (%s)" % (orig_atom,)
+
+ ver_map[ver] = _info_pkgs_ver(ver, repo_suffix, provide_suffix)
+
+ for cp in sorted(cp_map):
+ versions = sorted(cp_map[cp].values())
+ versions = ", ".join(ver.toString() for ver in versions)
+ writemsg_stdout("%s %s\n" % \
+ ((cp + ":").ljust(cp_max_len + 1), versions),
+ noiselevel=-1)
+
+ libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
+
+ repos = portdb.settings.repositories
+ writemsg_stdout("Repositories: %s\n" % \
+ " ".join(repo.name for repo in repos), noiselevel=-1)
+
+ if _ENABLE_SET_CONFIG:
+ sets_line = "Installed sets: "
+ sets_line += ", ".join(s for s in \
+ sorted(root_config.sets['selected'].getNonAtoms()) \
+ if s.startswith(SETPREFIX))
+ sets_line += "\n"
+ writemsg_stdout(sets_line, noiselevel=-1)
+
+ myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK',
+ 'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR',
+ 'PORTDIR_OVERLAY', 'PORTAGE_BUNZIP2_COMMAND',
+ 'PORTAGE_BZIP2_COMMAND',
+ 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS',
+ 'ACCEPT_KEYWORDS', 'ACCEPT_LICENSE', 'SYNC', 'FEATURES',
+ 'EMERGE_DEFAULT_OPTS']
+ myvars.extend(portage.util.grabfile(self._mysettings["PORTDIR"]+"/profiles/info_vars"))
+
+ myvars_ignore_defaults = {
+ 'PORTAGE_BZIP2_COMMAND' : 'bzip2',
+ }
+
+ myvars = portage.util.unique_array(myvars)
+ use_expand = self._mysettings.get('USE_EXPAND', '').split()
+ use_expand.sort()
+ use_expand_hidden = set(
+ self._mysettings.get('USE_EXPAND_HIDDEN', '').upper().split())
+ alphabetical_use = '--alphabetical' in myopts
+ unset_vars = []
+ myvars.sort()
+ for x in myvars:
+ if x in self._mysettings:
+ if x != "USE":
+ default = myvars_ignore_defaults.get(x)
+ if default is not None and \
+ default == self._mysettings[x]:
+ continue
+ writemsg_stdout('%s="%s"\n' % (x, self._mysettings[x]), noiselevel=-1)
+ else:
+ use = set(self._mysettings["USE"].split())
+ for varname in use_expand:
+ flag_prefix = varname.lower() + "_"
+ for f in list(use):
+ if f.startswith(flag_prefix):
+ use.remove(f)
+ use = list(use)
+ use.sort()
+ msg.append('USE="%s"' % " ".join(use), end=' ')
+ for varname in use_expand:
+ myval = self._mysettings.get(varname)
+ if myval:
+ msg.append('%s="%s"' % (varname, myval), end=' ')
+ else:
+ unset_vars.append(x)
+ if unset_vars:
+ msg.append("Unset: "+", ".join(unset_vars))
+
+ # See if we can find any packages installed matching the strings
+ # passed on the command line
+ mypkgs = []
+ vardb = trees[self._mysettings["ROOT"]]["vartree"].dbapi
+ portdb = trees[self._mysettings["ROOT"]]["porttree"].dbapi
+ bindb = trees[self._mysettings["ROOT"]]["bintree"].dbapi
+ for x in myfiles:
+ match_found = False
+ installed_match = vardb.match(x)
+ for installed in installed_match:
+ mypkgs.append((installed, "installed"))
+ match_found = True
+
+ if match_found:
+ continue
+
+ for db, pkg_type in ((portdb, "ebuild"), (bindb, "binary")):
+ if pkg_type == "binary" and "--usepkg" not in myopts:
+ continue
+
+ matches = db.match(x)
+ matches.reverse()
+ for match in matches:
+ if pkg_type == "binary":
+ if db.bintree.isremote(match):
+ continue
+ auxkeys = ["EAPI", "DEFINED_PHASES"]
+ metadata = dict(zip(auxkeys, db.aux_get(match, auxkeys)))
+ if metadata["EAPI"] not in ("0", "1", "2", "3") and \
+ "info" in metadata["DEFINED_PHASES"].split():
+ mypkgs.append((match, pkg_type))
+ break
+
+ # If some packages were found...
+ if mypkgs:
+ # Get our global settings (we only print stuff if it varies from
+ # the current config)
+ mydesiredvars = [ 'CHOST', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS' ]
+ auxkeys = mydesiredvars + list(vardb._aux_cache_keys)
+ auxkeys.append('DEFINED_PHASES')
+ global_vals = {}
+ pkgsettings = portage.config(clone=settings)
+
+ # Loop through each package
+ # Only print settings if they differ from global settings
+ header_title = "Package Settings"
+ msg.append(header_width * "=")
+ msg.append(header_title.rjust(int(header_width/2 + len(header_title)/2)))
+ msg.append(header_width * "=")
+ from portage.output import EOutput
+ out = EOutput()
+ for mypkg in mypkgs:
+ cpv = mypkg[0]
+ pkg_type = mypkg[1]
+ # Get all package specific variables
+ if pkg_type == "installed":
+ metadata = dict(zip(auxkeys, vardb.aux_get(cpv, auxkeys)))
+ elif pkg_type == "ebuild":
+ metadata = dict(zip(auxkeys, portdb.aux_get(cpv, auxkeys)))
+ elif pkg_type == "binary":
+ metadata = dict(zip(auxkeys, bindb.aux_get(cpv, auxkeys)))
+
+ pkg = Package(built=(pkg_type!="ebuild"), cpv=cpv,
+ installed=(pkg_type=="installed"), metadata=zip(Package.metadata_keys,
+ (metadata.get(x, '') for x in Package.metadata_keys)),
+ root_config=root_config, type_name=pkg_type)
+
+ if pkg_type == "installed":
+ msg.append("\n%s was built with the following:" % \
+ colorize("INFORM", str(pkg.cpv)))
+ elif pkg_type == "ebuild":
+ msg.append("\n%s would be build with the following:" % \
+ colorize("INFORM", str(pkg.cpv)))
+ elif pkg_type == "binary":
+ msg.append("\n%s (non-installed binary) was built with the following:" % \
+ colorize("INFORM", str(pkg.cpv)))
+
+ writemsg_stdout('%s\n' % pkg_use_display(pkg, myopts),
+ noiselevel=-1)
+ if pkg_type == "installed":
+ for myvar in mydesiredvars:
+ if metadata[myvar].split() != settings.get(myvar, '').split():
+ msg.append("%s=\"%s\"" % (myvar, metadata[myvar]))
+
+ if metadata['DEFINED_PHASES']:
+ if 'info' not in metadata['DEFINED_PHASES'].split():
+ continue
+
+ msg.append(">>> Attempting to run pkg_info() for '%s'" % pkg.cpv)
+
+ if pkg_type == "installed":
+ ebuildpath = vardb.findname(pkg.cpv)
+ elif pkg_type == "ebuild":
+ ebuildpath = portdb.findname(pkg.cpv, myrepo=pkg.repo)
+ elif pkg_type == "binary":
+ tbz2_file = bindb.bintree.getname(pkg.cpv)
+ ebuild_file_name = pkg.cpv.split("/")[1] + ".ebuild"
+ ebuild_file_contents = portage.xpak.tbz2(tbz2_file).getfile(ebuild_file_name)
+ tmpdir = tempfile.mkdtemp()
+ ebuildpath = os.path.join(tmpdir, ebuild_file_name)
+ file = open(ebuildpath, 'w')
+ file.write(ebuild_file_contents)
+ file.close()
+
+ if not ebuildpath or not os.path.exists(ebuildpath):
+ out.ewarn("No ebuild found for '%s'" % pkg.cpv)
+ continue
+
+ if pkg_type == "installed":
+ portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"],
+ pkgsettings, debug=(self._mysettings.get("PORTAGE_DEBUG", "") == 1),
+ mydbapi=trees[self._mysettings["ROOT"]]["vartree"].dbapi,
+ tree="vartree")
+ elif pkg_type == "ebuild":
+ portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"],
+ pkgsettings, debug=(self._mysettings.get("PORTAGE_DEBUG", "") == 1),
+ mydbapi=trees[self._mysettings["ROOT"]]["porttree"].dbapi,
+ tree="porttree")
+ elif pkg_type == "binary":
+ portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"],
+ pkgsettings, debug=(self._mysettings.get("PORTAGE_DEBUG", "") == 1),
+ mydbapi=trees[self._mysettings["ROOT"]]["bintree"].dbapi,
+ tree="bintree")
+ shutil.rmtree(tmpdir)
def add_buildlog_main(self):
conn=CM.getConnection()
diff --git a/gobs/pym/check_setup.py b/gobs/pym/check_setup.py
index b4d08e9..e3a9075 100644
--- a/gobs/pym/check_setup.py
+++ b/gobs/pym/check_setup.py
@@ -13,7 +13,7 @@ CM=connectionManager(gobs_settings_dict)
if CM.getName()=='pgsql':
from gobs.pgsql import *
-def check_make_conf(conn, gobs_settings_dict):
+def check_make_conf(conn):
# FIXME: mark any config updating true in the db when updating the configs
# Get the config list
##selective import the pgsql/mysql queries
@@ -23,7 +23,7 @@ def check_make_conf(conn, gobs_settings_dict):
for config_id in config_list_all:
attDict={}
# Set the config dir
- check_config_dir = gobs_settings_dict['gobs_root'] + "config/" + config_id[0] + "/"
+ check_config_dir = "/var/lib/gobs/" + gobs_settings_dict['gobs_gitreponame'] + config_id[0] + "/"
make_conf_file = check_config_dir + "etc/portage/make.conf"
# Check if we can open the file and close it
# Check if we have some error in the file (portage.util.getconfig)
@@ -56,7 +56,7 @@ def check_make_conf_guest(connection, config_profile):
make_conf_checksum_db = get_profile_checksum(connection,config_profile)[0]
if make_conf_checksum_db is None:
return "1"
- make_conf_file = "/etc/make.conf"
+ make_conf_file = "/etc/portage/make.conf"
make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
if make_conf_checksum_tree != make_conf_checksum_db:
return "2"
diff --git a/gobs/pym/initsql.py b/gobs/pym/initsql.py
deleted file mode 100644
index 2de0e86..0000000
--- a/gobs/pym/initsql.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import sys, os, re
-from gobs.readconf import get_conf_settings
-
-class DBStateError(Exception):
- """If the DB is in a state that is not expected, we raise this."""
- def __init__(self, value):
- Exception.__init__(self)
- self.value = value
- def __str__(self):
- return repr(self.value)
-
-class DatabaseConfig(object):
- # No touchy
- sql_settings = {}
- gobs_settings = get_conf_settings()
- gobs_settings_dict = gobs_settings.read_gobs_settings_all()
- mode = gobs_settings_dict['sql_backend']
-
- # Settings for MySQL. You need to create one users in
- # your MySQL database.
- # The user needs:
- # DELETE, INSERT, UPDATE, SELECT
- # Do NOT change these, set the values in the config file /etc/buildhost/buildhost.conf
- sql_settings[mode] = {}
- # settings['sql_pgsql']['charset'] = 'utf8'
- sql_settings[mode]['host'] = gobs_settings_dict['sql_host']
- sql_settings[mode]['database'] = gobs_settings_dict['sql_db']
- sql_settings[mode]['user'] = gobs_settings_dict['sql_user']
- sql_settings[mode]['password'] = gobs_settings_dict['sql_passwd']
-
-class init_sql_db(object):
-
- def init_sql_backend(self):
- from gobs.initsql import DatabaseConfig # import DatebaseConfig
- database = None
- # Setup the database backent to use mysql
- if DatabaseConfig.mode == 'pgsql':
- from gobs.pgsqlbackend import PgSQLPackageDB
- database = PgSQLPackageDB(DatabaseConfig.sql_settings['pgsql'])
- print database
- return database
\ No newline at end of file
diff --git a/gobs/pym/package.py b/gobs/pym/package.py
index 8d94936..0efc8aa 100644
--- a/gobs/pym/package.py
+++ b/gobs/pym/package.py
@@ -18,14 +18,13 @@ if CM.getName()=='pgsql':
class gobs_package(object):
- def __init__(self, mysettings, myportdb, gobs_settings_dict):
+ def __init__(self, mysettings, myportdb):
self._mysettings = mysettings
- self._gobs_settings_dict = gobs_settings_dict
self._myportdb = myportdb
def change_config(self, config_id):
# Change config_root config_id = table configs.id
- my_new_setup = self._gobs_settings_dict['gobs_root'] +"config/" + config_id + "/"
+ my_new_setup = "/var/lib/gobs/" + gobs_settings_dict['gobs_gitreponame'] + "/" + config_id + "/"
mysettings_setup = portage.config(config_root = my_new_setup)
return mysettings_setup
diff --git a/gobs/pym/pgsqlbackend.py b/gobs/pym/pgsqlbackend.py
deleted file mode 100644
index d89feec..0000000
--- a/gobs/pym/pgsqlbackend.py
+++ /dev/null
@@ -1,418 +0,0 @@
-
-class SQLPackageDatabase(object):
- """we have to store our stuff somewhere
- subclass and redefine init to provide
- at least self.cursor"""
-
- # These are set by subclasses
- db = None
- cursor = None
- syntax_placeholder = None
- syntax_autoincrement = None
- sql = {}
-
- def get_default_config(self):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT id FROM configs WHERE default_config = True'
- cursor.execute(sqlQ)
- return cursor.fetchone()
-
- def get_config_list(self):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT id FROM configs WHERE default_config = False AND active = True'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
- def get_config_list_all(self):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT id FROM configs'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
- def update__make_conf(self, configsDict):
- cursor = self.conn.cursor()
- sqlQ = 'UPDATE configs SET make_conf_checksum = %s, make_conf_text = %s, active = %s, config_error = %s WHERE id = %s'
- for k, v in configsDict.iteritems():
- params = [v['make_conf_checksum_tree'], v['make_conf_text'], v['active'], v['config_error'], k]
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def have_package_db(self, categories, package):
- cursor = self.conn.cursor()
- sqlQ ='SELECT package_id FROM packages WHERE category = %s AND package_name = %s'
- params = categories, package
- cursor.execute(sqlQ, params)
- return cursor.fetchone()
-
- def get_categories_db(self):
- cursor = self.conn.cursor()
- sqlQ =' SELECT category FROM categories'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
- def get_categories_checksum_db(self, categories):
- cursor = self.conn.cursor()
- sqlQ =' SELECT metadata_xml_checksum FROM categories_meta WHERE category = %s'
- cursor.execute(sqlQ, (categories,))
- return cursor.fetchone()
-
- def add_new_categories_meta_sql(self, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
- cursor = self.conn.cursor()
- sqlQ = 'INSERT INTO categories_meta (category, metadata_xml_checksum, metadata_xml_text) VALUES ( %s, %s, %s )'
- params = categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def update_categories_meta_sql(self, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
- cursor = self.conn.cursor()
- sqlQ ='UPDATE categories_meta SET metadata_xml_checksum = %s, metadata_xml_text = %s WHERE category = %s'
- params = (categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree, categories)
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def add_new_manifest_sql(self, package_id, get_manifest_text, manifest_checksum_tree):
- cursor = self.conn.cursor()
- sqlQ = 'INSERT INTO manifest (package_id, manifest, checksum) VALUES ( %s, %s, %s )'
- params = package_id, get_manifest_text, manifest_checksum_tree
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def add_new_package_metadata(self, package_id, package_metadataDict):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT changelog_checksum FROM packages_meta WHERE package_id = %s'
- cursor.execute(sqlQ, (package_id,))
- if cursor.fetchone() is None:
- sqlQ = 'INSERT INTO packages_meta (package_id, changelog_text, changelog_checksum, metadata_text, metadata_checksum) VALUES ( %s, %s, %s, %s, %s )'
- for k, v in package_metadataDict.iteritems():
- params = package_id, v['changelog_text'], v['changelog_checksum'], v[' metadata_xml_text'], v['metadata_xml_checksum']
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def update_new_package_metadata(self, package_id, package_metadataDict):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT changelog_checksum, metadata_checksum FROM packages_meta WHERE package_id = %s'
- cursor.execute(sqlQ, package_id)
- entries = cursor.fetchone()
- if entries is None:
- changelog_checksum_db = None
- metadata_checksum_db = None
- else:
- changelog_checksum_db = entries[0]
- metadata_checksum_db = entries[1]
- for k, v in package_metadataDict.iteritems():
- if changelog_checksum_db != v['changelog_checksum']:
- sqlQ = 'UPDATE packages_meta SET changelog_text = %s, changelog_checksum = %s WHERE package_id = %s'
- params = v['changelog_text'], v['changelog_checksum'], package_id
- cursor.execute(sqlQ, params)
- if metadata_checksum_db != v['metadata_xml_checksum']:
- sqlQ = 'UPDATE packages_meta SET metadata_text = %s, metadata_checksum = %s WHERE package_id = %s'
- params = v[' metadata_xml_text'], v['metadata_xml_checksum'], package_id
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def get_manifest_db(self, package_id):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT checksum FROM manifest WHERE package_id = %s'
- cursor.execute(sqlQ, (package_id,))
- manifest_checksum = cursor.fetchone()
- if manifest_checksum is None:
- return None
- return manifest_checksum[0]
-
- def update_manifest_sql(self, package_id, get_manifest_text, manifest_checksum_tree):
- cursor = self.conn.cursor()
- sqlQ = 'UPDATE manifest SET checksum = %s, manifest = %s WHERE package_id = %s'
- params = (manifest_checksum_tree, get_manifest_text, package_id)
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def add_new_metadata(self, metadataDict):
- for k, v in metadataDict.iteritems():
- cursor = self.conn.cursor()
- sqlQ = 'SELECT updaterestrictions( %s, %s )'
- params = k, v['restrictions']
- cursor.execute(sqlQ, params)
- sqlQ = 'SELECT updatekeywords( %s, %s )'
- params = k, v['keyword']
- cursor.execute(sqlQ, params)
- sqlQ = 'SELECT updateiuse( %s, %s )'
- params = k, v['iuse']
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def add_new_package_sql(self, packageDict):
- #lets have a new cursor for each metod as per best practice
- cursor = self.conn.cursor()
- sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
- ebuild_id_list = []
- package_id_list = []
- for k, v in packageDict.iteritems():
- params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
- v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
- v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
- v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
- v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
- v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
- v['ebuild_version_metadata_tree'][16]]
- cursor.execute(sqlQ, params)
- mid = cursor.fetchone()
- mid=mid[0]
- ebuild_id_list.append(mid[1])
- package_id_list.append(mid[0])
- self.conn.commit()
- # add_new_metadata(metadataDict)
- return ebuild_id_list, package_id_list
-
- def add_new_ebuild_sql(packageDict, new_ebuild_list):
- #lets have a new cursor for each metod as per best practice
- cursor = self.conn.cursor()
- sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
- ebuild_id_list = []
- package_id_list = []
- for k, v in packageDict.iteritems():
- for x in new_ebuild_list:
- if x == v['ebuild_version_tree']:
- params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
- v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
- v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
- v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
- v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
- v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
- v['ebuild_version_metadata_tree'][16]]
- cursor.execute(sqlQ, params)
- mid = cursor.fetchone()
- mid=mid[0]
- ebuild_id_list.append(mid[1])
- package_id_list.append(mid[0])
- self.conn.commit()
- # add_new_metadata(metadataDict)
- return ebuild_id_list, package_id_list
-
- def update_active_ebuild(self, package_id, ebuild_version_tree):
- cursor = self.conn.cursor()
- sqlQ ="UPDATE ebuilds SET active = 'False', timestamp = now() WHERE package_id = %s AND ebuild_version = %s AND active = 'True'"
- cursor.execute(sqlQ, (package_id, ebuild_version_tree))
- self.conn.commit()
-
- def get_cpv_from_ebuild_id(self, ebuild_id):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT package_id FROM ebuild WHERE id = %s'
- self.cursor.execute(sql, ebuild_id)
- entries = self.cursor.fetchone()
- return entries
-
- def get_cp_from_package_id(self, package_id):
- cursor = self.conn.cursor()
- sqlQ = "SELECT ARRAY_TO_STRING(ARRAY[category, package_name] , '/') AS cp FROM packages WHERE package_id = %s"
- cursor.execute(sqlQ, (package_id,))
- return cursor.fetchone()
-
- def add_new_package_buildqueue(self, ebuild_id, config_id, iuse_flags_list, use_enable, message):
- cursor = self.conn.cursor()
- sqlQ="SELECT insert_buildqueue( %s, %s, %s, %s, %s )"
- if not iuse_flags_list:
- iuse_flags_list=None
- use_enable=None
- params = ebuild_id, unicode(config_id), iuse_flags_list, use_enable, message
- cursor.execute(sqlQ, params)
- self.conn.commit()
-
- def get_ebuild_checksum(self, package_id, ebuild_version_tree):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT ebuild_checksum FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = TRUE'
- cursor.execute(sqlQ, (package_id, ebuild_version_tree))
- entries = cursor.fetchone()
- if entries is None:
- return None
- return entries[0]
-
- def add_old_package(self, old_package_list):
- cursor = self.conn.cursor()
- mark_old_list = []
- sqlQ = "UPDATE ebuilds SET active = 'FALSE', timestamp = NOW() WHERE package_id = %s AND active = 'TRUE' RETURNING package_id"
- for old_package in old_package_list:
- cursor.execute(sqlQ, (old_package[0],))
- entries = cursor.fetchone()
- if entries is not None:
- mark_old_list.append(entries[0])
- self.conn.commit()
- return mark_old_list
-
- def get_old_categories(self, categories_line):
- cursor = self.conn.cursor()
- sqlQ = "SELECT package_name FROM packages WHERE category = %s"
- cursor.execute(sqlQ (categories_line))
- return cursor.fetchone()
-
- def del_old_categories(self, real_old_categoriess):
- cursor = self.conn.cursor()
- sqlQ1 = 'DELETE FROM categories_meta WHERE category = %s'
- sqlQ2 = 'DELETE FROM categories categories_meta WHERE category = %s'
- cursor.execute(sqlQ1 (real_old_categories))
- cursor.execute(sqlQ2 (real_old_categories))
- self.conn.commit()
-
- def add_old_ebuild(self, package_id, old_ebuild_list):
- cursor = self.conn.cursor()
- sqlQ1 = "UPDATE ebuilds SET active = 'FALSE' WHERE package_id = %s AND ebuild_version = %s"
- sqlQ2 = "SELECT id FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = 'TRUE'"
- sqlQ3 = "SELECT queue_id FROM buildqueue WHERE ebuild_id = %s"
- sqlQ4 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
- sqlQ5 = 'DELETE FROM buildqueue WHERE queue_id = %s'
- for old_ebuild in old_ebuild_list:
- cursor.execute(sqlQ2, (package_id, old_ebuild[0]))
- ebuild_id_list = cursor.fetchall()
- if ebuild_id_list is not None:
- for ebuild_id in ebuild_id_list:
- cursor.execute(sqlQ3, (ebuild_id))
- queue_id_list = cursor.fetchall()
- if queue_id_list is not None:
- for queue_id in queue_id_list:
- cursor.execute(sqlQ4, (queue_id))
- cursor.execute(sqlQ5, (queue_id))
- cursor.execute(sqlQ1, (package_id, old_ebuild[0]))
- self.conn.commit()
-
- def cp_all_old_db(self, old_package_id_list):
- cursor = self.conn.cursor()
- old_package_list = []
- for old_package in old_package_id_list:
- sqlQ = "SELECT package_id FROM ebuilds WHERE package_id = %s AND active = 'FALSE' AND date_part('days', NOW() - timestamp) < 60"
- cursor.execute(sqlQ, old_package)
- entries = cursor.fetchone()
- if entries is None:
- old_package_list.append(old_package)
- return old_package_list
-
- def cp_all_db(self):
- cursor = self.conn.cursor()
- sqlQ = "SELECT package_id FROM packages"
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
- def del_old_ebuild(self, ebuild_old_list_db):
- cursor = self.conn.cursor()
- sqlQ1 = 'SELECT build_id FROM buildlog WHERE ebuild_id = %s'
- sqlQ2 = 'DELETE FROM qa_problems WHERE build_id = %s'
- sqlQ3 = 'DELETE FROM repoman_problems WHERE build_id = %s'
- sqlQ4 = 'DELETE FROM ebuildbuildwithuses WHERE build_id = %s'
- sqlQ5 = 'DELETE FROM ebuildhaveskeywords WHERE ebuild_id = %s'
- sqlQ6 = 'DELETE FROM ebuildhavesiuses WHERE ebuild_id = %s'
- sqlQ7 = 'DELETE FROM ebuildhavesrestrictions WHERE ebuild_id = %s'
- sqlQ8 = 'DELETE FROM buildlog WHERE ebuild_id = %s'
- sqlQ9 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s'
- sqlQ10 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
- sqlQ11 = 'DELETE FROM buildqueue WHERE ebuild_id = %s'
- sqlQ12 = 'DELETE FROM ebuilds WHERE id = %s'
- for ebuild_id in ebuild_old_list_db:
- cursor.execute(sqlQ1, (ebuild_id[0],))
- build_id_list = cursor.fetchall()
- for build_id in build_id_list:
- cursor.execute(sqlQ2, (build_id,))
- cursor.execute(sqlQ3, (build_id,))
- cursor.execute(sqlQ4, (build_id,))
- cursor.execute(sqlQ9, (ebuild_id[0],))
- queue_id_list = cursor.fetchall()
- for queue_id in queue_id_list:
- cursor.execute(sqlQ10, (queue_id,))
- cursor.execute(sqlQ5, (ebuild_id[0],))
- cursor.execute(sqlQ6, (ebuild_id[0],))
- cursor.execute(sqlQ7, (ebuild_id[0],))
- cursor.execute(sqlQ8, (ebuild_id[0],))
- cursor.execute(sqlQ11, (ebuild_id[0],))
- cursor.execute(sqlQ12, (ebuild_id[0],))
- self.conn.commit()
-
- def del_old_package(self, package_id_list):
- cursor = self.conn.cursor()
- sqlQ1 = 'SELECT id FROM ebuilds WHERE package_id = %s'
- sqlQ2 = 'DELETE FROM ebuilds WHERE package_id = %s'
- sqlQ3 = 'DELETE FROM manifest WHERE package_id = %s'
- sqlQ4 = 'DELETE FROM packages_meta WHERE package_id = %s'
- sqlQ5 = 'DELETE FROM packages WHERE package_id = %s'
- for package_id in package_id_list:
- cursor.execute(sqlQ1, package_id)
- ebuild_id_list = cursor.fetchall()
- self.del_old_ebuild(ebuild_id_list)
- cursor.execute(sqlQ2, (package_id,))
- cursor.execute(sqlQ3, (package_id,))
- cursor.execute(sqlQ4, (package_id,))
- cursor.execute(sqlQ5, (package_id,))
- self.conn.commit()
-
- def cp_list_db(self, package_id):
- cursor = self.conn.cursor()
- sqlQ = "SELECT ebuild_version FROM ebuilds WHERE active = 'TRUE' AND package_id = %s"
- cursor.execute(sqlQ, (package_id))
- return cursor.fetchall()
-
- def cp_list_old_db(self, package_id):
- cursor = self.conn.cursor()
- sqlQ ="SELECT id, ebuild_version FROM ebuilds WHERE active = 'FALSE' AND package_id = %s AND date_part('days', NOW() - timestamp) > 60"
- cursor.execute(sqlQ, package_id)
- return cursor.fetchall()
-
- def add_qa_repoman(self, ebuild_id_list, qa_error, packageDict, config_id):
- ebuild_i = 0
- cursor = self.conn.cursor()
- for k, v in packageDict.iteritems():
- ebuild_id = ebuild_id_list[ebuild_i]
- sqlQ = "INSERT INTO buildlog (ebuild_id, config, error_summary, timestamp, hash ) VALUES ( %s, %s, %s, now(), '0' ) RETURNING build_id"
- if v['ebuild_error'] != [] or qa_error != []:
- if v['ebuild_error'] != [] or qa_error == []:
- summary = "Repoman"
- elif v['ebuild_error'] == [] or qa_error != []:
- summary = "QA"
- else:
- summary = "QA:Repoman"
- params = (ebuild_id, config_id, summary)
- cursor.execute(sqlQ, params)
- build_id = cursor.fetchone()
- if v['ebuild_error'] != []:
- sqlQ = 'INSERT INTO repoman_problems (problem, build_id ) VALUES ( %s, %s )'
- for x in v['ebuild_error']:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- if qa_error != []:
- sqlQ = 'INSERT INTO qa_problems (problem, build_id ) VALUES ( %s, %s )'
- for x in qa_error:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- ebuild_i = ebuild_i +1
- self.conn.commit()
-
- def get_arch_db(self):
- cursor = self.conn.cursor()
- sqlQ = 'SELECT keyword FROM keywords WHERE keyword = %s'
- cursor.execute(sqlQ, ('ppc',))
- return cursor.fetchone()
-
- def add_new_arch_db(self, arch_list):
- cursor = self.conn.cursor()
- sqlQ = 'INSERT INTO keywords (keyword) VALUES ( %s )'
- for arch in arch_list:
- cursor.execute(sqlQ, (arch,))
- self.conn.commit()
-
- def closeconnection(self):
- self.conn.close()
-
-class PgSQLPackageDB(SQLPackageDatabase):
- """override for MySQL backend"""
-
- syntax_placeholder = "%s"
-
- def __init__(self, config=None):
- # Do not complain about correct usage of ** magic
- # pylint: disable-msg=W0142
- SQLPackageDatabase.__init__(self)
-
- if config is None or 'database' not in config:
- print "No configuration available!"
- sys.exit(1)
- try:
- import psycopg2
- except ImportError:
- print "Please install a recent version of dev-python/psycopg for Python"
- sys.exit(1)
- self.conn = psycopg2.connect(**config)
diff --git a/gobs/pym/querys/__init__.py b/gobs/pym/querys/__init__.py
deleted file mode 100644
index 8d1c8b6..0000000
--- a/gobs/pym/querys/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/gobs/pym/querys/mysql.py b/gobs/pym/querys/mysql.py
deleted file mode 100644
index 436fa30..0000000
--- a/gobs/pym/querys/mysql.py
+++ /dev/null
@@ -1 +0,0 @@
-#should contain the same functions as in pgsql.py but for mysql
diff --git a/gobs/pym/querys/pgsql.py b/gobs/pym/querys/pgsql.py
deleted file mode 100644
index 021c37f..0000000
--- a/gobs/pym/querys/pgsql.py
+++ /dev/null
@@ -1,596 +0,0 @@
-#every function takes a connection as a parameter that is provided by the CM
-from __future__ import print_function
-
-def get_default_config(connection):
- cursor = connection.cursor()
- sqlQ = 'SELECT id FROM configs WHERE default_config = True'
- cursor.execute(sqlQ)
- return cursor.fetchone()
-
-def get_profile_checksum(connection, config_profile):
- cursor = connection.cursor()
- sqlQ = "SELECT make_conf_checksum FROM configs WHERE active = 'True' AND id = %s AND updateing = 'False'"
- cursor.execute(sqlQ, (config_profile,))
- return cursor.fetchone()
-
-def get_packages_to_build(connection, config_profile):
- cursor =connection.cursor()
- # no point in returning dead ebuilds, to just chuck em out later
- sqlQ1 = '''SELECT post_message, queue_id, ebuild_id FROM buildqueue WHERE config = %s AND extract(epoch from (NOW()) - timestamp) > 7200 ORDER BY queue_id LIMIT 1'''
-
- sqlQ2 ='''SELECT ebuild_id,category,package_name,ebuild_version,ebuild_checksum FROM ebuilds,buildqueue,packages
- WHERE buildqueue.ebuild_id=ebuilds.id AND ebuilds.package_id=packages.package_id AND queue_id = %s AND ebuilds.active = TRUE'''
-
- # get use flags to use
- sqlQ3 = "SELECT useflag, enabled FROM ebuildqueuedwithuses WHERE queue_id = %s"
- cursor.execute(sqlQ1, (config_profile,))
- build_dict={}
- entries = cursor.fetchone()
- if entries is None:
- return None
- if entries[2] is None:
- build_dict['ebuild_id'] = None
- build_dict['queue_id'] = entries[1]
- return build_dict
- msg_list = []
- if not entries[0] is None:
- for msg in entries[0].split(" "):
- msg_list.append(msg)
- build_dict['post_message'] = msg_list
- build_dict['queue_id'] = entries[1]
- build_dict['ebuild_id']=entries[2]
- cursor.execute(sqlQ2, (build_dict['queue_id'],))
- #make a list that contains objects that haves ebuild_id and post_message +the lot as attributes
- entries = cursor.fetchone()
- if entries is None:
- build_dict['checksum']= None
- return build_dict
- build_dict['ebuild_id']=entries[0]
- build_dict['category']=entries[1]
- build_dict['package']=entries[2]
- build_dict['ebuild_version']=entries[3]
- build_dict['checksum']=entries[4]
-
- #add a enabled and disabled list to the objects in the item list
- cursor.execute(sqlQ3, (build_dict['queue_id'],))
- uses={}
- for row in cursor.fetchall():
- uses[ row[0] ] = row[1]
- build_dict['build_useflags']=uses
- return build_dict
-
-def check_revision(connection, build_dict):
- cursor = connection.cursor()
- sqlQ1 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s AND config = %s'
- sqlQ2 = "SELECT useflag FROM ebuildqueuedwithuses WHERE queue_id = %s AND enabled = 'True'"
- cursor.execute(sqlQ1, (build_dict['ebuild_id'], build_dict['config_profile']))
- queue_id_list = cursor.fetchall()
- print('queue_id_list', queue_id_list)
- if queue_id_list == []:
- return None
- for queue_id in queue_id_list:
- print('queue_id after list', queue_id[0])
- cursor.execute(sqlQ2, (queue_id[0],))
- entries = cursor.fetchall()
- build_useflags = []
- if entries == []:
- build_useflags = None
- else:
- for use_line in sorted(entries):
- build_useflags.append(use_line[0])
- print("build_useflags build_dict['build_useflags']", build_useflags, build_dict['build_useflags'])
- if build_useflags == build_dict['build_useflags']:
- print('queue_id', queue_id[0])
- return queue_id[0]
- return None
-
-def get_config_list(connection):
- cursor = connection.cursor()
- sqlQ = 'SELECT id FROM configs WHERE default_config = False AND active = True'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
-def get_config_list_all(connection):
- cursor = connection.cursor()
- sqlQ = 'SELECT id FROM configs'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
-def update__make_conf(connection, configsDict):
- cursor = connection.cursor()
- sqlQ = 'UPDATE configs SET make_conf_checksum = %s, make_conf_text = %s, active = %s, config_error = %s WHERE id = %s'
- for k, v in configsDict.iteritems():
- params = [v['make_conf_checksum_tree'], v['make_conf_text'], v['active'], v['config_error'], k]
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def have_package_db(connection, categories, package):
- cursor = connection.cursor()
- sqlQ ='SELECT package_id FROM packages WHERE category = %s AND package_name = %s'
- params = categories, package
- cursor.execute(sqlQ, params)
- return cursor.fetchone()
-
-def have_activ_ebuild_id(connection, ebuild_id):
- cursor = connection.cursor()
- sqlQ = 'SELECT ebuild_checksum FROM ebuilds WHERE id = %s AND active = TRUE'
- cursor.execute(sqlQ, (ebuild_id,))
- entries = cursor.fetchone()
- if entries is None:
- return None
- # If entries is not None we need [0]
- return entries[0]
-
-def get_categories_db(connection):
- cursor = connection.cursor()
- sqlQ =' SELECT category FROM categories'
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
-def get_categories_checksum_db(connection, categories):
- cursor = connection.cursor()
- sqlQ =' SELECT metadata_xml_checksum FROM categories_meta WHERE category = %s'
- cursor.execute(sqlQ, (categories,))
- return cursor.fetchone()
-
-def add_new_categories_meta_sql(connection, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
- cursor = connection.cursor()
- sqlQ = 'INSERT INTO categories_meta (category, metadata_xml_checksum, metadata_xml_text) VALUES ( %s, %s, %s )'
- params = categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def update_categories_meta_sql(connection, categories, categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree):
- cursor = connection.cursor()
- sqlQ ='UPDATE categories_meta SET metadata_xml_checksum = %s, metadata_xml_text = %s WHERE category = %s'
- params = (categories_metadata_xml_checksum_tree, categories_metadata_xml_text_tree, categories)
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def add_new_manifest_sql(connection, package_id, get_manifest_text, manifest_checksum_tree):
- cursor = connection.cursor()
- sqlQ = 'INSERT INTO manifest (package_id, manifest, checksum) VALUES ( %s, %s, %s )'
- params = package_id, get_manifest_text, manifest_checksum_tree
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def add_new_package_metadata(connection, package_id, package_metadataDict):
- cursor = connection.cursor()
- sqlQ = 'SELECT changelog_checksum FROM packages_meta WHERE package_id = %s'
- cursor.execute(sqlQ, (package_id,))
- if cursor.fetchone() is None:
- sqlQ = 'INSERT INTO packages_meta (package_id, changelog_text, changelog_checksum, metadata_text, metadata_checksum) VALUES ( %s, %s, %s, %s, %s )'
- for k, v in package_metadataDict.iteritems():
- params = package_id, v['changelog_text'], v['changelog_checksum'], v[' metadata_xml_text'], v['metadata_xml_checksum']
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def update_new_package_metadata(connection, package_id, package_metadataDict):
- cursor = connection.cursor()
- sqlQ = 'SELECT changelog_checksum, metadata_checksum FROM packages_meta WHERE package_id = %s'
- cursor.execute(sqlQ, package_id)
- entries = cursor.fetchone()
- if entries is None:
- changelog_checksum_db = None
- metadata_checksum_db = None
- else:
- changelog_checksum_db = entries[0]
- metadata_checksum_db = entries[1]
- for k, v in package_metadataDict.iteritems():
- if changelog_checksum_db != v['changelog_checksum']:
- sqlQ = 'UPDATE packages_meta SET changelog_text = %s, changelog_checksum = %s WHERE package_id = %s'
- params = v['changelog_text'], v['changelog_checksum'], package_id
- cursor.execute(sqlQ, params)
- if metadata_checksum_db != v['metadata_xml_checksum']:
- sqlQ = 'UPDATE packages_meta SET metadata_text = %s, metadata_checksum = %s WHERE package_id = %s'
- params = v[' metadata_xml_text'], v['metadata_xml_checksum'], package_id
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def get_manifest_db(connection, package_id):
- cursor = connection.cursor()
- sqlQ = 'SELECT checksum FROM manifest WHERE package_id = %s'
- cursor.execute(sqlQ, package_id)
- entries = cursor.fetchone()
- if entries is None:
- return None
- # If entries is not None we need [0]
- return entries[0]
-
-def update_manifest_sql(connection, package_id, get_manifest_text, manifest_checksum_tree):
- cursor = connection.cursor()
- sqlQ = 'UPDATE manifest SET checksum = %s, manifest = %s WHERE package_id = %s'
- params = (manifest_checksum_tree, get_manifest_text, package_id)
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def add_new_metadata(connection, metadataDict):
- cursor = connection.cursor()
- for k, v in metadataDict.iteritems():
- #moved the cursor out side of the loop
- sqlQ = 'SELECT updaterestrictions( %s, %s )'
- params = k, v['restrictions']
- cursor.execute(sqlQ, params)
- sqlQ = 'SELECT updatekeywords( %s, %s )'
- params = k, v['keyword']
- cursor.execute(sqlQ, params)
- sqlQ = 'SELECT updateiuse( %s, %s )'
- params = k, v['iuse']
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def add_new_package_sql(connection, packageDict):
- #lets have a new cursor for each metod as per best practice
- cursor = connection.cursor()
- sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
- ebuild_id_list = []
- package_id_list = []
- for k, v in packageDict.iteritems():
- params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
- v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
- v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
- v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
- v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
- v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
- v['ebuild_version_metadata_tree'][16]]
- cursor.execute(sqlQ, params)
- mid = cursor.fetchone()
- mid=mid[0]
- ebuild_id_list.append(mid[1])
- package_id_list.append(mid[0])
- connection.commit()
- # add_new_metadata(metadataDict)
- return ebuild_id_list, package_id_list
-
-def add_new_ebuild_sql(connection, packageDict, new_ebuild_list):
- #lets have a new cursor for each metod as per best practice
- cursor = connection.cursor()
- sqlQ="SELECT insert_ebuild( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 'True')"
- ebuild_id_list = []
- package_id_list = []
- for k, v in packageDict.iteritems():
- for x in new_ebuild_list:
- if x == v['ebuild_version_tree']:
- params = [v['categories'], v['package'], v['ebuild_version_tree'], v['ebuild_version_revision'], v['ebuild_version_checksum_tree'],
- v['ebuild_version_text'], v['ebuild_version_metadata_tree'][0], v['ebuild_version_metadata_tree'][1],
- v['ebuild_version_metadata_tree'][12], v['ebuild_version_metadata_tree'][2], v['ebuild_version_metadata_tree'][3],
- v['ebuild_version_metadata_tree'][5],v['ebuild_version_metadata_tree'][6], v['ebuild_version_metadata_tree'][7],
- v['ebuild_version_metadata_tree'][9], v['ebuild_version_metadata_tree'][11],
- v['ebuild_version_metadata_tree'][13],v['ebuild_version_metadata_tree'][14], v['ebuild_version_metadata_tree'][15],
- v['ebuild_version_metadata_tree'][16]]
- cursor.execute(sqlQ, params)
- mid = cursor.fetchone()
- mid=mid[0]
- ebuild_id_list.append(mid[1])
- package_id_list.append(mid[0])
- connection.commit()
- # add_new_metadata(metadataDict)
- return ebuild_id_list, package_id_list
-
-def update_active_ebuild(connection, package_id, ebuild_version_tree):
- cursor = connection.cursor()
- sqlQ ="UPDATE ebuilds SET active = 'False', timestamp = now() WHERE package_id = %s AND ebuild_version = %s AND active = 'True'"
- cursor.execute(sqlQ, (package_id, ebuild_version_tree))
- connection.commit()
-
-def get_ebuild_id_db(connection, categories, package, ebuild_version_tree):
- cursor = connection.cursor()
- sqlQ ='SELECT id FROM packages WHERE category = %s AND ebuild_name = %s AND ebuild_version = %s'
- cursor.execute(sqlQ, (categories, package, ebuild_version_tree))
- entries = cursor.fetchone()
- return entries
-
-def get_ebuild_id_db_checksum(connection, build_dict):
- cursor = connection.cursor()
- sqlQ = 'SELECT id FROM ebuilds WHERE ebuild_version = %s AND ebuild_checksum = %s AND package_id = %s'
- cursor.execute(sqlQ, (build_dict['ebuild_version'], build_dict['checksum'], build_dict['package_id']))
- ebuild_id = cursor.fetchone()
- print('ebuild_id', ebuild_id)
- if ebuild_id is None:
- return None
- return ebuild_id[0]
-
-def get_cpv_from_ebuild_id(connection, ebuild_id):
- cursor = connection.cursor()
- #wasent used
- #sqlQ = 'SELECT package_id FROM ebuild WHERE id = %s'
- sqlQ='SELECT category, ebuild_name, ebuild_version FROM packages WHERE id = %s'
- cursor.execute(sqlQ, ebuild_id)
- entries = cursor.fetchone()
- return entries
-
-def get_cp_from_package_id(connection, package_id):
- cursor =connection.cursor()
- sqlQ = "SELECT ARRAY_TO_STRING(ARRAY[category, package_name] , '/') AS cp FROM packages WHERE package_id = %s"
- cursor.execute(sqlQ, (package_id,))
- return cursor.fetchone()
-
-def get_keyword_id_db(connection, arch, stable):
- cursor =connection.cursor()
- sqlQ ='SELECT id_keyword FROM keywords WHERE ARCH = %s AND stable = %s'
- cursor.execute(sqlQ, (arch, stable))
- entries = cursor.fetchone()
- #why only return 1 entery? if that IS the point use top(1)
- return entries
-
-def add_new_keywords(connection, ebuild_id, keyword_id):
- cursor = connection.cursor()
- sqlQ ='INSERT INTO keywordsToEbuild (ebuild_id, id_keyword) VALUES ( %s, %s )'
- cursor.execute(sqlQ, (ebuild_id, keyword_id))
- connection.commit()
-
-def have_package_buildqueue(connection, ebuild_id, config_id):
- cursor = connection.cursor()
- sqlQ = 'SELECT useflags FROM buildqueue WHERE ebuild_id = %s AND config_id = %s'
- params = (ebuild_id[0], config_id)
- cursor.execute(sqlQ, params)
- entries = cursor.fetchone()
- return entries
-
-def add_new_package_buildqueue(connection, ebuild_id, config_id, iuse_flags_list, use_enable, message):
- cursor = connection.cursor()
- sqlQ="SELECT insert_buildqueue( %s, %s, %s, %s, %s )"
- if not iuse_flags_list:
- iuse_flags_list=None
- use_enable=None
- params = ebuild_id, config_id, iuse_flags_list, use_enable, message
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def get_ebuild_checksum(connection, package_id, ebuild_version_tree):
- cursor = connection.cursor()
- sqlQ = 'SELECT ebuild_checksum FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = TRUE'
- cursor.execute(sqlQ, (package_id, ebuild_version_tree))
- entries = cursor.fetchone()
- if entries is None:
- return None
- # If entries is not None we need [0]
- return entries[0]
-
-def cp_all_db(connection):
- cursor = connection.cursor()
- sqlQ = "SELECT package_id FROM packages"
- cursor.execute(sqlQ)
- return cursor.fetchall()
-
-def add_old_package(connection, old_package_list):
- cursor = connection.cursor()
- mark_old_list = []
- sqlQ = "UPDATE ebuilds SET active = 'FALSE', timestamp = NOW() WHERE package_id = %s AND active = 'TRUE' RETURNING package_id"
- for old_package in old_package_list:
- cursor.execute(sqlQ, (old_package[0],))
- entries = cursor.fetchone()
- if entries is not None:
- mark_old_list.append(entries[0])
- connection.commit()
- return mark_old_list
-
-def get_old_categories(connection, categories_line):
- cursor = connection.cursor()
- sqlQ = "SELECT package_name FROM packages WHERE category = %s"
- cursor.execute(sqlQ (categories_line))
- return cursor.fetchone()
-
-def del_old_categories(connection, real_old_categoriess):
- cursor = connection.cursor()
- sqlQ1 = 'DELETE FROM categories_meta WHERE category = %s'
- sqlQ2 = 'DELETE FROM categories categories_meta WHERE category = %s'
- cursor.execute(sqlQ1 (real_old_categories))
- cursor.execute(sqlQ2 (real_old_categories))
- connection.commit()
-
-def add_old_ebuild(connection, package_id, old_ebuild_list):
- cursor = connection.cursor()
- sqlQ1 = "UPDATE ebuilds SET active = 'FALSE' WHERE package_id = %s AND ebuild_version = %s"
- sqlQ2 = "SELECT id FROM ebuilds WHERE package_id = %s AND ebuild_version = %s AND active = 'TRUE'"
- sqlQ3 = "SELECT queue_id FROM buildqueue WHERE ebuild_id = %s"
- sqlQ4 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
- sqlQ5 = 'DELETE FROM buildqueue WHERE queue_id = %s'
- for old_ebuild in old_ebuild_list:
- cursor.execute(sqlQ2, (package_id, old_ebuild[0]))
- ebuild_id_list = cursor.fetchall()
- if ebuild_id_list is not None:
- for ebuild_id in ebuild_id_list:
- cursor.execute(sqlQ3, (ebuild_id))
- queue_id_list = cursor.fetchall()
- if queue_id_list is not None:
- for queue_id in queue_id_list:
- cursor.execute(sqlQ4, (queue_id))
- cursor.execute(sqlQ5, (queue_id))
- cursor.execute(sqlQ1, (package_id, old_ebuild[0]))
- connection.commit()
-
-def cp_all_old_db(connection, old_package_id_list):
- cursor = connection.cursor()
- old_package_list = []
- for old_package in old_package_id_list:
- sqlQ = "SELECT package_id FROM ebuilds WHERE package_id = %s AND active = 'FALSE' AND date_part('days', NOW() - timestamp) < 60"
- cursor.execute(sqlQ, old_package)
- entries = cursor.fetchone()
- if entries is None:
- old_package_list.append(old_package)
- return old_package_list
-
-def del_old_queue(connection, queue_id):
- cursor = connection.cursor()
- sqlQ1 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
- sqlQ2 = 'DELETE FROM temp_errors_queue_qa WHERE queue_id = %s'
- sqlQ3 = 'DELETE FROM buildqueue WHERE queue_id = %s'
- cursor.execute(sqlQ1, (queue_id,))
- cursor.execute(sqlQ2, (queue_id,))
- cursor.execute(sqlQ3, (queue_id,))
- connection.commit()
-
-def del_old_ebuild(connection, ebuild_old_list_db):
- cursor = connection.cursor()
- sqlQ1 = 'SELECT build_id FROM buildlog WHERE ebuild_id = %s'
- sqlQ2 = 'DELETE FROM qa_problems WHERE build_id = %s'
- sqlQ3 = 'DELETE FROM repoman_problems WHERE build_id = %s'
- sqlQ4 = 'DELETE FROM ebuildbuildwithuses WHERE build_id = %s'
- sqlQ5 = 'DELETE FROM ebuildhaveskeywords WHERE ebuild_id = %s'
- sqlQ6 = 'DELETE FROM ebuildhavesiuses WHERE ebuild_id = %s'
- sqlQ7 = 'DELETE FROM ebuildhavesrestrictions WHERE ebuild_id = %s'
- sqlQ8 = 'DELETE FROM buildlog WHERE ebuild_id = %s'
- sqlQ9 = 'SELECT queue_id FROM buildqueue WHERE ebuild_id = %s'
- sqlQ10 = 'DELETE FROM ebuildqueuedwithuses WHERE queue_id = %s'
- sqlQ11 = 'DELETE FROM buildqueue WHERE ebuild_id = %s'
- sqlQ12 = 'DELETE FROM ebuilds WHERE id = %s'
- for ebuild_id in ebuild_old_list_db:
- cursor.execute(sqlQ1, (ebuild_id[0],))
- build_id_list = cursor.fetchall()
- if build_id_list != []:
- for build_id in build_id_list:
- cursor.execute(sqlQ2, (build_id[0],))
- cursor.execute(sqlQ3, (build_id[0],))
- cursor.execute(sqlQ4, (build_id[0],))
- cursor.execute(sqlQ9, (ebuild_id[0],))
- queue_id_list = cursor.fetchall()
- if queue_id_list != []:
- for queue_id in queue_id_list:
- cursor.execute(sqlQ10, (queue_id[0],))
- cursor.execute(sqlQ5, (ebuild_id[0],))
- cursor.execute(sqlQ6, (ebuild_id[0],))
- cursor.execute(sqlQ7, (ebuild_id[0],))
- cursor.execute(sqlQ8, (ebuild_id[0],))
- cursor.execute(sqlQ11, (ebuild_id[0],))
- cursor.execute(sqlQ12, (ebuild_id[0],))
- connection.commit()
-
-def del_old_package(connection, package_id_list):
- cursor = connection.cursor()
- sqlQ1 = 'SELECT id FROM ebuilds WHERE package_id = %s'
- sqlQ2 = 'DELETE FROM ebuilds WHERE package_id = %s'
- sqlQ3 = 'DELETE FROM manifest WHERE package_id = %s'
- sqlQ4 = 'DELETE FROM packages_meta WHERE package_id = %s'
- sqlQ5 = 'DELETE FROM packages WHERE package_id = %s'
- for package_id in package_id_list:
- cursor.execute(sqlQ1, package_id)
- ebuild_id_list = cursor.fetchall()
- del_old_ebuild(connection, ebuild_id_list)
- cursor.execute(sqlQ2, (package_id,))
- cursor.execute(sqlQ3, (package_id,))
- cursor.execute(sqlQ4, (package_id,))
- cursor.execute(sqlQ5, (package_id,))
- connection.commit()
-
-def cp_list_db(connection, package_id):
- cursor = connection.cursor()
- sqlQ = "SELECT ebuild_version FROM ebuilds WHERE active = 'TRUE' AND package_id = %s"
- cursor.execute(sqlQ, (package_id))
- return cursor.fetchall()
-
-def cp_list_old_db(connection, package_id):
- cursor = connection.cursor()
- sqlQ ="SELECT id, ebuild_version FROM ebuilds WHERE active = 'FALSE' AND package_id = %s AND date_part('days', NOW() - timestamp) > 60"
- cursor.execute(sqlQ, package_id)
- return cursor.fetchall()
-
-def move_queru_buildlog(connection, queue_id, build_error, summary_error, build_log_dict):
- cursor = connection.cursor()
- print('queue_id', queue_id)
- print('build_log_dict', build_log_dict)
- repoman_error_list = build_log_dict['repoman_error_list']
- qa_error_list = build_log_dict['qa_error_list']
- sqlQ = 'SELECT make_buildlog( %s, %s, %s, %s, %s, %s)'
- cursor.execute(sqlQ, (queue_id, summary_error, build_error, build_log_dict['logfilename'], qa_error_list, repoman_error_list))
- entries = cursor.fetchone()
- connection.commit()
- return entries
-
-def add_new_buildlog(connection, build_dict, use_flags_list, use_enable_list, build_error, summary_error, build_log_dict):
- cursor = connection.cursor()
- repoman_error_list = build_log_dict['repoman_error_list']
- qa_error_list = build_log_dict['qa_error_list']
- if not use_flags_list:
- use_flags_list=None
- use_enable=None
- print('make_deplog', build_dict['ebuild_id'], build_dict['config_profile'], use_flags_list, use_enable_list, summary_error, build_error, build_log_dict['logfilename'], qa_error_list, repoman_error_list)
- sqlQ = 'SELECT make_deplog( %s, %s, %s, %s, %s, %s, %s, %s, %s)'
- params = (build_dict['ebuild_id'], build_dict['config_profile'], use_flags_list, use_enable_list, summary_error, build_error, build_log_dict['logfilename'], qa_error_list, repoman_error_list)
- cursor.execute(sqlQ, params)
- entries = cursor.fetchone()
- connection.commit()
- return entries
-
-def add_qa_repoman(connection, ebuild_id_list, qa_error, packageDict, config_id):
- cursor = connection.cursor()
- ebuild_i = 0
- for k, v in packageDict.iteritems():
- ebuild_id = ebuild_id_list[ebuild_i]
- sqlQ = "INSERT INTO buildlog (ebuild_id, config, error_summary, timestamp, hash ) VALUES ( %s, %s, %s, now(), '1' ) RETURNING build_id"
- if v['ebuild_error'] != [] or qa_error != []:
- if v['ebuild_error'] != [] or qa_error == []:
- summary = "Repoman"
- elif v['ebuild_error'] == [] or qa_error != []:
- summary = "QA"
- else:
- summary = "QA:Repoman"
- params = (ebuild_id, config_id, summary)
- cursor.execute(sqlQ, params)
- build_id = cursor.fetchone()
- if v['ebuild_error'] != []:
- sqlQ = 'INSERT INTO repoman_problems (problem, build_id ) VALUES ( %s, %s )'
- for x in v['ebuild_error']:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- if qa_error != []:
- sqlQ = 'INSERT INTO qa_problems (problem, build_id ) VALUES ( %s, %s )'
- for x in qa_error:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- ebuild_i = ebuild_i +1
- connection.commit()
-
-def update_qa_repoman(connection, build_id, build_log_dict):
- cursor = connection.cursor()
- sqlQ1 = 'INSERT INTO repoman_problems (problem, build_id ) VALUES ( %s, %s )'
- sqlQ2 = 'INSERT INTO qa_problems (problem, build_id ) VALUES ( %s, %s )'
- if build_log_dict['repoman_error_list'] != []:
- for x in build_log_dict['repoman_error_list']:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- if build_log_dict['qa_error_list'] != []:
- for x in build_log_dict['qa_error_list']:
- params = (x, build_id)
- cursor.execute(sqlQ, params)
- connection.commit()
-
-def get_arch_db(connection):
- cursor = connection.cursor()
- sqlQ = "SELECT keyword FROM keywords WHERE keyword = 'amd64'"
- cursor.execute(sqlQ)
- return cursor.fetchone()
-
-def add_new_arch_db(connection, arch_list):
- cursor = connection.cursor()
- sqlQ = 'INSERT INTO keywords (keyword) VALUES ( %s )'
- for arch in arch_list:
- cursor.execute(sqlQ, (arch,))
- connection.commit()
-
-def check_fail_times(connection, logDict):
- queue_id = logDict['queue_id']
- qa_error = logDict['qa_error_list'][0]
- cursor = connection.cursor()
- sqlQ = 'SELECT errors FROM temp_errors_queue_qa WHERE queue_id = %s AND what_error = %s'
- cursor.execute(sqlQ, (queue_id, qa_error,))
- return cursor.fetchone()
-
-def add_fail_times(connection, logDict):
- queue_id = logDict['queue_id']
- qa_error = logDict['qa_error_list'][0]
- cursor = connection.cursor()
- sqlQ = 'INSERT INTO temp_errors_queue_qa (queue_id, what_error) VALUES ( %s, %s)'
- cursor.execute(sqlQ, (queue_id, qa_error,))
- connection.commit()
-
-def update_fail_times(connection, logDict):
- queue_id = logDict['queue_id']
- qa_error = logDict['qa_error_list'][0]
- cursor = connection.cursor()
- sqlQ1 = 'UPDATE buildqueue SET timestamp = NOW() WHERE queue_id = %s'
- cursor.execute(sqlQ1, (queue_id,))
- connection.commit()
-
-def make_conf_error(connection,config_profile):
- pass
diff --git a/gobs/pym/readconf.py b/gobs/pym/readconf.py
index c017561..89d0319 100644
--- a/gobs/pym/readconf.py
+++ b/gobs/pym/readconf.py
@@ -28,8 +28,8 @@ class get_conf_settings(object):
if element[0] == 'SQLPASSWD': # Password
get_sql_passwd = element[1]
# Buildhost root (dir for host/setup on host)
- if element[0] == 'GOBSROOT':
- get_gobs_root = element[1]
+ if element[0] == 'GOBSGITREPONAME':
+ get_gobs_gitreponame = element[1]
# Buildhost setup (host/setup on guest)
if element[0] == 'GOBSCONFIG':
get_gobs_config = element[1]
@@ -41,6 +41,6 @@ class get_conf_settings(object):
gobs_settings_dict['sql_host'] = get_sql_host.rstrip('\n')
gobs_settings_dict['sql_user'] = get_sql_user.rstrip('\n')
gobs_settings_dict['sql_passwd'] = get_sql_passwd.rstrip('\n')
- gobs_settings_dict['gobs_root'] = get_gobs_root.rstrip('\n')
+ gobs_settings_dict['gobs_gitreponame'] = get_gobs_gitreponame.rstrip('\n')
gobs_settings_dict['gobs_config'] = get_gobs_config.rstrip('\n')
return gobs_settings_dict