From: "Magnus Granberg" <zorry@gentoo.org>
To: gentoo-commits@lists.gentoo.org
Subject: [gentoo-commits] dev/zorry:master commit in: gobs/pym/
Date: Sat, 26 Jan 2013 22:23:36 +0000 (UTC)
Message-ID: <1359242575.6d621d7d07dc3fd9a984fb36091d8603dee5d8a5.zorry@gentoo>
commit: 6d621d7d07dc3fd9a984fb36091d8603dee5d8a5
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sat Jan 26 23:22:55 2013 +0000
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sat Jan 26 23:22:55 2013 +0000
URL: http://git.overlays.gentoo.org/gitweb/?p=dev/zorry.git;a=commit;h=6d621d7d
Fix adding of inactive ebuilds
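The connectionManager class is reworked: there is no longer a shared singleton connection,
every caller opens and closes its own connection via newConnection(). A minimal usage sketch
(illustration only, not part of the patch; config_id is a placeholder) of the new API, along
the lines of update_cpv_db_pool() in gobs/pym/updatedb.py:

    from gobs.ConnectionManager import connectionManager
    from gobs.mysql_querys import get_config

    CM = connectionManager()
    conn = CM.newConnection()        # fresh connection, no shared singleton
    hostname, config = get_config(conn, config_id)   # config_id is a placeholder
    conn.close()                     # callers now close the connection themselves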
---
gobs/pym/ConnectionManager.py | 66 ++++++++++-------------
gobs/pym/buildquerydb.py | 14 +----
gobs/pym/mysql_querys.py | 100 +++++++++++++++++++----------------
gobs/pym/package.py | 118 ++++++++++++++++++++++-------------------
gobs/pym/updatedb.py | 28 ++++------
5 files changed, 162 insertions(+), 164 deletions(-)
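The actual fix for the inactive ebuilds lives in gobs/pym/package.py: the new add_package()
collects the ebuild ids that are still present in the tree and marks everything else inactive.
Roughly (illustration only, not part of the patch; the function name below is made up), using
the helpers added in gobs/pym/mysql_querys.py:

    from gobs.mysql_querys import get_ebuild_id_list, add_old_ebuild

    def mark_old_ebuilds_inactive(conn, package_id, new_ebuild_id_list):
        # collect every ebuild id that is still active in the db for this package
        # but no longer present in the tree
        old_ebuild_id_list = []
        for ebuild_id in get_ebuild_id_list(conn, package_id):
            if ebuild_id not in new_ebuild_id_list:
                old_ebuild_id_list.append(ebuild_id)
        if old_ebuild_id_list != []:
            # sets active = 'False' and removes any pending build jobs
            add_old_ebuild(conn, package_id, old_ebuild_id_list)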
diff --git a/gobs/pym/ConnectionManager.py b/gobs/pym/ConnectionManager.py
index dd91e1d..4dac318 100644
--- a/gobs/pym/ConnectionManager.py
+++ b/gobs/pym/ConnectionManager.py
@@ -1,46 +1,38 @@
-# FIXME: Redo the class
from __future__ import print_function
from gobs.readconf import get_conf_settings
reader = get_conf_settings()
gobs_settings_dict=reader.read_gobs_settings_all()
class connectionManager(object):
- _instance = None
- def __new__(cls, numberOfconnections=20, *args, **kwargs):
- if not cls._instance:
- cls._instance = super(connectionManager, cls).__new__(cls, *args, **kwargs)
- #read the sql user/host etc and store it in the local object
- cls._backend=gobs_settings_dict['sql_backend']
- cls._host=gobs_settings_dict['sql_host']
- cls._user=gobs_settings_dict['sql_user']
- cls._password=gobs_settings_dict['sql_passwd']
- cls._database=gobs_settings_dict['sql_db']
- #shouldnt we include port also?
- if cls._backend == 'mysql':
- try:
- import mysql.connector
- from mysql.connector import errorcode
- except ImportError:
- print("Please install a recent version of dev-python/mysql-connector-python for Python")
- sys.exit(1)
- db_config = {}
- db_config['user'] = cls._user
- db_config['password'] = cls._password
- db_config['host'] = cls._host
- db_config['database'] = cls._database
- db_config['raise_on_warnings'] = True
- try:
- cls._cnx = mysql.connector.connect(**db_config)
- except mysql.connector.Error as err:
- if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
- print("Something is wrong your username or password")
- elif err.errno == errorcode.ER_BAD_DB_ERROR:
- print("Database does not exists")
- else:
- print(err)
- return cls._instance
+ def __init__(self):
+ self._backend=gobs_settings_dict['sql_backend']
+ self._host=gobs_settings_dict['sql_host']
+ self._user=gobs_settings_dict['sql_user']
+ self._password=gobs_settings_dict['sql_passwd']
+ self._database=gobs_settings_dict['sql_db']
def newConnection(self):
- return self._cnx
-
+ if self._backend == 'mysql':
+ try:
+ import mysql.connector
+ from mysql.connector import errorcode
+ except ImportError:
+ print("Please install a recent version of dev-python/mysql-connector-python for Python")
+ sys.exit(1)
+ db_config = {}
+ db_config['user'] = self._user
+ db_config['password'] = self._password
+ db_config['host'] = self._host
+ db_config['database'] = self._database
+ db_config['raise_on_warnings'] = True
+ try:
+ cnx = mysql.connector.connect(**db_config)
+ except mysql.connector.Error as err:
+ if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
+ print("Something is wrong with your username or password")
+ elif err.errno == errorcode.ER_BAD_DB_ERROR:
+ print("Database does not exist")
+ else:
+ print(err)
+ return cnx
diff --git a/gobs/pym/buildquerydb.py b/gobs/pym/buildquerydb.py
index c7cd49d..8edfd1e 100644
--- a/gobs/pym/buildquerydb.py
+++ b/gobs/pym/buildquerydb.py
@@ -11,11 +11,6 @@ from gobs.readconf import get_conf_settings
reader=get_conf_settings()
gobs_settings_dict=reader.read_gobs_settings_all()
config_profile = gobs_settings_dict['gobs_config']
-# make a CM
-from gobs.ConnectionManager import connectionManager
-CM=connectionManager(gobs_settings_dict)
-
-
from gobs.check_setup import check_make_conf
from gobs.sync import git_pull
@@ -24,7 +19,7 @@ import portage
import multiprocessing
def add_cpv_query_pool(mysettings, myportdb, config_id, cp, repo):
- conn=CM.getConnection()
+ conn =0
init_package = gobs_package(mysettings, myportdb)
# FIXME: remove the check for gobs when in tree
if cp != "dev-python/gobs":
@@ -53,11 +48,10 @@ def add_cpv_query_pool(mysettings, myportdb, config_id, cp, repo):
init_package.add_new_ebuild_buildquery_db(ebuild_id_list, packageDict, config_cpv_listDict)
log_msg = "C %s:%s ... Done." % (cp, repo,)
add_gobs_logs(conn, log_msg, "info", config_id)
- CM.putConnection(conn)
return
def add_buildquery_main(config_id):
- conn=CM.getConnection()
+ conn = 0
config_setup = get_config(conn, config_id)
log_msg = "Adding build jobs for: %s" % (config_setup,)
add_gobs_logs(conn, log_msg, "info", config_id)
@@ -94,11 +88,10 @@ def add_buildquery_main(config_id):
pool.join()
log_msg = "Adding build jobs for: %s ... Done." % (config_setup,)
add_gobs_logs(conn, log_msg, "info", config_profile)
- CM.putConnection(conn)
return True
def del_buildquery_main(config_id):
- conn=CM.getConnection()
+ conn=0
config_setup = get_config(conn, config_id)
log_msg = "Removeing build jobs for: %s" % (config_setup,)
add_gobs_logs(conn, log_msg, "info", config_id)
@@ -108,5 +101,4 @@ def del_buildquery_main(config_id):
del_old_build_jobs(conn, build_job_id)
log_msg = "Removeing build jobs for: %s ... Done." % (config_setup,)
add_gobs_logs(conn, log_msg, "info", config_id)
- CM.putConnection(conn)
return True
diff --git a/gobs/pym/mysql_querys.py b/gobs/pym/mysql_querys.py
index 7da98c0..8b8bacd 100644
--- a/gobs/pym/mysql_querys.py
+++ b/gobs/pym/mysql_querys.py
@@ -70,11 +70,11 @@ def get_config_list_all(connection):
def get_config(connection, config_id):
cursor = connection.cursor()
- sqlQ ='SELECT host, config FROM configs WHERE config_id = %s'
+ sqlQ ='SELECT hostname, config FROM configs WHERE config_id = %s'
cursor.execute(sqlQ, (config_id,))
hostname, config = cursor.fetchone()
cursor.close()
- return hostname[0], config[0]
+ return hostname, config
def update_make_conf(connection, configsDict):
cursor = connection.cursor()
@@ -90,7 +90,7 @@ def get_default_config(connection):
cursor.execute(sqlQ)
hostname, config = cursor.fetchone()
cursor.close()
- return hostname[0], config[0]
+ return hostname, config
def get_repo_id(connection, repo):
cursor = connection.cursor()
@@ -140,13 +140,13 @@ def get_package_id(connection, categories, package, repo):
if not entries is None:
return entries[0]
-def add_new_manifest_sql(connection, categories, package, repo, manifest_checksum_tree):
+def add_new_manifest_sql(connection, categories, package, repo):
cursor = connection.cursor()
- sqlQ1 = "INSERT INTO packages (category_id, package, repo_id, checksum, active) VALUES (%s, %s, %s, %s, 'True')"
+ sqlQ1 = "INSERT INTO packages (category_id, package, repo_id, checksum, active) VALUES (%s, %s, %s, '0', 'True')"
sqlQ2 = 'SELECT LAST_INSERT_ID()'
repo_id = get_repo_id(connection, repo)
category_id = get_category_id(connection, categories)
- cursor.execute(sqlQ1, (category_id, package, repo_id, manifest_checksum_tree,))
+ cursor.execute(sqlQ1, (category_id, package, repo_id, ))
cursor.execute(sqlQ2)
package_id = cursor.fetchone()[0]
connection.commit()
@@ -211,13 +211,13 @@ def add_new_ebuild_metadata_sql(connection, ebuild_id, keywords, restrictions, i
use_id = cursor.fetchone()[0]
cursor.execute(sqlQ6, (ebuild_id, use_id, set_iuse,))
for keyword in keywords:
- set_keyword = 'stable'
+ set_keyword = 'Stable'
if keyword[0] in ["~"]:
keyword = keyword[1:]
- set_keyword = 'unstable'
+ set_keyword = 'Unstable'
elif keyword[0] in ["-"]:
keyword = keyword[1:]
- set_keyword = 'testing'
+ set_keyword = 'Negative'
keyword_id = get_keyword_id(connection, keyword)
if keyword_id is None:
cursor.execute(sqlQ1, (keyword,))
@@ -329,37 +329,60 @@ def get_ebuild_checksum(connection, package_id, ebuild_version_tree):
cursor = connection.cursor()
sqlQ = "SELECT checksum FROM ebuilds WHERE package_id = %s AND version = %s AND active = 'True'"
cursor.execute(sqlQ, (package_id, ebuild_version_tree))
+ entries = cursor.fetchall()
+ cursor.close()
+ if entries == []:
+ return None
+ checksums = []
+ for i in entries:
+ checksums.append(i[0])
+ return checksums
+
+def get_ebuild_id_list(connection, package_id):
+ cursor = connection.cursor()
+ sqlQ = "SELECT ebuild_id FROM ebuilds WHERE package_id = %s AND active = 'True'"
+ cursor.execute(sqlQ, (package_id,))
+ entries = cursor.fetchall()
+ cursor.close()
+ ebuilds_id = []
+ for i in entries:
+ ebuilds_id.append(i[0])
+ return ebuilds_id
+
+def get_ebuild_id_db(connection, checksum, package_id):
+ cursor = connection.cursor()
+ sqlQ = "SELECT ebuild_id FROM ebuilds WHERE package_id = %s AND checksum = %s"
+ cursor.execute(sqlQ, (package_id, checksum,))
entries = cursor.fetchone()
cursor.close()
- if not entries is None:
- return entries[0]
+ ebuilds_id = []
+ if entries is not None:
+ for i in entries:
+ ebuilds_id.append(i)
+ return ebuilds_id
-def add_old_ebuild(connection, package_id, old_ebuild_list):
+def del_old_build_jobs(connection, build_job_id):
cursor = connection.cursor()
- sqlQ1 = "UPDATE ebuilds SET active = 'False' WHERE ebuild_id = %s"
- sqlQ2 = "SELECT ebuild_id FROM ebuilds WHERE package_id = %s AND version = %s AND active = 'True'"
- sqlQ3 = "SELECT build_job_id FROM build_jobs WHERE ebuild_id = %s"
- sqlQ4 = 'DELETE FROM build_jobs_use WHERE build_job_id = %s'
- sqlQ5 = 'DELETE FROM build_jobs WHERE build_job_id = %s'
- for old_ebuild in old_ebuild_list:
- cursor.execute(sqlQ2, (package_id, old_ebuild[0]))
- ebuild_id_list = cursor.fetchall()
- if ebuild_id_list is not None:
- for ebuild_id in ebuild_id_list:
- cursor.execute(sqlQ3, (ebuild_id))
- build_job_id_list = cursor.fetchall()
- if build_job_id_list is not None:
- for build_job_id in build_job_id_list:
- cursor.execute(sqlQ4, (build_job_id))
- cursor.execute(sqlQ5, (build_job_id))
- cursor.execute(sqlQ1, (ebuild_id,))
+ sqlQ1 = 'DELETE FROM build_jobs_use WHERE build_job_id = %s'
+ sqlQ2 = 'DELETE FROM build_jobs_redo WHERE build_job_id = %s'
+ sqlQ3 = 'DELETE FROM build_jobs_emerge_options WHERE build_job_id = %s'
+ sqlQ4 = 'DELETE FROM build_jobs WHERE build_job_id = %s'
+ cursor.execute(sqlQ1, (build_job_id,))
+ cursor.execute(sqlQ2, (build_job_id,))
+ cursor.execute(sqlQ3, (build_job_id,))
+ cursor.execute(sqlQ4, (build_job_id,))
connection.commit()
cursor.close()
-def update_active_ebuild_to_fales(connection, package_id, ebuild_version_tree):
+def add_old_ebuild(connection, package_id, old_ebuild_list):
cursor = connection.cursor()
- sqlQ ="UPDATE ebuilds SET active = 'False' WHERE package_id = %s AND version = %s AND active = 'True'"
- cursor.execute(sqlQ, (package_id, ebuild_version_tree))
+ sqlQ1 = "UPDATE ebuilds SET active = 'False' WHERE ebuild_id = %s"
+ sqlQ3 = "SELECT build_job_id FROM build_jobs WHERE ebuild_id = %s"
+ for ebuild_id in old_ebuild_list:
+ cursor.execute(sqlQ3, (ebuild_id))
+ build_job_id_list = cursor.fetchall()
+ if build_job_id_list is not None:
+ for build_job_id in build_job_id_list:
+ del_old_build_jobs(connection, build_job_id[0])
+ cursor.execute(sqlQ1, (ebuild_id,))
connection.commit()
cursor.close()
@@ -384,19 +407,6 @@ def get_build_jobs_id_list_config(connection, config_id):
build_log_id_list = None
return build_jobs_id_list
-def del_old_build_jobs(connection, build_job_id):
- cursor = connection.cursor()
- sqlQ1 = 'DELETE FROM build_jobs_use WHERE build_job_id = %s'
- sqlQ2 = 'DELETE FROM build_jobs_redo WHERE build_job_id = %s'
- sqlQ3 = 'DELETE FROM build_jobs WHERE build_job_id = %s'
- sqlQ4 = 'DELETE FROM build_jobs_emerge_options WHERE build_job_id = %s'
- cursor.execute(sqlQ1, (build_job_id,))
- cursor.execute(sqlQ2, (build_job_id,))
- cursor.execute(sqlQ4, (build_job_id,))
- cursor.execute(sqlQ3, (build_job_id,))
- connection.commit()
- cursor.close()
-
def get_profile_checksum(connection, config_id):
cursor = connection.cursor()
sqlQ = "SELECT checksum FROM configs_metadata WHERE active = 'True' AND config_id = %s AND auto = 'True'"
diff --git a/gobs/pym/package.py b/gobs/pym/package.py
index f424dde..9354811 100644
--- a/gobs/pym/package.py
+++ b/gobs/pym/package.py
@@ -7,7 +7,7 @@ from gobs.text import get_ebuild_cvs_revision
from gobs.flags import gobs_use_flags
from gobs.mysql_querys import get_config, get_config_id, add_gobs_logs, get_default_config, \
add_new_build_job, get_config_id_list, update_manifest_sql, add_new_manifest_sql, \
- add_new_ebuild_sql, update_active_ebuild_to_fales, add_old_ebuild, \
+ add_new_ebuild_sql, get_ebuild_id_db, add_old_ebuild, get_ebuild_id_list, \
get_ebuild_checksum, get_manifest_db, get_cp_repo_from_package_id
from gobs.readconf import get_conf_settings
reader=get_conf_settings()
@@ -129,7 +129,6 @@ class gobs_package(object):
return attDict
def add_new_build_job_db(self, ebuild_id_list, packageDict, config_cpv_listDict):
- conn=CM.getConnection()
# Get the needed info from packageDict and config_cpv_listDict and put that in buildqueue
# Only add it if ebuild_version in packageDict and config_cpv_listDict match
if config_cpv_listDict is not None:
@@ -178,72 +177,83 @@ class gobs_package(object):
package_metadataDict[package] = attDict
return package_metadataDict
- def add_new_package_db(self, categories, package, repo):
+ def add_package(self, packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree):
+ # Use packageDict to update the db
+ ebuild_id_list = add_new_ebuild_sql(self._conn, package_id, packageDict)
+
+ # Make old ebuilds inactive
+ for ebuild_id in ebuild_id_list:
+ new_ebuild_id_list.append(ebuild_id)
+ for ebuild_id in get_ebuild_id_list(self._conn, package_id):
+ if not ebuild_id in new_ebuild_id_list:
+ if not ebuild_id in old_ebuild_id_list:
+ old_ebuild_id_list.append(ebuild_id)
+ if not old_ebuild_id_list == []:
+ add_old_ebuild(self._conn, package_id, old_ebuild_id_list)
+
+ # update the cp manifest checksum
+ update_manifest_sql(self._conn, package_id, manifest_checksum_tree)
+
+ # Get the best cpv for the configs and add it to config_cpv_listDict
+ configs_id_list = get_config_id_list(self._conn)
+ cp, repo = get_cp_repo_from_package_id(self._conn, package_id)
+ config_cpv_listDict = self.config_match_ebuild(cp, configs_id_list)
+
+ # Add the ebuild to the build jobs table if needed
+ self.add_new_build_job_db(ebuild_id_list, packageDict, config_cpv_listDict)
+
+ def add_new_package_db(self, cp, repo):
# Add new categories package ebuild to tables package and ebuilds
# C = Checking
# N = New Package
- log_msg = "C %s/%s:%s" % (categories, package, repo)
+ log_msg = "C %s:%s" % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- log_msg = "N %s/%s:%s" % (categories, package, repo)
+ log_msg = "N %s:%s" % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- pkgdir = self._myportdb.getRepositoryPath(repo) + "/" + categories + "/" + package # Get RepoDIR + cp
+ repodir = self._myportdb.getRepositoryPath(repo)
+ pkgdir = repodir + "/" + cp # Get RepoDIR + cp
# Get the cp manifest file checksum.
try:
manifest_checksum_tree = portage.checksum.sha256hash(pkgdir + "/Manifest")[0]
except:
manifest_checksum_tree = "0"
- log_msg = "QA: Can't checksum the Manifest file. %s/%s:%s" % (categories, package, repo,)
+ log_msg = "QA: Can't checksum the Manifest file. %s:%s" % (cp, repo,)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- log_msg = "C %s/%s:%s ... Fail." % (categories, package, repo)
+ log_msg = "C %s:%s ... Fail." % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
return
- package_id = add_new_manifest_sql(self._conn, categories, package, repo, manifest_checksum_tree)
+ element = cp.split('/')
+ package_id = add_new_manifest_sql(self._conn, element[0], element[1], repo)
# Get the ebuild list for cp
mytree = []
- mytree.append(self._myportdb.getRepositoryPath(repo))
- ebuild_list_tree = self._myportdb.cp_list((categories + "/" + package), use_cache=1, mytree=mytree)
+ mytree.append(repodir)
+ ebuild_list_tree = self._myportdb.cp_list(cp, use_cache=1, mytree=mytree)
if ebuild_list_tree == []:
- log_msg = "QA: Can't get the ebuilds list. %s/%s:%s" % (categories, package, repo,)
+ log_msg = "QA: Can't get the ebuilds list. %s:%s" % (cp, repo,)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- log_msg = "C %s/%s:%s ... Fail." % (categories, package, repo)
+ log_msg = "C %s:%s ... Fail." % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
return
- # set config to default config
- default_config = get_default_config(self._conn)
-
# Make the needed packageDict with ebuild infos so we can add it later to the db.
packageDict ={}
- ebuild_id_list = []
+ new_ebuild_id_list = []
+ old_ebuild_id_list = []
for cpv in sorted(ebuild_list_tree):
packageDict[cpv] = self.get_packageDict(pkgdir, cpv, repo)
- # Add new ebuilds to the db
- ebuild_id_list = add_new_ebuild_sql(self._conn, package_id, packageDict)
-
- # Get the best cpv for the configs and add it to config_cpv_listDict
- configs_id_list = get_config_id_list(self._conn)
- config_cpv_listDict = self.config_match_ebuild(categories + "/" + package, configs_id_list)
-
- # Add the ebuild to the buildquery table if needed
- self.add_new_build_job_db(ebuild_id_list, packageDict, config_cpv_listDict)
-
- log_msg = "C %s/%s:%s ... Done." % (categories, package, repo)
+ self.add_package(packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree)
+ log_msg = "C %s:%s ... Done." % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- print(categories, package, repo)
- CM.putConnection(conn)
def update_package_db(self, package_id):
# Update the categories and package with new info
# C = Checking
cp, repo = get_cp_repo_from_package_id(self._conn, package_id)
- element = cp.split('/')
- package = element[1]
log_msg = "C %s:%s" % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
- pkgdir = self._myportdb.getRepositoryPath(repo) + "/" + cp # Get RepoDIR + cp
+ repodir = self._myportdb.getRepositoryPath(repo)
+ pkgdir = repodir + "/" + cp # Get RepoDIR + cp
# Get the cp mainfest file checksum
try:
@@ -265,7 +275,7 @@ class gobs_package(object):
# Get the ebuild list for cp
mytree = []
- mytree.append(self._myportdb.getRepositoryPath(repo))
+ mytree.append(repodir)
ebuild_list_tree = self._myportdb.cp_list(cp, use_cache=1, mytree=mytree)
if ebuild_list_tree == []:
log_msg = "QA: Can't get the ebuilds list. %s:%s" % (cp, repo,)
@@ -274,8 +284,9 @@ class gobs_package(object):
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
return
packageDict ={}
+ new_ebuild_id_list = []
+ old_ebuild_id_list = []
for cpv in sorted(ebuild_list_tree):
- old_ebuild_list = []
# split out ebuild version
ebuild_version_tree = portage.versions.cpv_getversion(cpv)
@@ -285,7 +296,20 @@ class gobs_package(object):
# Get the checksum of the ebuild in tree and db
ebuild_version_checksum_tree = packageDict[cpv]['ebuild_version_checksum_tree']
- ebuild_version_manifest_checksum_db = get_ebuild_checksum(self._conn, package_id, ebuild_version_tree)
+ checksums_db = get_ebuild_checksum(self._conn, package_id, ebuild_version_tree)
+ # check if we have dupes of the checksum from db
+ if checksums_db is None:
+ ebuild_version_manifest_checksum_db = None
+ elif len(checksums_db) >= 2:
+ for checksum in checksums_db:
+ ebuilds_id = get_ebuild_id_db(self._conn, checksum, package_id)
+ log_msg = "U %s:%s:%s Dups of checksums" % (cpv, repo, ebuilds_id,)
+ add_gobs_logs(self._conn, log_msg, "error", self._config_id)
+ log_msg = "C %s:%s ... Fail." % (cp, repo)
+ add_gobs_logs(self._conn, log_msg, "error", self._config_id)
+ return
+ else:
+ ebuild_version_manifest_checksum_db = checksums_db[0]
# Check if the checksum have change
if ebuild_version_manifest_checksum_db is None:
@@ -296,28 +320,12 @@ class gobs_package(object):
# U = Updated ebuild
log_msg = "U %s:%s" % (cpv, repo,)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
-
- # Fix so we can use add_new_ebuild_sql() to update the ebuilds
- old_ebuild_list.append(ebuild_version_tree)
else:
- # Remove cpv from packageDict
+ # Remove cpv from packageDict and add ebuild to new ebuilds list
del packageDict[cpv]
+ new_ebuild_id_list.append(get_ebuild_id_db(self._conn, ebuild_version_checksum_tree, package_id)[0])
- # Make old ebuilds unactive
- add_old_ebuild(self._conn, package_id, old_ebuild_list)
-
- # Use packageDict and to update the db
- ebuild_id_list = add_new_ebuild_sql(self._conn, package_id, packageDict)
-
- # update the cp manifest checksum
- update_manifest_sql(self._conn, package_id, manifest_checksum_tree)
-
- # Get the best cpv for the configs and add it to config_cpv_listDict
- configs_id_list = get_config_id_list(self._conn)
- config_cpv_listDict = self.config_match_ebuild(cp, configs_id_list)
-
- # Add the ebuild to the buildqueru table if needed
- self.add_new_build_job_db(ebuild_id_list, packageDict, config_cpv_listDict)
+ self.add_package(packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree)
log_msg = "C %s:%s ... Done." % (cp, repo)
add_gobs_logs(self._conn, log_msg, "info", self._config_id)
diff --git a/gobs/pym/updatedb.py b/gobs/pym/updatedb.py
index 56d0894..cbf0dbc 100644
--- a/gobs/pym/updatedb.py
+++ b/gobs/pym/updatedb.py
@@ -27,8 +27,7 @@ def init_portage_settings(conn, config_id):
add_gobs_logs(conn, log_msg, "info", config_id)
# Get default config from the configs table and default_config=1
- hostname, config = get_default_config(conn) # HostConfigDir = table configs id
- host_config = hostname +"/" + config
+ host_config = _hostname +"/" + _config
default_config_root = "/var/cache/gobs/" + gobs_settings_dict['gobs_gitreponame'] + "/" + host_config + "/"
# Set config_root (PORTAGE_CONFIGROOT) to default_config_root
@@ -38,32 +37,29 @@ def init_portage_settings(conn, config_id):
return mysettings
def update_cpv_db_pool(mysettings, myportdb, cp, repo):
- CM2=connectionManager()
- conn2 = CM2.newConnection()
- if not conn2.is_connected() is True:
- conn2.reconnect(attempts=2, delay=1)
- init_package = gobs_package(conn2, mysettings, myportdb)
+ CM = connectionManager()
+ conn = CM.newConnection()
+ init_package = gobs_package(conn, mysettings, myportdb)
# split the cp to categories and package
element = cp.split('/')
categories = element[0]
package = element[1]
# update the categories table
- update_categories_db(conn2, categories)
+ update_categories_db(conn, categories)
- # Check if we don't have the cp in the package table
- package_id = get_package_id(conn2, categories, package, repo)
+ # Check if we have the cp in the package table
+ package_id = get_package_id(conn, categories, package, repo)
if package_id is None:
# Add new package with ebuilds
- init_package.add_new_package_db(categories, package, repo)
+ init_package.add_new_package_db(cp, repo)
- # Ceck if we have the cp in the package table
- elif package_id is not None:
+ else:
# Update the packages with ebuilds
init_package.update_package_db(package_id)
- conn2.close
+ conn.close()
def update_cpv_db(conn, config_id):
mysettings = init_portage_settings(conn, config_id)
@@ -80,12 +76,12 @@ def update_cpv_db(conn, config_id):
pool = multiprocessing.Pool(processes=pool_cores)
# Will run some update checks and update package if needed
- # Get categories/package list from portage and repos
+
# Get the repos and update the repos db
repo_list = myportdb.getRepositories()
update_repo_db(conn, repo_list)
- # close the db for the multiprocessing pool will make new ones
+ # Close the db connection; the multiprocessing pool will make new ones
# and we don't need this one for some time.
conn.close()