* [gentoo-commits] proj/g-sorcery:master commit in: /, g_sorcery/, gs_ctan/, g_elpa/
@ 2013-07-22 0:56 Jauhien Piatlicki
0 siblings, 0 replies; only message in thread
From: Jauhien Piatlicki @ 2013-07-22 0:56 UTC (permalink / raw)
To: gentoo-commits
commit: fb47987d9780bf39506ca9e15a3160eaed567d46
Author: Jauhien Piatlicki (jauhien) <piatlicki <AT> gmail <DOT> com>
AuthorDate: Mon Jul 22 00:56:10 2013 +0000
Commit: Jauhien Piatlicki <piatlicki <AT> gmail <DOT> com>
CommitDate: Mon Jul 22 00:56:10 2013 +0000
URL: http://git.overlays.gentoo.org/gitweb/?p=proj/g-sorcery.git;a=commit;h=fb47987d
PackageDB split into PackageDB and DBGenerator; more than one repository per overlay allowed
---
g_elpa/backend.py | 6 +-
g_elpa/elpa_db.py | 26 ++---
g_sorcery/backend.py | 93 ++++++++--------
g_sorcery/package_db.py | 262 +++++++++++++++++++++------------------------
gs_ctan/backend.py | 6 +-
gs_ctan/ctan_db.py | 46 ++------
layman-git-g-sorcery.patch | 2 +-
7 files changed, 203 insertions(+), 238 deletions(-)
diff --git a/g_elpa/backend.py b/g_elpa/backend.py
index b77ed2e..cad82c9 100644
--- a/g_elpa/backend.py
+++ b/g_elpa/backend.py
@@ -18,7 +18,7 @@ from g_sorcery.metadata import MetadataGenerator
from g_sorcery.eclass import EclassGenerator
from g_sorcery.fileutils import get_pkgpath
-from .elpa_db import ElpaDB
+from .elpa_db import ElpaDBGenerator
from .ebuild import ElpaEbuildWithDigestGenerator, ElpaEbuildWithoutDigestGenerator
@@ -27,6 +27,6 @@ class ElpaEclassGenerator(EclassGenerator):
super(ElpaEclassGenerator, self).__init__(os.path.join(get_pkgpath(__file__), 'data'))
-instance = Backend(ElpaDB,
+instance = Backend(ElpaDBGenerator,
ElpaEbuildWithDigestGenerator, ElpaEbuildWithoutDigestGenerator,
- ElpaEclassGenerator, MetadataGenerator, sync_db=False)
+ ElpaEclassGenerator, MetadataGenerator)
diff --git a/g_elpa/elpa_db.py b/g_elpa/elpa_db.py
index 7628502..0196b89 100644
--- a/g_elpa/elpa_db.py
+++ b/g_elpa/elpa_db.py
@@ -23,26 +23,22 @@ else:
from urllib.parse import urljoin
from g_sorcery.g_collections import Dependency, Package, serializable_elist
-from g_sorcery.package_db import PackageDB
-from g_sorcery.fileutils import load_remote_file
+from g_sorcery.package_db import DBGenerator
from g_sorcery.exceptions import SyncError
-class ElpaDB(PackageDB):
- def __init__(self, directory, config = None, common_config = None):
- super(ElpaDB, self).__init__(directory, config, common_config)
-
- def get_download_uries(self):
- ac_uri = urljoin(self.repo_uri, 'archive-contents')
+class ElpaDBGenerator(DBGenerator):
+ def get_download_uries(self, common_config, config):
+ ac_uri = urljoin(config["repo_uri"], 'archive-contents')
return [{"uri" : ac_uri, "parser" : sexpdata.load}]
- def process_data(self, data):
-
+ def process_data(self, pkg_db, data, common_config, config):
archive_contents = data['archive-contents']
+ repo_uri = config["repo_uri"]
if sexpdata.car(archive_contents) != 1:
- raise SyncError('sync failed: ' + self.repo_uri + ' bad archive contents format')
+ raise SyncError('sync failed: ' + repo_uri + ' bad archive contents format')
- self.add_category('app-emacs')
+ pkg_db.add_category('app-emacs')
PKG_INFO = 2
PKG_NAME = 0
@@ -79,8 +75,8 @@ class ElpaDB(PackageDB):
'dependencies' : dependencies,
'depend' : depend,
'rdepend' : depend,
- 'homepage' : self.repo_uri,
- 'repo_uri' : self.repo_uri,
+ 'homepage' : repo_uri,
+ 'repo_uri' : repo_uri,
'realname' : realname,
#eclass entry
'eclasses' : ['g-elpa'],
@@ -89,7 +85,7 @@ class ElpaDB(PackageDB):
'name' : 'Jauhien Piatlicki'}],
'longdescription' : description
}
- self.add_package(pkg, properties)
+ pkg_db.add_package(pkg, properties)
def _s_get_package(self, name, version):
diff --git a/g_sorcery/backend.py b/g_sorcery/backend.py
index 9dae652..cd32c9c 100644
--- a/g_sorcery/backend.py
+++ b/g_sorcery/backend.py
@@ -24,20 +24,21 @@ else:
import configparser
from .g_collections import Package
-from .fileutils import fast_manifest
+from .fileutils import fast_manifest, FileJSON
from .exceptions import DependencyError, DigestError
from .logger import Logger
from .mangler import package_managers
+from .package_db import PackageDB
class Backend(object):
"""
Backend for a repository.
Command format is as follows:
- g-backend [-o overlay_dir] command
+ g-backend [-o overlay_dir] [-r repository] command
where command is one of the following:
- sync [-u url] [-r repository]
+ sync
list
search word
generate package_name
@@ -47,12 +48,12 @@ class Backend(object):
If no overlay directory is given the default one from backend config is used.
"""
- def __init__(self, package_db_class,
+ def __init__(self, package_db_generator_class,
ebuild_g_with_digest_class, ebuild_g_without_digest_class,
- eclass_g_class, metadata_g_class, sync_db=True):
- self.db_dir = '.db'
+ eclass_g_class, metadata_g_class, package_db_class=PackageDB, sync_db=False):
+ self.sorcery_dir = '.g-sorcery'
self.sync_db = sync_db
- self.package_db_class = package_db_class
+ self.package_db_generator = package_db_generator_class(package_db_class)
self.ebuild_g_with_digest_class = ebuild_g_with_digest_class
self.ebuild_g_without_digest_class = ebuild_g_without_digest_class
self.eclass_g_class = eclass_g_class
@@ -60,12 +61,11 @@ class Backend(object):
self.parser = argparse.ArgumentParser(description='Automatic ebuild generator.')
self.parser.add_argument('-o', '--overlay')
+ self.parser.add_argument('-r', '--repository')
subparsers = self.parser.add_subparsers()
p_sync = subparsers.add_parser('sync')
- p_sync.add_argument('-u', '--url')
- p_sync.add_argument('-r', '--repository')
p_sync.set_defaults(func=self.sync)
p_list = subparsers.add_parser('list')
@@ -99,18 +99,15 @@ class Backend(object):
def _get_package_db(self, args, config, global_config):
overlay = self._get_overlay(args, config, global_config)
- db_path = os.path.join(overlay, self.db_dir)
- if not db_path:
- return -1
- pkg_db = self.package_db_class(db_path)
+ backend_path = os.path.join(overlay, self.sorcery_dir, config["package"])
+ repository = args.repository
+ pkg_db = self.package_db_generator(backend_path, repository, generate=False)
return pkg_db
def sync(self, args, config, global_config):
overlay = self._get_overlay(args, config, global_config)
- db_path = os.path.join(overlay, self.db_dir)
- if not db_path:
- return -1
- url = args.url
+ backend_path = os.path.join(overlay, self.sorcery_dir, config["package"])
+ repository = args.repository
repository = args.repository
repository_config = {}
@@ -118,7 +115,7 @@ class Backend(object):
common_config = config["common_config"]
else:
common_config = {}
-
+
if repository:
if not "repositories" in config:
self.logger.error("repository " + repository +
@@ -130,34 +127,15 @@ class Backend(object):
return -1
repository_config = repositories[repository]
else:
- if url:
- repository_config = {"repo_uri" : url, "db_uri" : url}
- else:
- self.logger.error('no url given\n')
+ self.logger.error('no repository given\n')
+ return -1
if self.sync_db:
- pkg_db = self.package_db_class(db_path, repository_config, common_config)
- if not pkg_db.db_uri:
- self.logger.error('no url given\n')
- return -1
+ pkg_db = self.package_db_generator(backend_path, repository,
+ common_config, repository_config, generate=False)
+ pkg_db.sync(repository_config["db_uri"])
else:
- pkg_db = self.package_db_class(db_path, repository_config, common_config)
- if not pkg_db.repo_uri:
- self.logger.error('no url given\n')
- return -1
-
- if self.sync_db:
- try:
- pkg_db.sync()
- except Exception as e:
- self.logger.error('sync failed: ' + str(e) + '\n')
- return -1
- else:
- try:
- pkg_db.generate()
- except Exception as e:
- self.logger.error('sync failed: ' + str(e) + '\n')
- return -1
+ pkg_db = self.package_db_generator(backend_path, repository, common_config, repository_config)
return 0
def list(self, args, config, global_config):
@@ -396,4 +374,33 @@ class Backend(object):
def __call__(self, args, config, global_config):
args = self.parser.parse_args(args)
+ info_f = FileJSON(os.path.join(args.overlay, self.sorcery_dir), "info.json", ["repositories"])
+ self.info = info_f.read()
+ repos = self.info["repositories"]
+ if args.repository:
+ if not repos:
+ repos = {}
+ back = config["package"]
+ if back in repos:
+ brepos = set(repos[back])
+ else:
+ brepos = set()
+ brepos.add(args.repository)
+ repos[back] = list(brepos)
+ self.info["repositories"] = repos
+ info_f.write(self.info)
+ else:
+ back = config["package"]
+ if back in repos:
+ brepos = repos[back]
+ if len(brepos) == 1:
+ args.repository = brepos[0]
+ else:
+ self.logger.error("No repository specified, possible values:")
+ for repo in brepos:
+ print(" " + repo)
+ return -1
+ else:
+ self.logger.error("No repository for backend " + back + " in overlay " + args.overlay)
+ return -1
return args.func(args, config, global_config)
diff --git a/g_sorcery/package_db.py b/g_sorcery/package_db.py
index 0becf56..8d087c8 100644
--- a/g_sorcery/package_db.py
+++ b/g_sorcery/package_db.py
@@ -15,6 +15,7 @@ import glob
import hashlib
import os
import shutil
+import sys
import tarfile
import portage
@@ -23,10 +24,9 @@ from .compatibility import basestring, py2k, TemporaryDirectory
from .exceptions import DBStructureError, IntegrityError, \
InvalidKeyError, SyncError
-
from .fileutils import FileJSON, hash_file, load_remote_file, copy_all, wget
-
from .g_collections import Package
+from .logger import Logger
class PackageDB(object):
@@ -109,69 +109,21 @@ class PackageDB(object):
return (Package(category, name, ver), ebuild_data)
- def __init__(self, directory, config = None, common_config = None):
+ def __init__(self, directory):
"""
Args:
directory: database directory.
repo_uri: Repository URI.
db_uri: URI for synchronization with remote database.
"""
- self.URI_NAME = 'uri.json'
- self.CONFIG_NAME = 'config.json'
- self.COMMON_CONFIG_NAME = 'common_config.json'
self.CATEGORIES_NAME = 'categories.json'
self.PACKAGES_NAME = 'packages.json'
self.VERSIONS_NAME = 'versions.json'
self.directory = os.path.abspath(directory)
- if config:
- self.config = config
- config_f = FileJSON(self.directory, self.CONFIG_NAME, [])
- config_f.write(self.config)
-
- self.common_config = common_config
- config_f = FileJSON(self.directory, self.COMMON_CONFIG_NAME, [])
- config_f.write(self.common_config)
- else:
- self.config = {}
- self.common_config = {}
-
- if "repo_uri" in self.config:
- repo_uri = self.config["repo_uri"]
- else:
- repo_uri = ""
-
- if "db_uri" in self.config:
- db_uri = self.config["db_uri"]
- else:
- db_uri = ""
-
- self.reset_uri(repo_uri, db_uri)
self.reset_db()
def __iter__(self):
return(PackageDB.Iterator(self))
-
- def reset_uri(self, repo_uri="", db_uri=""):
- """
- Reset URI information.
-
- Args:
- repo_uri: Repository URI.
- db_uri: URI for synchronization with remote database.
- """
- uri_f = FileJSON(self.directory, self.URI_NAME, ['repo_uri', 'db_uri'])
- uri = uri_f.read()
- if not repo_uri:
- self.repo_uri = uri['repo_uri']
- else:
- self.repo_uri = repo_uri
- if not db_uri:
- self.db_uri = uri['db_uri']
- else:
- self.db_uri = db_uri
- uri['repo_uri'] = self.repo_uri
- uri['db_uri'] = self.db_uri
- uri_f.write(uri)
def reset_db(self):
"""
@@ -180,85 +132,14 @@ class PackageDB(object):
self.database = {}
self.categories = {}
- def generate(self, repo_uri=""):
- """
- Generate new package database
-
- Args:
- repo_uri: Repository URI
- """
- if repo_uri:
- self.repo_uri = repo_uri
- self.clean()
- self.generate_tree()
- self.write()
- self.manifest()
-
- def generate_tree(self):
- """
- Generate tree
- """
- data = self.download_data()
- self.process_data(data)
-
- def parse_data(self, data_f):
- pass
-
- def process_data(data):
- pass
-
- def convert(self, dict_name, value):
- result = value
- for config in [self.common_config, self.config]:
- if config:
- if dict_name in config:
- transform = config[dict_name]
- if value in transform:
- result = transform[value]
- return result
-
- def get_download_uries(self):
- pass
-
- def decode_download_uries(self, uries):
- decoded = []
- for uri in uries:
- decuri = {}
- if isinstance(uri, basestring):
- decuri["uri"] = uri
- decuri["parser"] = self.parse_data
- decuri["open_file"] = True
- decuri["open_mode"] = "r"
- else:
- decuri = uri
- if not "parser" in decuri:
- decuri["parser"] = self.parse_data
- if not "open_file" in decuri:
- decuri["open_file"] = True
- if not "open_mode" in decuri:
- decuri["open_mode"] = "r"
- decoded.append(decuri)
- return decoded
-
- def download_data(self):
- uries = self.get_download_uries()
- uries = self.decode_download_uries(uries)
- data = {}
- for uri in uries:
- data.update(load_remote_file(**uri))
- return data
-
- def sync(self, db_uri=""):
+ def sync(self, db_uri):
"""
Synchronize local database with remote database.
Args:
db_uri: URI for synchronization with remote database.
"""
- if db_uri:
- self.db_uri = db_uri
- self.clean()
- real_db_uri = self.get_real_db_uri()
+ real_db_uri = self.get_real_db_uri(db_uri)
download_dir = TemporaryDirectory()
if wget(real_db_uri, download_dir.name):
raise SyncError('sync failed: ' + real_db_uri)
@@ -292,7 +173,7 @@ class PackageDB(object):
self.read()
- def get_real_db_uri(self):
+ def get_real_db_uri(self, db_uri):
"""
Convert self.db_uri to URI where remote database can be
fetched from.
@@ -300,7 +181,7 @@ class PackageDB(object):
Returns:
URI of remote database file.
"""
- return self.db_uri
+ return db_uri
def manifest(self):
"""
@@ -309,7 +190,7 @@ class PackageDB(object):
categories = FileJSON(self.directory, self.CATEGORIES_NAME, [])
categories = categories.read()
manifest = {}
- names = [self.CONFIG_NAME, self.COMMON_CONFIG_NAME, self.CATEGORIES_NAME, self.URI_NAME]
+ names = [self.CATEGORIES_NAME]
for name in names:
manifest[name] = hash_file(os.path.join(self.directory, name),
hashlib.md5())
@@ -338,7 +219,7 @@ class PackageDB(object):
result = True
errors = []
- names = [self.CONFIG_NAME, self.COMMON_CONFIG_NAME, self.CATEGORIES_NAME, self.URI_NAME]
+ names = [self.CATEGORIES_NAME]
for name in names:
if not name in manifest:
raise DBStructureError('Bad manifest: no ' + name + ' entry')
@@ -355,21 +236,26 @@ class PackageDB(object):
"""
Clean database.
"""
- shutil.rmtree(self.directory)
- self.reset_uri(self.repo_uri, self.db_uri)
+ if os.path.exists(self.directory):
+ shutil.rmtree(self.directory)
self.reset_db()
+ self.write()
+ self.manifest()
def write(self):
"""
Write database.
"""
- config_f = FileJSON(self.directory, self.CONFIG_NAME, [])
- common_config_f = FileJSON(self.directory, self.COMMON_CONFIG_NAME, [])
categories_f = FileJSON(self.directory, self.CATEGORIES_NAME, [])
- config_f.write(self.config)
- common_config_f.write(self.common_config)
categories_f.write(self.categories)
+ if self.database:
+ logger = Logger()
+ logger.info("writing database")
+
+ number_of_packages = len(list(self.database))
+ written_number = 0
+
for pkgname, versions in self.database.items():
category, name = pkgname.split('/')
if not category or (not category in self.categories):
@@ -391,11 +277,29 @@ class PackageDB(object):
pkgs.append(name)
f.write(pkgs)
+ chars = ['-','\\','|','/']
+ show = chars[written_number % 4]
+ percent = (written_number * 100)//number_of_packages
+ length = 20
+ progress = (percent * 20)//100
+ blank = 20 - progress
+
+ sys.stdout.write("\r %s [%s%s] %s%%" % (show, "#" * progress, " " * blank, percent))
+ sys.stdout.flush()
+ written_number += 1
+
+
+
for category in self.categories:
self.additional_write_category(category)
self.additional_write()
+ if self.database:
+ sys.stdout.write("\r %s [%s] %s%%" % ("-", "#" * 20, 100))
+ sys.stdout.flush()
+ print("")
+
def additional_write_version(self, category, package, version):
"""
Hook to be overrided.
@@ -427,11 +331,7 @@ class PackageDB(object):
sane, errors = self.check_manifest()
if not sane:
raise IntegrityError('Manifest error: ' + str(errors))
- config_f = FileJSON(self.directory, self.CONFIG_NAME, [])
- common_config_f = FileJSON(self.directory, self.COMMON_CONFIG_NAME, [])
categories_f = FileJSON(self.directory, self.CATEGORIES_NAME, [])
- self.config = config_f.read()
- self.common_config = common_config_f.read()
self.categories = categories_f.read()
for category in self.categories:
category_path = os.path.join(self.directory, category)
@@ -630,3 +530,89 @@ class PackageDB(object):
portage.pkgsplit(pkgname + '-' + max_ver)) > 0:
max_ver = version
return max_ver
+
+
+class DBGenerator(object):
+ """
+ Generator for package databases.
+ Creates new databases or syncs with existing.
+ """
+
+ __slots__ = ('package_db_class')
+
+ def __init__(self, package_db_class=PackageDB):
+ self.package_db_class = package_db_class
+
+ def __call__(self, directory, repository, common_config=None, config=None, generate=True):
+ db_path = os.path.join(directory, repository, "db")
+ pkg_db = self.package_db_class(db_path)
+
+ config_f = FileJSON(os.path.join(directory, repository), "config.json", [])
+ if config:
+ config_f.write(config)
+ else:
+ config = config_f.read()
+
+ common_config_f = FileJSON(directory, "config.json", [])
+ if common_config:
+ common_config_f.write(common_config)
+ else:
+ common_config = common_config_f.read()
+
+ if generate:
+ pkg_db.clean()
+ self.generate_tree(pkg_db, common_config, config)
+ pkg_db.write() #todo: make db write on every add_package and only necessary info
+ pkg_db.manifest()
+ return pkg_db
+
+ def generate_tree(self, pkg_db, common_config, config):
+ data = self.download_data(common_config, config)
+ self.process_data(pkg_db, data, common_config, config)
+
+ def download_data(self, common_config, config):
+ uries = self.get_download_uries(common_config, config)
+ uries = self.decode_download_uries(uries)
+ data = {}
+ for uri in uries:
+ data.update(load_remote_file(**uri))
+ return data
+
+ def get_download_uries(self, common_config, config):
+ pass
+
+ def decode_download_uries(self, uries):
+ decoded = []
+ for uri in uries:
+ decuri = {}
+ if isinstance(uri, basestring):
+ decuri["uri"] = uri
+ decuri["parser"] = self.parse_data
+ decuri["open_file"] = True
+ decuri["open_mode"] = "r"
+ else:
+ decuri = uri
+ if not "parser" in decuri:
+ decuri["parser"] = self.parse_data
+ if not "open_file" in decuri:
+ decuri["open_file"] = True
+ if not "open_mode" in decuri:
+ decuri["open_mode"] = "r"
+ decoded.append(decuri)
+ return decoded
+
+ def parse_data(self):
+ pass #todo: raise no implemeted or add some reasonable default
+
+ def process_data(self):
+ pass
+
+ def convert(self, configs, dict_name, value):
+ result = value
+ for config in configs:
+ if config:
+ if dict_name in config:
+ transform = config[dict_name]
+ if value in transform:
+ result = transform[value]
+ return result
diff --git a/gs_ctan/backend.py b/gs_ctan/backend.py
index fe1f7a3..70f2e05 100644
--- a/gs_ctan/backend.py
+++ b/gs_ctan/backend.py
@@ -19,7 +19,7 @@ from g_sorcery.ebuild import EbuildGenerator
from g_sorcery.eclass import EclassGenerator
from g_sorcery.fileutils import get_pkgpath
-from .ctan_db import CtanDB
+from .ctan_db import CtanDBGenerator
from .ebuild import CtanEbuildWithoutDigestGenerator
@@ -28,6 +28,6 @@ class CtanEclassGenerator(EclassGenerator):
super(CtanEclassGenerator, self).__init__(os.path.join(get_pkgpath(__file__), 'data'))
-instance = Backend(CtanDB,
+instance = Backend(CtanDBGenerator,
EbuildGenerator, CtanEbuildWithoutDigestGenerator,
- CtanEclassGenerator, MetadataGenerator, sync_db=False)
+ CtanEclassGenerator, MetadataGenerator)
diff --git a/gs_ctan/ctan_db.py b/gs_ctan/ctan_db.py
index 690f7d7..0a60744 100644
--- a/gs_ctan/ctan_db.py
+++ b/gs_ctan/ctan_db.py
@@ -14,18 +14,17 @@
import itertools
import os
import re
-import sys
import portage
from g_sorcery.g_collections import Dependency, Package, serializable_elist
-from g_sorcery.logger import Logger
-from g_sorcery.package_db import PackageDB
+from g_sorcery.package_db import DBGenerator
from g_sorcery.exceptions import SyncError
+from g_sorcery.logger import Logger
-class CtanDB(PackageDB):
- def __init__(self, directory, config = None, common_config = None):
- super(CtanDB, self).__init__(directory, config, common_config)
+class CtanDBGenerator(DBGenerator):
+ def __init__(self, package_db_class):
+ super(CtanDBGenerator, self).__init__(package_db_class)
logger = Logger()
gentoo_arch = portage.settings['ARCH']
@@ -38,8 +37,8 @@ class CtanDB(PackageDB):
logger.warning("not supported arch: " + gentoo_arch)
- def get_download_uries(self):
- tlpdb_uri = self.repo_uri + "/tlpkg/texlive.tlpdb.xz"
+ def get_download_uries(self, common_config, config):
+ tlpdb_uri = config["repo_uri"] + "/tlpkg/texlive.tlpdb.xz"
return [tlpdb_uri]
def parse_data(self, data_f):
@@ -98,19 +97,16 @@ class CtanDB(PackageDB):
return result
- def process_data(self, data):
+ def process_data(self, pkg_db, data, common_config, config):
category = "dev-tex"
- self.add_category(category)
+ pkg_db.add_category(category)
ARCH_LENGTH = len("ARCH")
data = data["texlive.tlpdb"]
- self.number_of_packages = len(data)
- self.written_number = 0
-
for entry in data:
realname = entry["name"]
@@ -146,7 +142,7 @@ class CtanDB(PackageDB):
version = entry["revision"]
if "catalogue-license" in entry:
- license = self.convert("licenses", entry["catalogue-license"])
+ license = self.convert([common_config, config], "licenses", entry["catalogue-license"])
else:
license = "unknown"
@@ -188,24 +184,4 @@ class CtanDB(PackageDB):
'longdescription' : longdescription
}
- self.add_package(Package(category, realname, version), ebuild_data)
-
- logger = Logger()
- logger.info("writing database")
-
- def additional_write_version(self, category, package, version):
- chars = ['-','\\','|','/']
- show = chars[self.written_number % 4]
- percent = (self.written_number * 100)//self.number_of_packages
- length = 20
- progress = (percent * 20)//100
- blank = 20 - progress
-
- sys.stdout.write("\r %s [%s%s] %s%%" % (show, "#" * progress, " " * blank, percent))
- sys.stdout.flush()
- self.written_number += 1
-
- def additional_write_category(self, category):
- sys.stdout.write("\r %s [%s] %s%%" % ("-", "#" * 20, 100))
- sys.stdout.flush()
- print("")
+ pkg_db.add_package(Package(category, realname, version), ebuild_data)
diff --git a/layman-git-g-sorcery.patch b/layman-git-g-sorcery.patch
index 1b93043..523bfd5 100644
--- a/layman-git-g-sorcery.patch
+++ b/layman-git-g-sorcery.patch
@@ -131,7 +131,7 @@ index 0000000..6e0b200
+
+ target = path([base, self.parent.name])
+
-+ args = [self.backend, '-o', target, 'sync', '-r', self.repository]
++ args = [self.backend, '-o', target, '-r', self.repository, 'sync']
+ returncode = self.run_command(self.command(), args, cwd=target)
+ if returncode:
+ return returncode
^ permalink raw reply related [flat|nested] only message in thread
only message in thread, other threads:[~2013-07-22 0:56 UTC | newest]
Thread overview: (only message) (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2013-07-22 0:56 [gentoo-commits] proj/g-sorcery:master commit in: /, g_sorcery/, gs_ctan/, g_elpa/ Jauhien Piatlicki
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox