public inbox for gentoo-commits@lists.gentoo.org
 help / color / mirror / Atom feed
* [gentoo-commits] proj/gentoolkit:master commit in: pym/gentoolkit/, pym/gentoolkit/test/eclean/, pym/gentoolkit/ekeyword/, ...
@ 2021-09-20 22:57 Matt Turner
  0 siblings, 0 replies; only message in thread
From: Matt Turner @ 2021-09-20 22:57 UTC (permalink / raw)
  To: gentoo-commits

commit:     bbcd72b5fe85fe9bbca1913f8aa22077d94e75d0
Author:     Wolfgang E. Sanyer <WolfgangESanyer <AT> gmail <DOT> com>
AuthorDate: Mon Sep 20 12:49:15 2021 +0000
Commit:     Matt Turner <mattst88 <AT> gentoo <DOT> org>
CommitDate: Mon Sep 20 22:51:52 2021 +0000
URL:        https://gitweb.gentoo.org/proj/gentoolkit.git/commit/?id=bbcd72b5

Change tabs to spaces (using autopep8). Also, format repo using black.

The following command was used to change the tabs to spaces:

autopep8 --in-place --select=E101,E11,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E133,E20,E211,E22,E224,E224,E226,E227,E228,E231,E241,E242,E251,E252,E26,E265,E266,E27,E301,E302,E303,E304,E305,E306,W291,W293,W391 -r .

And then black was run as `black .` on the entire tree

Signed-off-by: Wolfgang E. Sanyer <WolfgangESanyer <AT> gmail.com>
Signed-off-by: Matt Turner <mattst88 <AT> gentoo.org>

 bin/eclean                                 |   33 +-
 bin/eclean-dist                            |   33 +-
 bin/eclean-pkg                             |   33 +-
 bin/ekeyword                               |   28 +-
 bin/enalyze                                |   40 +-
 bin/epkginfo                               |   54 +-
 bin/equery                                 |   44 +-
 bin/eshowkw                                |    2 +-
 bin/imlate                                 |   28 +-
 bin/merge-driver-ekeyword                  |    9 +-
 bin/revdep-rebuild                         |   42 +-
 pym/gentoolkit/__init__.py                 |   10 +-
 pym/gentoolkit/atom.py                     |  626 +++++++-------
 pym/gentoolkit/base.py                     |  207 ++---
 pym/gentoolkit/cpv.py                      |  400 ++++-----
 pym/gentoolkit/dbapi.py                    |    8 +-
 pym/gentoolkit/dependencies.py             |  608 +++++++-------
 pym/gentoolkit/eclean/clean.py             |  274 +++----
 pym/gentoolkit/eclean/cli.py               | 1099 ++++++++++++++-----------
 pym/gentoolkit/eclean/exclude.py           |  483 +++++------
 pym/gentoolkit/eclean/output.py            |  354 ++++----
 pym/gentoolkit/eclean/pkgindex.py          |  148 ++--
 pym/gentoolkit/eclean/search.py            | 1122 ++++++++++++-------------
 pym/gentoolkit/ekeyword/ekeyword.py        |  825 ++++++++++---------
 pym/gentoolkit/ekeyword/test_ekeyword.py   |  638 +++++++--------
 pym/gentoolkit/enalyze/__init__.py         |  163 ++--
 pym/gentoolkit/enalyze/analyze.py          |  928 +++++++++++----------
 pym/gentoolkit/enalyze/lib.py              |  685 ++++++++--------
 pym/gentoolkit/enalyze/output.py           |  537 ++++++------
 pym/gentoolkit/enalyze/rebuild.py          |  708 ++++++++--------
 pym/gentoolkit/eprefix.py                  |    6 +-
 pym/gentoolkit/equery/__init__.py          |  556 ++++++-------
 pym/gentoolkit/equery/belongs.py           |  218 ++---
 pym/gentoolkit/equery/check.py             |  453 ++++++-----
 pym/gentoolkit/equery/depends.py           |  325 ++++----
 pym/gentoolkit/equery/depgraph.py          |  395 ++++-----
 pym/gentoolkit/equery/files.py             |  496 +++++------
 pym/gentoolkit/equery/has.py               |  326 ++++----
 pym/gentoolkit/equery/hasuse.py            |  272 ++++---
 pym/gentoolkit/equery/keywords.py          |    5 +-
 pym/gentoolkit/equery/list_.py             |  424 +++++-----
 pym/gentoolkit/equery/meta.py              |  955 +++++++++++-----------
 pym/gentoolkit/equery/size.py              |  257 +++---
 pym/gentoolkit/equery/uses.py              |  522 ++++++------
 pym/gentoolkit/equery/which.py             |  156 ++--
 pym/gentoolkit/errors.py                   |  205 ++---
 pym/gentoolkit/eshowkw/__init__.py         |  318 +++++---
 pym/gentoolkit/eshowkw/display_pretty.py   |  208 ++---
 pym/gentoolkit/eshowkw/keywords_content.py |  756 +++++++++--------
 pym/gentoolkit/eshowkw/keywords_header.py  |  273 ++++---
 pym/gentoolkit/flag.py                     |  276 +++----
 pym/gentoolkit/formatters.py               |  223 +++--
 pym/gentoolkit/helpers.py                  |  492 +++++------
 pym/gentoolkit/imlate/imlate.py            |  915 ++++++++++++---------
 pym/gentoolkit/keyword.py                  |  180 ++--
 pym/gentoolkit/metadata.py                 |  454 ++++++-----
 pym/gentoolkit/module_base.py              |  268 +++---
 pym/gentoolkit/package.py                  | 1118 ++++++++++++-------------
 pym/gentoolkit/pprinter.py                 |  253 +++---
 pym/gentoolkit/profile.py                  |  192 ++---
 pym/gentoolkit/query.py                    |  720 ++++++++--------
 pym/gentoolkit/revdep_rebuild/analyse.py   |  781 +++++++++---------
 pym/gentoolkit/revdep_rebuild/assign.py    |  312 +++----
 pym/gentoolkit/revdep_rebuild/cache.py     |  281 ++++---
 pym/gentoolkit/revdep_rebuild/collect.py   |  437 +++++-----
 pym/gentoolkit/revdep_rebuild/rebuild.py   |  266 +++---
 pym/gentoolkit/revdep_rebuild/runner.py    |   93 +--
 pym/gentoolkit/revdep_rebuild/settings.py  |  299 +++----
 pym/gentoolkit/revdep_rebuild/stuff.py     |  164 ++--
 pym/gentoolkit/sets.py                     |   73 +-
 pym/gentoolkit/test/eclean/creator.py      |  388 +++++----
 pym/gentoolkit/test/eclean/distsupport.py  |  885 ++++++++++----------
 pym/gentoolkit/test/eclean/test_clean.py   |    6 +-
 pym/gentoolkit/test/eclean/test_search.py  | 1221 +++++++++++++++-------------
 pym/gentoolkit/test/equery/test_init.py    |   74 +-
 pym/gentoolkit/test/test_atom.py           |  250 +++---
 pym/gentoolkit/test/test_cpv.py            |  219 ++---
 pym/gentoolkit/test/test_helpers.py        |  123 +--
 pym/gentoolkit/test/test_keyword.py        |   91 ++-
 pym/gentoolkit/test/test_profile.py        |   92 +--
 pym/gentoolkit/test/test_query.py          |  177 ++--
 pym/gentoolkit/test/test_syntax.py         |   38 +-
 pym/gentoolkit/textwrap_.py                |  180 ++--
 pym/gentoolkit/versionmatch.py             |  209 +++--
 setup.py                                   |  239 +++---
 85 files changed, 15252 insertions(+), 14032 deletions(-)

diff --git a/bin/eclean b/bin/eclean
index 90f9e55..c315c88 100755
--- a/bin/eclean
+++ b/bin/eclean
@@ -6,9 +6,9 @@ Distributed under the terms of the GNU General Public License v2
 
 # Meta:
 __author__ = "Thomas de Grenier de Latour (tgl), " + \
-	"modular re-write by: Brian Dolbec (dol-sen)"
+    "modular re-write by: Brian Dolbec (dol-sen)"
 __email__ = "degrenier@easyconnect.fr, " + \
-	"brian.dolbec@gmail.com"
+    "brian.dolbec@gmail.com"
 __version__ = "git"
 __productname__ = "eclean"
 __description__ = "A cleaning tool for Gentoo distfiles and binaries."
@@ -18,29 +18,28 @@ import sys
 
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 
 from gentoolkit.eclean.cli import main
 
 try:
-	main()
+    main()
 except KeyboardInterrupt:
-	print("Aborted.")
-	sys.exit(130)
+    print("Aborted.")
+    sys.exit(130)
 sys.exit(0)
-

diff --git a/bin/eclean-dist b/bin/eclean-dist
index 90f9e55..c315c88 100755
--- a/bin/eclean-dist
+++ b/bin/eclean-dist
@@ -6,9 +6,9 @@ Distributed under the terms of the GNU General Public License v2
 
 # Meta:
 __author__ = "Thomas de Grenier de Latour (tgl), " + \
-	"modular re-write by: Brian Dolbec (dol-sen)"
+    "modular re-write by: Brian Dolbec (dol-sen)"
 __email__ = "degrenier@easyconnect.fr, " + \
-	"brian.dolbec@gmail.com"
+    "brian.dolbec@gmail.com"
 __version__ = "git"
 __productname__ = "eclean"
 __description__ = "A cleaning tool for Gentoo distfiles and binaries."
@@ -18,29 +18,28 @@ import sys
 
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 
 from gentoolkit.eclean.cli import main
 
 try:
-	main()
+    main()
 except KeyboardInterrupt:
-	print("Aborted.")
-	sys.exit(130)
+    print("Aborted.")
+    sys.exit(130)
 sys.exit(0)
-

diff --git a/bin/eclean-pkg b/bin/eclean-pkg
index 90f9e55..c315c88 100755
--- a/bin/eclean-pkg
+++ b/bin/eclean-pkg
@@ -6,9 +6,9 @@ Distributed under the terms of the GNU General Public License v2
 
 # Meta:
 __author__ = "Thomas de Grenier de Latour (tgl), " + \
-	"modular re-write by: Brian Dolbec (dol-sen)"
+    "modular re-write by: Brian Dolbec (dol-sen)"
 __email__ = "degrenier@easyconnect.fr, " + \
-	"brian.dolbec@gmail.com"
+    "brian.dolbec@gmail.com"
 __version__ = "git"
 __productname__ = "eclean"
 __description__ = "A cleaning tool for Gentoo distfiles and binaries."
@@ -18,29 +18,28 @@ import sys
 
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 
 from gentoolkit.eclean.cli import main
 
 try:
-	main()
+    main()
 except KeyboardInterrupt:
-	print("Aborted.")
-	sys.exit(130)
+    print("Aborted.")
+    sys.exit(130)
 sys.exit(0)
-

diff --git a/bin/ekeyword b/bin/ekeyword
index 8767fe3..c45ff5c 100755
--- a/bin/ekeyword
+++ b/bin/ekeyword
@@ -15,27 +15,27 @@ import os
 import sys
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 from gentoolkit.ekeyword import ekeyword
 
 try:
-	ekeyword.main(sys.argv[1:])
+    ekeyword.main(sys.argv[1:])
 except KeyboardInterrupt:
-	print("Aborted.")
-	sys.exit(130)
+    print("Aborted.")
+    sys.exit(130)
 sys.exit(0)

diff --git a/bin/enalyze b/bin/enalyze
index 9e27bed..caa3362 100755
--- a/bin/enalyze
+++ b/bin/enalyze
@@ -13,32 +13,32 @@ files in the event of corruption, and possibly more.
 import sys
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 from gentoolkit import enalyze, errors
 
 try:
-	enalyze.main()
+    enalyze.main()
 except errors.GentoolkitException as err:
-	if '--debug' in sys.argv:
-		raise
-	else:
-		from gentoolkit import pprinter as pp
-		sys.stderr.write(pp.error(str(err)))
-		print()
-		print("Add '--debug' to global options for traceback.")
-		sys.exit(1)
+    if '--debug' in sys.argv:
+        raise
+    else:
+        from gentoolkit import pprinter as pp
+        sys.stderr.write(pp.error(str(err)))
+        print()
+        print("Add '--debug' to global options for traceback.")
+        sys.exit(1)

diff --git a/bin/epkginfo b/bin/epkginfo
index 5d3aab2..4cb483e 100755
--- a/bin/epkginfo
+++ b/bin/epkginfo
@@ -6,11 +6,11 @@
 """Shortcut to equery meta"""
 
 __authors__ = (
-	'Douglas Anderson <douglasjanderson@gmail.com>: equery meta',
-	'Ned Ludd <solar@gentoo.org>: first full implimentation'
-	'Eldad Zack <eldad@gentoo.org>: earch',
-	'Eric Olinger <EvvL AT RustedHalo DOT net>: metadata'
-	)
+    'Douglas Anderson <douglasjanderson@gmail.com>: equery meta',
+    'Ned Ludd <solar@gentoo.org>: first full implimentation'
+    'Eldad Zack <eldad@gentoo.org>: earch',
+    'Eric Olinger <EvvL AT RustedHalo DOT net>: metadata'
+)
 
 import sys
 
@@ -19,32 +19,34 @@ from gentoolkit.equery import mod_usage
 from gentoolkit.equery.meta import main, print_help
 from portage.exception import AmbiguousPackageName
 
+
 def print_epkginfo_help():
-	print(mod_usage(mod_name="epkginfo"))
-	print()
-	print_help(with_usage=False)
+    print(mod_usage(mod_name="epkginfo"))
+    print()
+    print_help(with_usage=False)
+
 
 equery.initialize_configuration()
 args = sys.argv[1:]
 if not args or set(('-h', '--help')).intersection(args):
-	print_epkginfo_help()
+    print_epkginfo_help()
 else:
-	try:
-		main(args)
-	except AmbiguousPackageName as e:
-		pkgs = e.args[0]
-		for candidate in pkgs:
-			print(candidate)
-
-		from gentoolkit import pprinter as pp
-		from os.path import basename # To get the short name
-
-		print(file=sys.stderr)
-		print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
-				file=sys.stderr, end="")
-		pp.die(1, "one of the above fully-qualified ebuild names instead.")
-	except errors.GentoolkitException as err:
-		from gentoolkit import pprinter as pp
-		pp.die(1, str(err))
+    try:
+        main(args)
+    except AmbiguousPackageName as e:
+        pkgs = e.args[0]
+        for candidate in pkgs:
+            print(candidate)
+
+        from gentoolkit import pprinter as pp
+        from os.path import basename  # To get the short name
+
+        print(file=sys.stderr)
+        print(pp.error("The short ebuild name '%s' is ambiguous. Please specify" % basename(pkgs[0])),
+              file=sys.stderr, end="")
+        pp.die(1, "one of the above fully-qualified ebuild names instead.")
+    except errors.GentoolkitException as err:
+        from gentoolkit import pprinter as pp
+        pp.die(1, str(err))
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/bin/equery b/bin/equery
index 386194d..0e52294 100755
--- a/bin/equery
+++ b/bin/equery
@@ -12,35 +12,35 @@ import os
 import sys
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 from gentoolkit import equery, errors
 
 try:
-	equery.main(sys.argv)
+    equery.main(sys.argv)
 except errors.GentoolkitNonZeroExit as err:
-	sys.exit(err.return_code)
+    sys.exit(err.return_code)
 except errors.GentoolkitException as err:
-	if '--debug' in sys.argv or bool(os.getenv('DEBUG', False)):
-		raise
-	else:
-		from gentoolkit import pprinter as pp
-		sys.stderr.write(pp.error(str(err)))
-		if err.is_serious:
-			print()
-			print("Add '--debug' to global options for traceback.")
-		sys.exit(1)
+    if '--debug' in sys.argv or bool(os.getenv('DEBUG', False)):
+        raise
+    else:
+        from gentoolkit import pprinter as pp
+        sys.stderr.write(pp.error(str(err)))
+        if err.is_serious:
+            print()
+            print("Add '--debug' to global options for traceback.")
+        sys.exit(1)

diff --git a/bin/eshowkw b/bin/eshowkw
index e987cce..0ef4dda 100755
--- a/bin/eshowkw
+++ b/bin/eshowkw
@@ -6,4 +6,4 @@
 import sys
 from gentoolkit.eshowkw import main as emain
 
-sys.exit(emain(sys.argv[1:]))
\ No newline at end of file
+sys.exit(emain(sys.argv[1:]))

diff --git a/bin/imlate b/bin/imlate
index cd4f7ab..318d612 100755
--- a/bin/imlate
+++ b/bin/imlate
@@ -15,27 +15,27 @@ import os
 import sys
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 from gentoolkit.imlate import imlate
 
 try:
-	imlate.main()
+    imlate.main()
 except KeyboardInterrupt:
-	print("Aborted.")
-	sys.exit(130)
+    print("Aborted.")
+    sys.exit(130)
 sys.exit(0)

diff --git a/bin/merge-driver-ekeyword b/bin/merge-driver-ekeyword
index 7f4a10b..73e0430 100755
--- a/bin/merge-driver-ekeyword
+++ b/bin/merge-driver-ekeyword
@@ -21,6 +21,7 @@ from gentoolkit.ekeyword import ekeyword
 
 KeywordChanges = List[Tuple[Optional[List[str]], Optional[List[str]]]]
 
+
 def keyword_array(keyword_line: str) -> List[str]:
     # Find indices of string inside the double-quotes
     i1: int = keyword_line.find('"') + 1
@@ -112,10 +113,10 @@ def main(argv: Sequence[str]) -> int:
     if len(argv) != 5:
         sys.exit(-1)
 
-    O = argv[1] # %O - filename of original
-    A = argv[2] # %A - filename of our current version
-    B = argv[3] # %B - filename of the other branch's version
-    P = argv[4] # %P - original path of the file
+    O = argv[1]  # %O - filename of original
+    A = argv[2]  # %A - filename of our current version
+    B = argv[3]  # %B - filename of the other branch's version
+    P = argv[4]  # %P - original path of the file
 
     # Get changes from %O to %B
     changes = keyword_changes(O, B)

diff --git a/bin/revdep-rebuild b/bin/revdep-rebuild
index 51783c5..332dfcf 100755
--- a/bin/revdep-rebuild
+++ b/bin/revdep-rebuild
@@ -12,35 +12,35 @@ dependent upon the upgraded package.
 import sys
 # This block ensures that ^C interrupts are handled quietly.
 try:
-	import signal
+    import signal
 
-	def exithandler(signum,frame):
-		signal.signal(signal.SIGINT, signal.SIG_IGN)
-		signal.signal(signal.SIGTERM, signal.SIG_IGN)
-		print()
-		sys.exit(1)
+    def exithandler(signum, frame):
+        signal.signal(signal.SIGINT, signal.SIG_IGN)
+        signal.signal(signal.SIGTERM, signal.SIG_IGN)
+        print()
+        sys.exit(1)
 
-	signal.signal(signal.SIGINT, exithandler)
-	signal.signal(signal.SIGTERM, exithandler)
-	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    signal.signal(signal.SIGINT, exithandler)
+    signal.signal(signal.SIGTERM, exithandler)
+    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
 
 except KeyboardInterrupt:
-	print()
-	sys.exit(1)
+    print()
+    sys.exit(1)
 
 from gentoolkit import errors
 from gentoolkit.revdep_rebuild import rebuild
 
 try:
-	success = rebuild.main(rebuild.parse_options())
-	sys.exit(success)
+    success = rebuild.main(rebuild.parse_options())
+    sys.exit(success)
 except errors.GentoolkitException as err:
-	if '--debug' in sys.argv:
-		raise
-	else:
-		from gentoolkit import pprinter as pp
-		sys.stderr.write(pp.error(str(err)))
-		print()
-		print("Add '--debug' to global options for traceback.")
-		sys.exit(1)
+    if '--debug' in sys.argv:
+        raise
+    else:
+        from gentoolkit import pprinter as pp
+        sys.stderr.write(pp.error(str(err)))
+        print()
+        print("Add '--debug' to global options for traceback.")
+        sys.exit(1)

diff --git a/pym/gentoolkit/__init__.py b/pym/gentoolkit/__init__.py
index 9af78fc..ab9ce9d 100644
--- a/pym/gentoolkit/__init__.py
+++ b/pym/gentoolkit/__init__.py
@@ -10,14 +10,14 @@ import sys
 
 CONFIG = {
     # Color handling: -1: Use Portage settings, 0: Force off, 1: Force on
-    'color': -1,
+    "color": -1,
     # Guess piping output:
-    'piping': False if sys.stdout.isatty() else True,
+    "piping": False if sys.stdout.isatty() else True,
     # Set some defaults:
-    'quiet': False,
+    "quiet": False,
     # verbose is True if not quiet and not piping
-    'verbose': True,
-    'debug': False
+    "verbose": True,
+    "debug": False,
 }
 
 # vim: set ts=8 sw=4 tw=79:

diff --git a/pym/gentoolkit/atom.py b/pym/gentoolkit/atom.py
index 364fe4e..dd843d7 100644
--- a/pym/gentoolkit/atom.py
+++ b/pym/gentoolkit/atom.py
@@ -6,7 +6,7 @@
 
 """Subclasses portage.dep.Atom to provide methods on a Gentoo atom string."""
 
-__all__ = ('Atom',)
+__all__ = ("Atom",)
 
 # =======
 # Imports
@@ -24,319 +24,319 @@ from gentoolkit import errors
 # Classes
 # =======
 
+
 class Atom(portage.dep.Atom, CPV):
-	"""Portage's Atom class with improvements from pkgcore.
+    """Portage's Atom class with improvements from pkgcore.
+
+    portage.dep.Atom provides the following instance variables:
+
+    @type operator: str
+    @ivar operator: one of ('=', '=*', '<', '>', '<=', '>=', '~', None)
+    @type cp: str
+    @ivar cp: cat/pkg
+    @type cpv: str
+    @ivar cpv: cat/pkg-ver (if ver)
+    @type slot: str or None (modified to tuple if not None)
+    @ivar slot: slot passed in as cpv:#
+    """
+
+    # Necessary for Portage versions < 2.1.7
+    _atoms = weakref.WeakValueDictionary()
+
+    def __init__(self, atom):
+        self.atom = atom
+        self.operator = self.blocker = self.use = self.slot = None
+
+        try:
+            portage.dep.Atom.__init__(self, atom)
+        except portage.exception.InvalidAtom:
+            raise errors.GentoolkitInvalidAtom(atom)
+
+        # Make operator compatible with intersects
+        if self.operator is None:
+            self.operator = ""
+
+        CPV.__init__(self, self.cpv)
+
+        # use_conditional is USE flag condition for this Atom to be required:
+        # For: !build? ( >=sys-apps/sed-4.0.5 ), use_conditional = '!build'
+        self.use_conditional = None
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            err = "other isn't of %s type, is %s"
+            raise TypeError(err % (self.__class__, other.__class__))
+
+        if self.operator != other.operator:
+            return False
+
+        if not CPV.__eq__(self, other):
+            return False
+
+        if bool(self.blocker) != bool(other.blocker):
+            return False
+
+        if self.blocker and other.blocker:
+            if self.blocker.overlap.forbid != other.blocker.overlap.forbid:
+                return False
+
+        if self.use_conditional != other.use_conditional:
+            return False
+
+        # Don't believe Portage has something like this
+        # c = cmp(self.negate_vers, other.negate_vers)
+        # if c:
+        #   return c
+
+        if self.slot != other.slot:
+            return False
+
+        this_use = None
+        if self.use is not None:
+            this_use = sorted(self.use.tokens)
+        that_use = None
+        if other.use is not None:
+            that_use = sorted(other.use.tokens)
+        if this_use != that_use:
+            return False
+
+        # Not supported by Portage Atom yet
+        # return cmp(self.repo_name, other.repo_name)
+        return True
+
+    def __hash__(self):
+        return hash(self.atom)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __lt__(self, other):
+        if not isinstance(other, self.__class__):
+            err = "other isn't of %s type, is %s"
+            raise TypeError(err % (self.__class__, other.__class__))
+
+        if self.operator != other.operator:
+            return self.operator < other.operator
+
+        if not CPV.__eq__(self, other):
+            return CPV.__lt__(self, other)
+
+        if bool(self.blocker) != bool(other.blocker):
+            # We want non blockers, then blockers, so only return True
+            # if self.blocker is True and other.blocker is False.
+            return bool(self.blocker) > bool(other.blocker)
+
+        if self.blocker and other.blocker:
+            if self.blocker.overlap.forbid != other.blocker.overlap.forbid:
+                # we want !! prior to !
+                return self.blocker.overlap.forbid < other.blocker.overlap.forbid
+
+        # Don't believe Portage has something like this
+        # c = cmp(self.negate_vers, other.negate_vers)
+        # if c:
+        #   return c
+
+        if self.slot != other.slot:
+            if self.slot is None:
+                return False
+            elif other.slot is None:
+                return True
+            return self.slot < other.slot
+
+        this_use = []
+        if self.use is not None:
+            this_use = sorted(self.use.tokens)
+        that_use = []
+        if other.use is not None:
+            that_use = sorted(other.use.tokens)
+        if this_use != that_use:
+            return this_use < that_use
+
+        # Not supported by Portage Atom yet
+        # return cmp(self.repo_name, other.repo_name)
+
+        return False
+
+    def __gt__(self, other):
+        if not isinstance(other, self.__class__):
+            err = "other isn't of %s type, is %s"
+            raise TypeError(err % (self.__class__, other.__class__))
+
+        return not self <= other
+
+    def __le__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        return self < other or self == other
+
+    def __ge__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        return self > other or self == other
+
+    def __repr__(self):
+        uc = self.use_conditional
+        uc = "%s? " % uc if uc is not None else ""
+        return "<%s %r>" % (self.__class__.__name__, "%s%s" % (uc, self.atom))
+
+    def __setattr__(self, name, value):
+        object.__setattr__(self, name, value)
+
+    def intersects(self, other):
+        """Check if a passed in package atom "intersects" this atom.
+
+        Lifted from pkgcore.
+
+        Two atoms "intersect" if a package can be constructed that
+        matches both:
+          - if you query for just "dev-lang/python" it "intersects" both
+                "dev-lang/python" and ">=dev-lang/python-2.4"
+          - if you query for "=dev-lang/python-2.4" it "intersects"
+                ">=dev-lang/python-2.4" and "dev-lang/python" but not
+                "<dev-lang/python-2.3"
+
+        @type other: L{gentoolkit.atom.Atom} or
+                L{gentoolkit.versionmatch.VersionMatch}
+        @param other: other package to compare
+        @see: L{pkgcore.ebuild.atom}
+        """
+        # Our "cp" (cat/pkg) must match exactly:
+        if self.cp != other.cp:
+            # Check to see if one is name only:
+            # We don't bother checking if self.category is None: it can't be
+            # because we're an Atom subclass and that would be invalid.
+            return not other.category and self.name == other.name
+
+        # Slot dep only matters if we both have one. If we do they
+        # must be identical:
+        this_slot = getattr(self, "slot", None)
+        that_slot = getattr(other, "slot", None)
+        if this_slot is not None and that_slot is not None and this_slot != that_slot:
+            return False
+
+        if self.repo is not None and other.repo is not None and self.repo != other.repo:
+            return False
+
+        # Use deps are similar: if one of us forces a flag on and the
+        # other forces it off we do not intersect. If only one of us
+        # cares about a flag it is irrelevant.
+
+        # Skip the (very common) case of one of us not having use deps:
+        this_use = getattr(self, "use", None)
+        that_use = getattr(other, "use", None)
+        if this_use and that_use:
+            # Set of flags we do not have in common:
+            flags = set(this_use.tokens) ^ set(that_use.tokens)
+            for flag in flags:
+                # If this is unset and we also have the set version we fail:
+                if flag[0] == "-" and flag[1:] in flags:
+                    return False
+
+        # Remaining thing to check is version restrictions. Get the
+        # ones we can check without actual version comparisons out of
+        # the way first.
+
+        # If one of us is unversioned we intersect:
+        if not self.operator or not other.operator:
+            return True
+
+        # If we are both "unbounded" in the same direction we intersect:
+        if ("<" in self.operator and "<" in other.operator) or (
+            ">" in self.operator and ">" in other.operator
+        ):
+            return True
+
+        # If one of us is an exact match we intersect if the other matches it:
+        if self.operator == "=":
+            if other.operator == "=*":
+                return self.fullversion.startswith(other.fullversion)
+            return VersionMatch(other, op=other.operator).match(self)
+        if other.operator == "=":
+            if self.operator == "=*":
+                return other.fullversion.startswith(self.fullversion)
+            return VersionMatch(self, op=self.operator).match(other)
+
+        # If we are both ~ matches we match if we are identical:
+        if self.operator == other.operator == "~":
+            return self.version == other.version and self.revision == other.revision
+
+        # If we are both glob matches we match if one of us matches the other.
+        if self.operator == other.operator == "=*":
+            return self.fullversion.startswith(
+                other.fullversion
+            ) or other.fullversion.startswith(self.fullversion)
+
+        # If one of us is a glob match and the other a ~ we match if the glob
+        # matches the ~ (ignoring a revision on the glob):
+        if self.operator == "=*" and other.operator == "~":
+            return other.fullversion.startswith(self.version)
+        if other.operator == "=*" and self.operator == "~":
+            return self.fullversion.startswith(other.version)
+
+        # If we get here at least one of us is a <, <=, > or >=:
+        if self.operator in ("<", "<=", ">", ">="):
+            ranged, ranged.operator = self, self.operator
+        else:
+            ranged, ranged.operator = other, other.operator
+            other, other.operator = self, self.operator
+
+        if "<" in other.operator or ">" in other.operator:
+            # We are both ranged, and in the opposite "direction" (or
+            # we would have matched above). We intersect if we both
+            # match the other's endpoint (just checking one endpoint
+            # is not enough, it would give a false positive on <=2 vs >2)
+            return VersionMatch(other, op=other.operator).match(
+                ranged
+            ) and VersionMatch(ranged, op=ranged.operator).match(other)
+
+        if other.operator == "~":
+            # Other definitely matches its own version. If ranged also
+            # does we're done:
+            if VersionMatch(ranged, op=ranged.operator).match(other):
+                return True
+            # The only other case where we intersect is if ranged is a
+            # > or >= on other's version and a nonzero revision. In
+            # that case other will match ranged. Be careful not to
+            # give a false positive for ~2 vs <2 here:
+            return ranged.operator in (">", ">=") and VersionMatch(
+                other, op=other.operator
+            ).match(ranged)
+
+        if other.operator == "=*":
+            # a glob match definitely matches its own version, so if
+            # ranged does too we're done:
+            if VersionMatch(ranged, op=ranged.operator).match(other):
+                return True
+            if "<" in ranged.operator:
+                # If other.revision is not defined then other does not
+                # match anything smaller than its own fullversion:
+                if other.revision:
+                    return False
+
+                # If other.revision is defined then we can always
+                # construct a package smaller than other.fullversion by
+                # tagging e.g. an _alpha1 on.
+                return ranged.fullversion.startswith(other.version)
+            else:
+                # Remaining cases where this intersects: there is a
+                # package greater than ranged.fullversion and
+                # other.fullversion that they both match.
+                return ranged.fullversion.startswith(other.version)
+
+        # Handled all possible ops.
+        raise NotImplementedError(
+            "Someone added an operator without adding it to intersects"
+        )
+
+    def get_depstr(self):
+        """Returns a string representation of the original dep"""
+        uc = self.use_conditional
+        uc = "%s? " % uc if uc is not None else ""
+        return "%s%s" % (uc, self.atom)
 
-	portage.dep.Atom provides the following instance variables:
-
-	@type operator: str
-	@ivar operator: one of ('=', '=*', '<', '>', '<=', '>=', '~', None)
-	@type cp: str
-	@ivar cp: cat/pkg
-	@type cpv: str
-	@ivar cpv: cat/pkg-ver (if ver)
-	@type slot: str or None (modified to tuple if not None)
-	@ivar slot: slot passed in as cpv:#
-	"""
-
-	# Necessary for Portage versions < 2.1.7
-	_atoms = weakref.WeakValueDictionary()
-
-	def __init__(self, atom):
-		self.atom = atom
-		self.operator = self.blocker = self.use = self.slot = None
-
-		try:
-			portage.dep.Atom.__init__(self, atom)
-		except portage.exception.InvalidAtom:
-			raise errors.GentoolkitInvalidAtom(atom)
-
-		# Make operator compatible with intersects
-		if self.operator is None:
-			self.operator = ''
-
-		CPV.__init__(self, self.cpv)
-
-		# use_conditional is USE flag condition for this Atom to be required:
-		# For: !build? ( >=sys-apps/sed-4.0.5 ), use_conditional = '!build'
-		self.use_conditional = None
-
-	def __eq__(self, other):
-		if not isinstance(other, self.__class__):
-			err = "other isn't of %s type, is %s"
-			raise TypeError(err % (self.__class__, other.__class__))
-
-		if self.operator != other.operator:
-			return False
-
-		if not CPV.__eq__(self, other):
-			return False
-
-		if bool(self.blocker) != bool(other.blocker):
-			return False
-
-		if self.blocker and other.blocker:
-			if self.blocker.overlap.forbid != other.blocker.overlap.forbid:
-				return False
-
-		if self.use_conditional != other.use_conditional:
-			return False
-
-		# Don't believe Portage has something like this
-		#c = cmp(self.negate_vers, other.negate_vers)
-		#if c:
-		#   return c
-
-		if self.slot != other.slot:
-			return False
-
-		this_use = None
-		if self.use is not None:
-			this_use = sorted(self.use.tokens)
-		that_use = None
-		if other.use is not None:
-			that_use = sorted(other.use.tokens)
-		if this_use != that_use:
-			return False
-
-		# Not supported by Portage Atom yet
-		#return cmp(self.repo_name, other.repo_name)
-		return True
-
-	def __hash__(self):
-		return hash(self.atom)
-
-	def __ne__(self, other):
-		return not self == other
-
-	def __lt__(self, other):
-		if not isinstance(other, self.__class__):
-			err = "other isn't of %s type, is %s"
-			raise TypeError(err % (self.__class__, other.__class__))
-
-		if self.operator != other.operator:
-			return self.operator < other.operator
-
-		if not CPV.__eq__(self, other):
-			return CPV.__lt__(self, other)
-
-		if bool(self.blocker) != bool(other.blocker):
-			# We want non blockers, then blockers, so only return True
-			# if self.blocker is True and other.blocker is False.
-			return bool(self.blocker) > bool(other.blocker)
-
-		if self.blocker and other.blocker:
-			if self.blocker.overlap.forbid != other.blocker.overlap.forbid:
-				# we want !! prior to !
-				return (self.blocker.overlap.forbid <
-					other.blocker.overlap.forbid)
-
-		# Don't believe Portage has something like this
-		#c = cmp(self.negate_vers, other.negate_vers)
-		#if c:
-		#   return c
-
-		if self.slot != other.slot:
-			if self.slot is None:
-				return False
-			elif other.slot is None:
-				return True
-			return self.slot < other.slot
-
-		this_use = []
-		if self.use is not None:
-			this_use = sorted(self.use.tokens)
-		that_use = []
-		if other.use is not None:
-			that_use = sorted(other.use.tokens)
-		if this_use != that_use:
-			return this_use < that_use
-
-		# Not supported by Portage Atom yet
-		#return cmp(self.repo_name, other.repo_name)
-
-		return False
-
-	def __gt__(self, other):
-		if not isinstance(other, self.__class__):
-			err = "other isn't of %s type, is %s"
-			raise TypeError(err % (self.__class__, other.__class__))
-
-		return not self <= other
-
-	def __le__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		return self < other or self == other
-
-	def __ge__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		return self > other or self == other
-
-	def __repr__(self):
-		uc = self.use_conditional
-		uc = "%s? " % uc if uc is not None else ''
-		return "<%s %r>" % (self.__class__.__name__, "%s%s" % (uc, self.atom))
-
-	def __setattr__(self, name, value):
-		object.__setattr__(self, name, value)
-
-	def intersects(self, other):
-		"""Check if a passed in package atom "intersects" this atom.
-
-		Lifted from pkgcore.
-
-		Two atoms "intersect" if a package can be constructed that
-		matches both:
-		  - if you query for just "dev-lang/python" it "intersects" both
-			"dev-lang/python" and ">=dev-lang/python-2.4"
-		  - if you query for "=dev-lang/python-2.4" it "intersects"
-			">=dev-lang/python-2.4" and "dev-lang/python" but not
-			"<dev-lang/python-2.3"
-
-		@type other: L{gentoolkit.atom.Atom} or
-			L{gentoolkit.versionmatch.VersionMatch}
-		@param other: other package to compare
-		@see: L{pkgcore.ebuild.atom}
-		"""
-		# Our "cp" (cat/pkg) must match exactly:
-		if self.cp != other.cp:
-			# Check to see if one is name only:
-			# We don't bother checking if self.category is None: it can't be
-			# because we're an Atom subclass and that would be invalid.
-			return (not other.category and self.name == other.name)
-
-		# Slot dep only matters if we both have one. If we do they
-		# must be identical:
-		this_slot = getattr(self, 'slot', None)
-		that_slot = getattr(other, 'slot', None)
-		if (this_slot is not None and that_slot is not None and
-			this_slot != that_slot):
-			return False
-
-		if (self.repo is not None and other.repo is not None and
-			self.repo != other.repo):
-			return False
-
-		# Use deps are similar: if one of us forces a flag on and the
-		# other forces it off we do not intersect. If only one of us
-		# cares about a flag it is irrelevant.
-
-		# Skip the (very common) case of one of us not having use deps:
-		this_use = getattr(self, 'use', None)
-		that_use = getattr(other, 'use', None)
-		if this_use and that_use:
-			# Set of flags we do not have in common:
-			flags = set(this_use.tokens) ^ set(that_use.tokens)
-			for flag in flags:
-				# If this is unset and we also have the set version we fail:
-				if flag[0] == '-' and flag[1:] in flags:
-					return False
-
-		# Remaining thing to check is version restrictions. Get the
-		# ones we can check without actual version comparisons out of
-		# the way first.
-
-		# If one of us is unversioned we intersect:
-		if not self.operator or not other.operator:
-			return True
-
-		# If we are both "unbounded" in the same direction we intersect:
-		if (('<' in self.operator and '<' in other.operator) or
-			('>' in self.operator and '>' in other.operator)):
-			return True
-
-		# If one of us is an exact match we intersect if the other matches it:
-		if self.operator == '=':
-			if other.operator == '=*':
-				return self.fullversion.startswith(other.fullversion)
-			return VersionMatch(other, op=other.operator).match(self)
-		if other.operator == '=':
-			if self.operator == '=*':
-				return other.fullversion.startswith(self.fullversion)
-			return VersionMatch(self, op=self.operator).match(other)
-
-		# If we are both ~ matches we match if we are identical:
-		if self.operator == other.operator == '~':
-			return (self.version == other.version and
-				self.revision == other.revision)
-
-		# If we are both glob matches we match if one of us matches the other.
-		if self.operator == other.operator == '=*':
-			return (self.fullversion.startswith(other.fullversion) or
-				other.fullversion.startswith(self.fullversion))
-
-		# If one of us is a glob match and the other a ~ we match if the glob
-		# matches the ~ (ignoring a revision on the glob):
-		if self.operator == '=*' and other.operator == '~':
-			return other.fullversion.startswith(self.version)
-		if other.operator == '=*' and self.operator == '~':
-			return self.fullversion.startswith(other.version)
-
-		# If we get here at least one of us is a <, <=, > or >=:
-		if self.operator in ('<', '<=', '>', '>='):
-			ranged, ranged.operator = self, self.operator
-		else:
-			ranged, ranged.operator = other, other.operator
-			other, other.operator = self, self.operator
-
-		if '<' in other.operator or '>' in other.operator:
-			# We are both ranged, and in the opposite "direction" (or
-			# we would have matched above). We intersect if we both
-			# match the other's endpoint (just checking one endpoint
-			# is not enough, it would give a false positive on <=2 vs >2)
-			return (
-				VersionMatch(other, op=other.operator).match(ranged) and
-				VersionMatch(ranged, op=ranged.operator).match(other)
-			)
-
-		if other.operator == '~':
-			# Other definitely matches its own version. If ranged also
-			# does we're done:
-			if VersionMatch(ranged, op=ranged.operator).match(other):
-				return True
-			# The only other case where we intersect is if ranged is a
-			# > or >= on other's version and a nonzero revision. In
-			# that case other will match ranged. Be careful not to
-			# give a false positive for ~2 vs <2 here:
-			return (ranged.operator in ('>', '>=') and
-				VersionMatch(other, op=other.operator).match(ranged))
-
-		if other.operator == '=*':
-			# a glob match definitely matches its own version, so if
-			# ranged does too we're done:
-			if VersionMatch(ranged, op=ranged.operator).match(other):
-				return True
-			if '<' in ranged.operator:
-				# If other.revision is not defined then other does not
-				# match anything smaller than its own fullversion:
-				if other.revision:
-					return False
-
-				# If other.revision is defined then we can always
-				# construct a package smaller than other.fullversion by
-				# tagging e.g. an _alpha1 on.
-				return ranged.fullversion.startswith(other.version)
-			else:
-				# Remaining cases where this intersects: there is a
-				# package greater than ranged.fullversion and
-				# other.fullversion that they both match.
-				return ranged.fullversion.startswith(other.version)
-
-		# Handled all possible ops.
-		raise NotImplementedError(
-			'Someone added an operator without adding it to intersects')
-
-	def get_depstr(self):
-		"""Returns a string representation of the original dep
-		"""
-		uc = self.use_conditional
-		uc = "%s? " % uc if uc is not None else ''
-		return "%s%s" % (uc, self.atom)
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/base.py b/pym/gentoolkit/base.py
index 372ed74..8dfa2db 100644
--- a/pym/gentoolkit/base.py
+++ b/pym/gentoolkit/base.py
@@ -6,7 +6,7 @@
 """Gentoolkit Base Module class to hold common module operation functions
 """
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 
 import os
@@ -18,127 +18,132 @@ from gentoolkit.formatters import format_options
 
 
 GLOBAL_OPTIONS = (
-	("    -h, --help", "display this help message"),
-	("    -q, --quiet", "minimal output"),
-	("    -C, --no-color", "turn off colors"),
-	("    -N, --no-pipe", "turn off pipe detection"),
-	("    -V, --version", "display version info")
+    ("    -h, --help", "display this help message"),
+    ("    -q, --quiet", "minimal output"),
+    ("    -C, --no-color", "turn off colors"),
+    ("    -N, --no-pipe", "turn off pipe detection"),
+    ("    -V, --version", "display version info"),
 )
 
 
 def initialize_configuration():
-	"""Setup the standard equery config"""
-
-	# Get terminal size
-	term_width = pp.output.get_term_size()[1]
-	if term_width < 1:
-		# get_term_size() failed. Set a sane default width:
-		term_width = 80
-	# Terminal size, minus a 1-char margin for text wrapping
-	gentoolkit.CONFIG['termWidth'] = term_width - 1
-	# Guess color output
-	if (gentoolkit.CONFIG['color'] == -1 and (not sys.stdout.isatty() or
-		os.getenv("NOCOLOR") in ("yes", "true")) or gentoolkit.CONFIG['color'] == 0):
-		pp.output.nocolor()
-	gentoolkit.CONFIG['verbose'] = not gentoolkit.CONFIG['piping']
+    """Setup the standard equery config"""
+
+    # Get terminal size
+    term_width = pp.output.get_term_size()[1]
+    if term_width < 1:
+        # get_term_size() failed. Set a sane default width:
+        term_width = 80
+    # Terminal size, minus a 1-char margin for text wrapping
+    gentoolkit.CONFIG["termWidth"] = term_width - 1
+    # Guess color output
+    if (
+        gentoolkit.CONFIG["color"] == -1
+        and (not sys.stdout.isatty() or os.getenv("NOCOLOR") in ("yes", "true"))
+        or gentoolkit.CONFIG["color"] == 0
+    ):
+        pp.output.nocolor()
+    gentoolkit.CONFIG["verbose"] = not gentoolkit.CONFIG["piping"]
 
 
 def split_arguments(args):
-	"""Separate module name from module arguments"""
+    """Separate module name from module arguments"""
 
-	return args.pop(0), args
+    return args.pop(0), args
 
 
 def main_usage(module_info):
-	"""Return the main usage message for analyse"""
-	return "%(usage)s %(product)s [%(g_opts)s] %(mod_name)s [%(mod_opts)s]" % {
-		'usage': pp.emph("Usage:"),
-		'product': pp.productname(module_info["__productname__"]),
-		'g_opts': pp.globaloption("global-options"),
-		'mod_name': pp.command("module-name"),
-		'mod_opts': pp.localoption("module-options")
-	}
+    """Return the main usage message for analyse"""
+    return "%(usage)s %(product)s [%(g_opts)s] %(mod_name)s [%(mod_opts)s]" % {
+        "usage": pp.emph("Usage:"),
+        "product": pp.productname(module_info["__productname__"]),
+        "g_opts": pp.globaloption("global-options"),
+        "mod_name": pp.command("module-name"),
+        "mod_opts": pp.localoption("module-options"),
+    }
 
 
 def print_version(module_info):
-	"""Print the version of this tool to the console."""
+    """Print the version of this tool to the console."""
 
-	print("%(product)s (%(version)s) - %(docstring)s" % {
-		"product": pp.productname(module_info["__productname__"]),
-		"version": module_info["__version__"],
-		"docstring": module_info["__doc__"]
-	})
+    print(
+        "%(product)s (%(version)s) - %(docstring)s"
+        % {
+            "product": pp.productname(module_info["__productname__"]),
+            "version": module_info["__version__"],
+            "docstring": module_info["__doc__"],
+        }
+    )
 
 
 def print_help(module_info, formatted_options=None, with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@param with_description (bool): Option to print module's __doc__ or not
-	"""
-
-	if with_description:
-		print()
-		print(module_info["__doc__"])
-		print()
-	print(main_usage(module_info))
-	print()
-	print(pp.globaloption("global options"))
-	print(format_options(GLOBAL_OPTIONS))
-	print()
-	if formatted_options:
-		print(pp.command("modules") + " (" + pp.command("short name") + ")")
-		print(format_options(formatted_options))
-	else:
-		print("Error: calling function did not supply formatted options")
-		print()
+    """Print description, usage and a detailed help message.
+
+    @param with_description (bool): Option to print module's __doc__ or not
+    """
+
+    if with_description:
+        print()
+        print(module_info["__doc__"])
+        print()
+    print(main_usage(module_info))
+    print()
+    print(pp.globaloption("global options"))
+    print(format_options(GLOBAL_OPTIONS))
+    print()
+    if formatted_options:
+        print(pp.command("modules") + " (" + pp.command("short name") + ")")
+        print(format_options(formatted_options))
+    else:
+        print("Error: calling function did not supply formatted options")
+        print()
 
 
 def parse_global_options(global_opts, args, module_info, formatted_options):
-	"""Parse global input args and return True if we should display help for
-	the called module, else False (or display help and exit from here).
-	"""
-
-	need_help = False
-	do_help = False
-	opts = (opt[0] for opt in global_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			do_help = True
-			if args:
-				need_help = True
-			else:
-				do_help = True
-		elif opt in ('-q','--quiet'):
-			gentoolkit.CONFIG['quiet'] = True
-		elif opt in ('-C', '--no-color', '--nocolor'):
-			gentoolkit.CONFIG['color'] = 0
-			pp.output.nocolor()
-		elif opt in ('-N', '--no-pipe'):
-			gentoolkit.CONFIG['piping'] = False
-		elif opt in ('-V', '--version'):
-			print_version(module_info)
-			sys.exit(0)
-		elif opt in ('--debug'):
-			gentoolkit.CONFIG['debug'] = True
-	if do_help:
-		print_help( module_info, formatted_options)
-		sys.exit(0)
-	return need_help
+    """Parse global input args and return True if we should display help for
+    the called module, else False (or display help and exit from here).
+    """
+
+    need_help = False
+    do_help = False
+    opts = (opt[0] for opt in global_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            do_help = True
+            if args:
+                need_help = True
+            else:
+                do_help = True
+        elif opt in ("-q", "--quiet"):
+            gentoolkit.CONFIG["quiet"] = True
+        elif opt in ("-C", "--no-color", "--nocolor"):
+            gentoolkit.CONFIG["color"] = 0
+            pp.output.nocolor()
+        elif opt in ("-N", "--no-pipe"):
+            gentoolkit.CONFIG["piping"] = False
+        elif opt in ("-V", "--version"):
+            print_version(module_info)
+            sys.exit(0)
+        elif opt in ("--debug"):
+            gentoolkit.CONFIG["debug"] = True
+    if do_help:
+        print_help(module_info, formatted_options)
+        sys.exit(0)
+    return need_help
 
 
 def mod_usage(mod_name="module", arg="pkgspec", optional=False):
-	"""Provide a consistant usage message to the calling module.
-
-	@type arg: string
-	@param arg: what kind of argument the module takes (pkgspec, filename, etc)
-	@type optional: bool
-	@param optional: is the argument optional?
-	"""
-
-	return "%(usage)s: %(mod_name)s [%(opts)s] %(arg)s" % {
-		'usage': pp.emph("Usage"),
-		'mod_name': pp.command(mod_name),
-		'opts': pp.localoption("options"),
-		'arg': ("[%s]" % pp.emph(arg)) if optional else pp.emph(arg)
-	}
-
+    """Provide a consistant usage message to the calling module.
+
+    @type arg: string
+    @param arg: what kind of argument the module takes (pkgspec, filename, etc)
+    @type optional: bool
+    @param optional: is the argument optional?
+    """
+
+    return "%(usage)s: %(mod_name)s [%(opts)s] %(arg)s" % {
+        "usage": pp.emph("Usage"),
+        "mod_name": pp.command(mod_name),
+        "opts": pp.localoption("options"),
+        "arg": ("[%s]" % pp.emph(arg)) if optional else pp.emph(arg),
+    }

diff --git a/pym/gentoolkit/cpv.py b/pym/gentoolkit/cpv.py
index 5238e24..6b2a533 100644
--- a/pym/gentoolkit/cpv.py
+++ b/pym/gentoolkit/cpv.py
@@ -6,11 +6,7 @@
 
 """Provides attributes and methods for a category/package-version string."""
 
-__all__ = (
-	'CPV',
-	'compare_strs',
-	'split_cpv'
-)
+__all__ = ("CPV", "compare_strs", "split_cpv")
 
 # =======
 # Imports
@@ -26,228 +22,232 @@ from gentoolkit import errors
 # Globals
 # =======
 
-isvalid_version_re = re.compile(r"^(?:cvs\.)?(?:\d+)(?:\.\d+)*[a-z]?"
-	r"(?:_(p(?:re)?|beta|alpha|rc)\d*)*$")
+isvalid_version_re = re.compile(
+    r"^(?:cvs\.)?(?:\d+)(?:\.\d+)*[a-z]?" r"(?:_(p(?:re)?|beta|alpha|rc)\d*)*$"
+)
 isvalid_cat_re = re.compile(r"^(?:[a-zA-Z0-9][-a-zA-Z0-9+._]*(?:/(?!$))?)+$")
 _pkg_re = re.compile(r"^[a-zA-Z0-9+._]+$")
 # Prefix specific revision is of the form -r0<digit>+.<digit>+
-isvalid_rev_re = re.compile(r'(\d+|0\d+\.\d+)')
+isvalid_rev_re = re.compile(r"(\d+|0\d+\.\d+)")
 
 # =======
 # Classes
 # =======
 
+
 class CPV:
-	"""Provides methods on a category/package-version string.
-
-	Will also correctly split just a package or package-version string.
-
-	Example usage:
-		>>> from gentoolkit.cpv import CPV
-		>>> cpv = CPV('sys-apps/portage-2.2-r1')
-		>>> cpv.category, cpv.name, cpv.fullversion
-		('sys-apps', 'portage', '2.2-r1')
-		>>> str(cpv)
-		'sys-apps/portage-2.2-r1'
-		>>> # An 'rc' (release candidate) version is less than non 'rc' version:
-		... CPV('sys-apps/portage-2') > CPV('sys-apps/portage-2_rc10')
-		True
-	"""
-
-	def __init__(self, cpv, validate=False):
-		self.cpv = cpv
-		self._category = None
-		self._name = None
-		self._version = None
-		self._revision = None
-		self._cp = None
-		self._fullversion = None
-
-		self.validate = validate
-		if validate and not self.name:
-			raise errors.GentoolkitInvalidCPV(cpv)
-
-	@property
-	def category(self):
-		if self._category is None:
-			self._set_cpv_chunks()
-		return self._category
-
-	@property
-	def name(self):
-		if self._name is None:
-			self._set_cpv_chunks()
-		return self._name
-
-	@property
-	def version(self):
-		if self._version is None:
-			self._set_cpv_chunks()
-		return self._version
-
-	@property
-	def revision(self):
-		if self._revision is None:
-			self._set_cpv_chunks()
-		return self._revision
-
-	@property
-	def cp(self):
-		if self._cp is None:
-			sep = '/' if self.category else ''
-			self._cp = sep.join((self.category, self.name))
-		return self._cp
-
-	@property
-	def fullversion(self):
-		if self._fullversion is None:
-			sep = '-' if self.revision else ''
-			self._fullversion = sep.join((self.version, self.revision))
-		return self._fullversion
-
-	def _set_cpv_chunks(self):
-		chunks = split_cpv(self.cpv, validate=self.validate)
-		self._category = chunks[0]
-		self._name = chunks[1]
-		self._version = chunks[2]
-		self._revision = chunks[3]
-
-	def __eq__(self, other):
-		if not isinstance(other, self.__class__):
-			return False
-		return self.cpv == other.cpv
-
-	def __hash__(self):
-		return hash(self.cpv)
-
-	def __ne__(self, other):
-		return not self == other
-
-	def __lt__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-
-		if self.category != other.category:
-			return self.category < other.category
-		elif self.name != other.name:
-			return self.name < other.name
-		else:
-			# FIXME: this cmp() hack is for vercmp not using -1,0,1
-			# See bug 266493; this was fixed in portage-2.2_rc31
-			#return vercmp(self.fullversion, other.fullversion)
-			return vercmp(self.fullversion, other.fullversion) < 0
-
-	def __gt__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		return not self <= other
-
-	def __le__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		return self < other or self == other
-
-	def __ge__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		return self > other or self == other
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, str(self))
-
-	def __str__(self):
-		return self.cpv
+    """Provides methods on a category/package-version string.
+
+    Will also correctly split just a package or package-version string.
+
+    Example usage:
+            >>> from gentoolkit.cpv import CPV
+            >>> cpv = CPV('sys-apps/portage-2.2-r1')
+            >>> cpv.category, cpv.name, cpv.fullversion
+            ('sys-apps', 'portage', '2.2-r1')
+            >>> str(cpv)
+            'sys-apps/portage-2.2-r1'
+            >>> # An 'rc' (release candidate) version is less than non 'rc' version:
+            ... CPV('sys-apps/portage-2') > CPV('sys-apps/portage-2_rc10')
+            True
+    """
+
+    def __init__(self, cpv, validate=False):
+        self.cpv = cpv
+        self._category = None
+        self._name = None
+        self._version = None
+        self._revision = None
+        self._cp = None
+        self._fullversion = None
+
+        self.validate = validate
+        if validate and not self.name:
+            raise errors.GentoolkitInvalidCPV(cpv)
+
+    @property
+    def category(self):
+        if self._category is None:
+            self._set_cpv_chunks()
+        return self._category
+
+    @property
+    def name(self):
+        if self._name is None:
+            self._set_cpv_chunks()
+        return self._name
+
+    @property
+    def version(self):
+        if self._version is None:
+            self._set_cpv_chunks()
+        return self._version
+
+    @property
+    def revision(self):
+        if self._revision is None:
+            self._set_cpv_chunks()
+        return self._revision
+
+    @property
+    def cp(self):
+        if self._cp is None:
+            sep = "/" if self.category else ""
+            self._cp = sep.join((self.category, self.name))
+        return self._cp
+
+    @property
+    def fullversion(self):
+        if self._fullversion is None:
+            sep = "-" if self.revision else ""
+            self._fullversion = sep.join((self.version, self.revision))
+        return self._fullversion
+
+    def _set_cpv_chunks(self):
+        chunks = split_cpv(self.cpv, validate=self.validate)
+        self._category = chunks[0]
+        self._name = chunks[1]
+        self._version = chunks[2]
+        self._revision = chunks[3]
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return self.cpv == other.cpv
+
+    def __hash__(self):
+        return hash(self.cpv)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __lt__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+
+        if self.category != other.category:
+            return self.category < other.category
+        elif self.name != other.name:
+            return self.name < other.name
+        else:
+            # FIXME: this cmp() hack is for vercmp not using -1,0,1
+            # See bug 266493; this was fixed in portage-2.2_rc31
+            # return vercmp(self.fullversion, other.fullversion)
+            return vercmp(self.fullversion, other.fullversion) < 0
+
+    def __gt__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        return not self <= other
+
+    def __le__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        return self < other or self == other
+
+    def __ge__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        return self > other or self == other
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, str(self))
+
+    def __str__(self):
+        return self.cpv
 
 
 # =========
 # Functions
 # =========
 
+
 def compare_strs(pkg1, pkg2):
-	"""Similar to the builtin cmp, but for package strings. Usually called
-	as: package_list.sort(cpv.compare_strs)
+    """Similar to the builtin cmp, but for package strings. Usually called
+    as: package_list.sort(cpv.compare_strs)
 
-	An alternative is to use the CPV descriptor from gentoolkit.cpv:
-	>>> package_list = ['sys-apps/portage-9999', 'media-video/ffmpeg-9999']
-	>>> cpvs = sorted(CPV(x) for x in package_list)
+    An alternative is to use the CPV descriptor from gentoolkit.cpv:
+    >>> package_list = ['sys-apps/portage-9999', 'media-video/ffmpeg-9999']
+    >>> cpvs = sorted(CPV(x) for x in package_list)
 
-	@see: >>> help(cmp)
-	"""
+    @see: >>> help(cmp)
+    """
 
-	pkg1 = catpkgsplit(pkg1)
-	pkg2 = catpkgsplit(pkg2)
-	if pkg1[0] != pkg2[0]:
-		return -1 if pkg1[0] < pkg2[0] else 1
-	elif pkg1[1] != pkg2[1]:
-		return -1 if pkg1[1] < pkg2[1] else 1
-	else:
-		return pkgcmp(pkg1[1:], pkg2[1:])
+    pkg1 = catpkgsplit(pkg1)
+    pkg2 = catpkgsplit(pkg2)
+    if pkg1[0] != pkg2[0]:
+        return -1 if pkg1[0] < pkg2[0] else 1
+    elif pkg1[1] != pkg2[1]:
+        return -1 if pkg1[1] < pkg2[1] else 1
+    else:
+        return pkgcmp(pkg1[1:], pkg2[1:])
 
 
 def split_cpv(cpv, validate=True):
-	"""Split a cpv into category, name, version and revision.
-
-	Modified from pkgcore.ebuild.cpv
-
-	@type cpv: str
-	@param cpv: pkg, cat/pkg, pkg-ver, cat/pkg-ver
-	@rtype: tuple
-	@return: (category, pkg_name, version, revision)
-		Each tuple element is a string or empty string ("").
-	"""
-
-	category = name = version = revision = ''
-
-	try:
-		category, pkgver = cpv.rsplit("/", 1)
-	except ValueError:
-		pkgver = cpv
-	if validate and category and not isvalid_cat_re.match(category):
-		raise errors.GentoolkitInvalidCPV(cpv)
-	pkg_chunks = pkgver.split("-")
-	lpkg_chunks = len(pkg_chunks)
-	if lpkg_chunks == 1:
-		return (category, pkg_chunks[0], version, revision)
-	if isvalid_rev(pkg_chunks[-1]):
-		if lpkg_chunks < 3:
-			# needs at least ('pkg', 'ver', 'rev')
-			raise errors.GentoolkitInvalidCPV(cpv)
-		rev = pkg_chunks.pop(-1)
-		if rev:
-			revision = rev
-
-	if isvalid_version_re.match(pkg_chunks[-1]):
-		version = pkg_chunks.pop(-1)
-
-	if not isvalid_pkg_name(pkg_chunks):
-		raise errors.GentoolkitInvalidCPV(cpv)
-	name = '-'.join(pkg_chunks)
-
-	return (category, name, version, revision)
+    """Split a cpv into category, name, version and revision.
+
+    Modified from pkgcore.ebuild.cpv
+
+    @type cpv: str
+    @param cpv: pkg, cat/pkg, pkg-ver, cat/pkg-ver
+    @rtype: tuple
+    @return: (category, pkg_name, version, revision)
+            Each tuple element is a string or empty string ("").
+    """
+
+    category = name = version = revision = ""
+
+    try:
+        category, pkgver = cpv.rsplit("/", 1)
+    except ValueError:
+        pkgver = cpv
+    if validate and category and not isvalid_cat_re.match(category):
+        raise errors.GentoolkitInvalidCPV(cpv)
+    pkg_chunks = pkgver.split("-")
+    lpkg_chunks = len(pkg_chunks)
+    if lpkg_chunks == 1:
+        return (category, pkg_chunks[0], version, revision)
+    if isvalid_rev(pkg_chunks[-1]):
+        if lpkg_chunks < 3:
+            # needs at least ('pkg', 'ver', 'rev')
+            raise errors.GentoolkitInvalidCPV(cpv)
+        rev = pkg_chunks.pop(-1)
+        if rev:
+            revision = rev
+
+    if isvalid_version_re.match(pkg_chunks[-1]):
+        version = pkg_chunks.pop(-1)
+
+    if not isvalid_pkg_name(pkg_chunks):
+        raise errors.GentoolkitInvalidCPV(cpv)
+    name = "-".join(pkg_chunks)
+
+    return (category, name, version, revision)
 
 
 def isvalid_pkg_name(chunks):
-	if not chunks[0]:
-		# this means a leading -
-		return False
-	mf = _pkg_re.match
-	if not all(not s or mf(s) for s in chunks):
-		return False
-	if len(chunks) > 1 and chunks[-1].isdigit():
-		# not allowed.
-		return False
-	return True
+    if not chunks[0]:
+        # this means a leading -
+        return False
+    mf = _pkg_re.match
+    if not all(not s or mf(s) for s in chunks):
+        return False
+    if len(chunks) > 1 and chunks[-1].isdigit():
+        # not allowed.
+        return False
+    return True
 
 
 def isvalid_rev(s):
-	return s and s[0] == 'r' and isvalid_rev_re.match(s[1:])
+    return s and s[0] == "r" and isvalid_rev_re.match(s[1:])
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/dbapi.py b/pym/gentoolkit/dbapi.py
index be37f32..9e480f8 100644
--- a/pym/gentoolkit/dbapi.py
+++ b/pym/gentoolkit/dbapi.py
@@ -9,14 +9,16 @@
 	take advantage of them being lazy-loaded.
 """
 
-print("gentoolkit.dbapi is deprecated.\n",
-	"Please migrate to using the assigned calls directly")
+print(
+    "gentoolkit.dbapi is deprecated.\n",
+    "Please migrate to using the assigned calls directly",
+)
 
 import portage
 
 BINDB = portage.db[portage.root]["bintree"].dbapi
 PORTDB = portage.db[portage.root]["porttree"].dbapi
 VARDB = portage.db[portage.root]["vartree"].dbapi
-#virtuals = portage.db[portage.root]["virtuals"]
+# virtuals = portage.db[portage.root]["virtuals"]
 
 # vim: set ts=8 sw=4 tw=79:

diff --git a/pym/gentoolkit/dependencies.py b/pym/gentoolkit/dependencies.py
index 38676a2..f94b82e 100644
--- a/pym/gentoolkit/dependencies.py
+++ b/pym/gentoolkit/dependencies.py
@@ -4,8 +4,8 @@
 
 """Provides a class for easy calculating dependencies for a given CPV."""
 
-__docformat__ = 'epytext'
-__all__ = ('Dependencies',)
+__docformat__ = "epytext"
+__all__ = ("Dependencies",)
 
 # =======
 # Imports
@@ -23,302 +23,314 @@ from gentoolkit.query import Query
 # Classes
 # =======
 
+
 class Dependencies(Query):
-	"""Access a package's dependencies and reverse dependencies.
-
-	Example usage:
-		>>> from gentoolkit.dependencies import Dependencies
-		>>> portage = Dependencies('sys-apps/portage-9999')
-		>>> portage
-		<Dependencies 'sys-apps/portage-9999'>
-		>>> # All methods return gentoolkit.atom.Atom instances
-		... portage.get_depend()
-		... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
-		[<Atom 'python3? =dev-lang/python-3*'>,
-		 <Atom '!python3? >=dev-lang/python-2.7'>, ...]
-
-	"""
-	def __init__(self, query, parser=None):
-		Query.__init__(self, query)
-		self.use = []
-		self.depatom = str()
-
-		# Allow a custom parser function:
-		self.parser = parser if parser else self._parser
-
-	def __eq__(self, other):
-		if self.atom != other.atom:
-			return False
-		else:
-			return True
-
-	def __ne__(self, other):
-		return not self == other
-
-	def __hash__(self):
-		return hash((self.atom, self.depatom, tuple(self.use)))
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.atom)
-
-	def environment(self, envvars):
-		"""Returns predefined env vars DEPEND, SRC_URI, etc."""
-
-		# Try to use the Portage tree first, since emerge only uses the tree
-		# when calculating dependencies
-		try:
-			result = portage.db[portage.root]["porttree"].dbapi.aux_get(self.cpv, envvars)
-		except KeyError:
-			try:
-				result = portage.db[portage.root]["vartree"].dbapi.aux_get(self.cpv, envvars)
-			except KeyError:
-				return []
-		return result
-
-	def _get_depend(self, env_vars, raw=False):
-		raw_depend = ' '.join(self.environment(env_vars))
-		if raw:
-			return raw_depend
-		try:
-			return self.parser(raw_depend)
-		except portage.exception.InvalidPackageName as err:
-			raise errors.GentoolkitInvalidCPV(err)
-
-	def get_depend(self, **kwargs):
-		"""Get the contents of DEPEND and parse it with self.parser."""
-		return self._get_depend(('DEPEND', ), **kwargs)
-
-	def get_pdepend(self, **kwargs):
-		"""Get the contents of PDEPEND and parse it with self.parser."""
-		return self._get_depend(('PDEPEND', ), **kwargs)
-
-	def get_rdepend(self, **kwargs):
-		"""Get the contents of RDEPEND and parse it with self.parser."""
-		return self._get_depend(('RDEPEND', ), **kwargs)
-
-	def get_all_depends(self, **kwargs):
-		"""Get the contents of ?DEPEND and parse it with self.parser."""
-		env_vars = ('DEPEND', 'PDEPEND', 'RDEPEND', 'BDEPEND')
-		return self._get_depend(env_vars, **kwargs)
-
-	def graph_depends(
-		self,
-		max_depth=1,
-		printer_fn=None,
-		# The rest of these are only used internally:
-		depth=1,
-		seen=None,
-		depcache=None,
-		result=None
-	):
-		"""Graph direct dependencies for self.
-
-		Optionally gather indirect dependencies.
-
-		@type max_depth: int
-		@keyword max_depth: Maximum depth to recurse if.
-			<1 means no maximum depth
-			>0 means recurse only this depth;
-		@type printer_fn: callable
-		@keyword printer_fn: If None, no effect. If set, it will be applied to
-			each result.
-		@rtype: list
-		@return: [(depth, pkg), ...]
-		"""
-		if seen is None:
-			seen = set()
-		if depcache is None:
-			depcache = dict()
-		if result is None:
-			result = list()
-
-		pkgdep = None
-		deps = self.get_all_depends()
-		for dep in deps:
-			if dep.atom in depcache:
-				continue
-			try:
-				pkgdep = depcache[dep.atom]
-			except KeyError:
-				pkgdep = Query(dep.atom).find_best()
-				depcache[dep.atom] = pkgdep
-			if not pkgdep:
-				continue
-			elif pkgdep.cpv in seen:
-				continue
-			if depth <= max_depth or max_depth == 0:
-				if printer_fn is not None:
-					printer_fn(depth, pkgdep, dep)
-				result.append((depth,pkgdep))
-
-				seen.add(pkgdep.cpv)
-				if depth < max_depth or max_depth == 0:
-					# result is passed in and added to directly
-					# so rdeps is disposable
-					rdeps = pkgdep.deps.graph_depends(  # noqa
-							max_depth=max_depth,
-							printer_fn=printer_fn,
-							# The rest of these are only used internally:
-							depth=depth+1,
-							seen=seen,
-							depcache=depcache,
-							result=result
-						)
-		return result
-
-	def graph_reverse_depends(
-		self,
-		pkgset=None,
-		max_depth=-1,
-		only_direct=True,
-		printer_fn=None,
-		# The rest of these are only used internally:
-		depth=0,
-		depcache=None,
-		seen=None,
-		result=None
-	):
-		"""Graph direct reverse dependencies for self.
-
-		Example usage:
-			>>> from gentoolkit.dependencies import Dependencies
-			>>> ffmpeg = Dependencies('media-video/ffmpeg-9999')
-			>>> # I only care about installed packages that depend on me:
-			... from gentoolkit.helpers import get_installed_cpvs
-			>>> # I want to pass in a sorted list. We can pass strings or
-			... # Package or Atom types, so I'll use Package to sort:
-			... from gentoolkit.package import Package
-			>>> installed = sorted(get_installed_cpvs())
-			>>> deptree = ffmpeg.graph_reverse_depends(
-			...     only_direct=False,  # Include indirect revdeps
-			...     pkgset=installed)   # from installed pkgset
-			>>> len(deptree)
-			24
-
-		@type pkgset: iterable
-		@keyword pkgset: sorted pkg cpv strings or anything sublassing
-			L{gentoolkit.cpv.CPV} to use for calculate our revdep graph.
-		@type max_depth: int
-		@keyword max_depth: Maximum depth to recurse if only_direct=False.
-			-1 means no maximum depth;
-			 0 is the same as only_direct=True;
-			>0 means recurse only this many times;
-		@type only_direct: bool
-		@keyword only_direct: to recurse or not to recurse
-		@type printer_fn: callable
-		@keyword printer_fn: If None, no effect. If set, it will be applied to
-			each L{gentoolkit.atom.Atom} object as it is added to the results.
-		@rtype: list
-		@return: L{gentoolkit.dependencies.Dependencies} objects
-		"""
-		if not pkgset:
-			err = ("%s kwarg 'pkgset' must be set. "
-				"Can be list of cpv strings or any 'intersectable' object.")
-			raise errors.GentoolkitFatalError(err % (self.__class__.__name__,))
-
-		if depcache is None:
-			depcache = dict()
-		if seen is None:
-			seen = set()
-		if result is None:
-			result = list()
-
-		if depth == 0:
-			pkgset = tuple(Dependencies(x) for x in pkgset)
-
-		pkgdep = None
-		for pkgdep in pkgset:
-			raw_depends = pkgdep.get_all_depends(raw=True)
-			if self.cp not in raw_depends:
-				# fast path for obviously non-matching packages. This saves
-				# us the work of instantiating a whole Atom() for *every*
-				# dependency of *every* package in pkgset.
-				continue
-			try:
-				all_depends = depcache[pkgdep]
-			except KeyError:
-				all_depends = uniqify(pkgdep.get_all_depends())
-				depcache[pkgdep] = all_depends
-
-			dep_is_displayed = False
-			for dep in all_depends:
-				# TODO: Add ability to determine if dep is enabled by USE flag.
-				#       Check portage.dep.use_reduce
-				if dep.intersects(self):
-					pkgdep.depth = depth
-					pkgdep.matching_dep = dep
-					if printer_fn is not None:
-						printer_fn(pkgdep, dep_is_displayed=dep_is_displayed)
-					result.append(pkgdep)
-					dep_is_displayed = True
-
-			# if --indirect specified, call ourselves again with the dep
-			# Do not call if we have already called ourselves.
-			if (
-				dep_is_displayed and not only_direct and
-				pkgdep.cpv not in seen and
-				(depth < max_depth or max_depth == -1)
-			):
-
-				seen.add(pkgdep.cpv)
-				result.append(
-					pkgdep.graph_reverse_depends(
-						pkgset=pkgset,
-						max_depth=max_depth,
-						only_direct=only_direct,
-						printer_fn=printer_fn,
-						depth=depth+1,
-						depcache=depcache,
-						seen=seen,
-						result=result
-					)
-				)
-
-		if depth == 0:
-			return result
-		return pkgdep
-
-	def _parser(self, deps, use_conditional=None, depth=0):
-		"""?DEPEND file parser.
-
-		@rtype: list
-		@return: L{gentoolkit.atom.Atom} objects
-		"""
-		result = []
-
-		if depth == 0:
-			deps = paren_reduce(deps)
-		for tok in deps:
-			if tok == '||':
-				continue
-			if tok[-1] == '?':
-				use_conditional = tok[:-1]
-				continue
-			if isinstance(tok, list):
-				sub_r = self._parser(tok, use_conditional, depth=depth+1)
-				result.extend(sub_r)
-				use_conditional = None
-				continue
-			# FIXME: This is a quick fix for bug #299260.
-			#        A better fix is to not discard blockers in the parser,
-			#        but to check for atom.blocker in whatever equery/depends
-			#        (in this case) and ignore them there.
-			# TODO: Test to see how much a performance impact ignoring
-			#       blockers here rather than checking for atom.blocker has.
-			if tok[0] == '!':
-				# We're not interested in blockers
-				continue
-			# skip it if it's empty
-			if tok and tok != '':
-				atom = Atom(tok)
-				if use_conditional is not None:
-					atom.use_conditional = use_conditional
-				result.append(atom)
-			else:
-				message = "dependencies.py: _parser() found an empty " +\
-					"dep string token for: %s, deps= %s"
-				raise errors.GentoolkitInvalidAtom(message %(self.cpv, deps))
-
-		return result
+    """Access a package's dependencies and reverse dependencies.
+
+    Example usage:
+            >>> from gentoolkit.dependencies import Dependencies
+            >>> portage = Dependencies('sys-apps/portage-9999')
+            >>> portage
+            <Dependencies 'sys-apps/portage-9999'>
+            >>> # All methods return gentoolkit.atom.Atom instances
+            ... portage.get_depend()
+            ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
+            [<Atom 'python3? =dev-lang/python-3*'>,
+             <Atom '!python3? >=dev-lang/python-2.7'>, ...]
+
+    """
+
+    def __init__(self, query, parser=None):
+        Query.__init__(self, query)
+        self.use = []
+        self.depatom = str()
+
+        # Allow a custom parser function:
+        self.parser = parser if parser else self._parser
+
+    def __eq__(self, other):
+        if self.atom != other.atom:
+            return False
+        else:
+            return True
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.atom, self.depatom, tuple(self.use)))
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.atom)
+
+    def environment(self, envvars):
+        """Returns predefined env vars DEPEND, SRC_URI, etc."""
+
+        # Try to use the Portage tree first, since emerge only uses the tree
+        # when calculating dependencies
+        try:
+            result = portage.db[portage.root]["porttree"].dbapi.aux_get(
+                self.cpv, envvars
+            )
+        except KeyError:
+            try:
+                result = portage.db[portage.root]["vartree"].dbapi.aux_get(
+                    self.cpv, envvars
+                )
+            except KeyError:
+                return []
+        return result
+
+    def _get_depend(self, env_vars, raw=False):
+        raw_depend = " ".join(self.environment(env_vars))
+        if raw:
+            return raw_depend
+        try:
+            return self.parser(raw_depend)
+        except portage.exception.InvalidPackageName as err:
+            raise errors.GentoolkitInvalidCPV(err)
+
+    def get_depend(self, **kwargs):
+        """Get the contents of DEPEND and parse it with self.parser."""
+        return self._get_depend(("DEPEND",), **kwargs)
+
+    def get_pdepend(self, **kwargs):
+        """Get the contents of PDEPEND and parse it with self.parser."""
+        return self._get_depend(("PDEPEND",), **kwargs)
+
+    def get_rdepend(self, **kwargs):
+        """Get the contents of RDEPEND and parse it with self.parser."""
+        return self._get_depend(("RDEPEND",), **kwargs)
+
+    def get_all_depends(self, **kwargs):
+        """Get the contents of ?DEPEND and parse it with self.parser."""
+        env_vars = ("DEPEND", "PDEPEND", "RDEPEND", "BDEPEND")
+        return self._get_depend(env_vars, **kwargs)
+
+    def graph_depends(
+        self,
+        max_depth=1,
+        printer_fn=None,
+        # The rest of these are only used internally:
+        depth=1,
+        seen=None,
+        depcache=None,
+        result=None,
+    ):
+        """Graph direct dependencies for self.
+
+        Optionally gather indirect dependencies.
+
+        @type max_depth: int
+        @keyword max_depth: Maximum depth to recurse to.
+                <1 means no maximum depth
+                >0 means recurse only this depth;
+        @type printer_fn: callable
+        @keyword printer_fn: If None, no effect. If set, it will be applied to
+                each result.
+        @rtype: list
+        @return: [(depth, pkg), ...]
+        """
+        if seen is None:
+            seen = set()
+        if depcache is None:
+            depcache = dict()
+        if result is None:
+            result = list()
+
+        pkgdep = None
+        deps = self.get_all_depends()
+        for dep in deps:
+            if dep.atom in depcache:
+                continue
+            try:
+                pkgdep = depcache[dep.atom]
+            except KeyError:
+                pkgdep = Query(dep.atom).find_best()
+                depcache[dep.atom] = pkgdep
+            if not pkgdep:
+                continue
+            elif pkgdep.cpv in seen:
+                continue
+            if depth <= max_depth or max_depth == 0:
+                if printer_fn is not None:
+                    printer_fn(depth, pkgdep, dep)
+                result.append((depth, pkgdep))
+
+                seen.add(pkgdep.cpv)
+                if depth < max_depth or max_depth == 0:
+                    # result is passed in and added to directly
+                    # so rdeps is disposable
+                    rdeps = pkgdep.deps.graph_depends(  # noqa
+                        max_depth=max_depth,
+                        printer_fn=printer_fn,
+                        # The rest of these are only used internally:
+                        depth=depth + 1,
+                        seen=seen,
+                        depcache=depcache,
+                        result=result,
+                    )
+        return result
+
+    def graph_reverse_depends(
+        self,
+        pkgset=None,
+        max_depth=-1,
+        only_direct=True,
+        printer_fn=None,
+        # The rest of these are only used internally:
+        depth=0,
+        depcache=None,
+        seen=None,
+        result=None,
+    ):
+        """Graph direct reverse dependencies for self.
+
+        Example usage:
+                >>> from gentoolkit.dependencies import Dependencies
+                >>> ffmpeg = Dependencies('media-video/ffmpeg-9999')
+                >>> # I only care about installed packages that depend on me:
+                ... from gentoolkit.helpers import get_installed_cpvs
+                >>> # I want to pass in a sorted list. We can pass strings or
+                ... # Package or Atom types, so I'll use Package to sort:
+                ... from gentoolkit.package import Package
+                >>> installed = sorted(get_installed_cpvs())
+                >>> deptree = ffmpeg.graph_reverse_depends(
+                ...     only_direct=False,  # Include indirect revdeps
+                ...     pkgset=installed)   # from installed pkgset
+                >>> len(deptree)
+                24
+
+        @type pkgset: iterable
+        @keyword pkgset: sorted pkg cpv strings or anything subclassing
+                L{gentoolkit.cpv.CPV} to use to calculate our revdep graph.
+        @type max_depth: int
+        @keyword max_depth: Maximum depth to recurse if only_direct=False.
+                -1 means no maximum depth;
+                 0 is the same as only_direct=True;
+                >0 means recurse only this many times;
+        @type only_direct: bool
+        @keyword only_direct: to recurse or not to recurse
+        @type printer_fn: callable
+        @keyword printer_fn: If None, no effect. If set, it will be applied to
+                each L{gentoolkit.atom.Atom} object as it is added to the results.
+        @rtype: list
+        @return: L{gentoolkit.dependencies.Dependencies} objects
+        """
+        if not pkgset:
+            err = (
+                "%s kwarg 'pkgset' must be set. "
+                "Can be list of cpv strings or any 'intersectable' object."
+            )
+            raise errors.GentoolkitFatalError(err % (self.__class__.__name__,))
+
+        if depcache is None:
+            depcache = dict()
+        if seen is None:
+            seen = set()
+        if result is None:
+            result = list()
+
+        if depth == 0:
+            pkgset = tuple(Dependencies(x) for x in pkgset)
+
+        pkgdep = None
+        for pkgdep in pkgset:
+            raw_depends = pkgdep.get_all_depends(raw=True)
+            if self.cp not in raw_depends:
+                # fast path for obviously non-matching packages. This saves
+                # us the work of instantiating a whole Atom() for *every*
+                # dependency of *every* package in pkgset.
+                continue
+            try:
+                all_depends = depcache[pkgdep]
+            except KeyError:
+                all_depends = uniqify(pkgdep.get_all_depends())
+                depcache[pkgdep] = all_depends
+
+            dep_is_displayed = False
+            for dep in all_depends:
+                # TODO: Add ability to determine if dep is enabled by USE flag.
+                #       Check portage.dep.use_reduce
+                if dep.intersects(self):
+                    pkgdep.depth = depth
+                    pkgdep.matching_dep = dep
+                    if printer_fn is not None:
+                        printer_fn(pkgdep, dep_is_displayed=dep_is_displayed)
+                    result.append(pkgdep)
+                    dep_is_displayed = True
+
+            # if --indirect specified, call ourselves again with the dep
+            # Do not call if we have already called ourselves.
+            if (
+                dep_is_displayed
+                and not only_direct
+                and pkgdep.cpv not in seen
+                and (depth < max_depth or max_depth == -1)
+            ):
+
+                seen.add(pkgdep.cpv)
+                result.append(
+                    pkgdep.graph_reverse_depends(
+                        pkgset=pkgset,
+                        max_depth=max_depth,
+                        only_direct=only_direct,
+                        printer_fn=printer_fn,
+                        depth=depth + 1,
+                        depcache=depcache,
+                        seen=seen,
+                        result=result,
+                    )
+                )
+
+        if depth == 0:
+            return result
+        return pkgdep
+
+    def _parser(self, deps, use_conditional=None, depth=0):
+        """?DEPEND file parser.
+
+        @rtype: list
+        @return: L{gentoolkit.atom.Atom} objects
+        """
+        result = []
+
+        if depth == 0:
+            deps = paren_reduce(deps)
+        for tok in deps:
+            if tok == "||":
+                continue
+            if tok[-1] == "?":
+                use_conditional = tok[:-1]
+                continue
+            if isinstance(tok, list):
+                sub_r = self._parser(tok, use_conditional, depth=depth + 1)
+                result.extend(sub_r)
+                use_conditional = None
+                continue
+            # FIXME: This is a quick fix for bug #299260.
+            #        A better fix is to not discard blockers in the parser,
+            #        but to check for atom.blocker in whatever equery/depends
+            #        (in this case) and ignore them there.
+            # TODO: Test to see how much a performance impact ignoring
+            #       blockers here rather than checking for atom.blocker has.
+            if tok[0] == "!":
+                # We're not interested in blockers
+                continue
+            # skip it if it's empty
+            if tok and tok != "":
+                atom = Atom(tok)
+                if use_conditional is not None:
+                    atom.use_conditional = use_conditional
+                result.append(atom)
+            else:
+                message = (
+                    "dependencies.py: _parser() found an empty "
+                    + "dep string token for: %s, deps= %s"
+                )
+                raise errors.GentoolkitInvalidAtom(message % (self.cpv, deps))
+
+        return result
+
 
 # vim: set ts=4 sw=4 tw=0:

diff --git a/pym/gentoolkit/eclean/clean.py b/pym/gentoolkit/eclean/clean.py
index 3f6fe45..a6358a4 100644
--- a/pym/gentoolkit/eclean/clean.py
+++ b/pym/gentoolkit/eclean/clean.py
@@ -12,142 +12,138 @@ from gentoolkit.eclean.pkgindex import PkgIndex
 
 
 class CleanUp:
-	"""Performs all cleaning actions to distfiles or package directories.
-
-	@param controller: a progress output/user interaction controller function
-					   which returns a Boolean to control file deletion
-					   or bypassing/ignoring
-	"""
-
-	def __init__(self, controller):
-		self.controller = controller
-
-	def clean_dist(self, clean_dict):
-		"""Calculate size of each entry for display, prompt user if needed,
-		delete files if approved and return the total size of files that
-		have been deleted.
-
-		@param clean_dict: dictionary of {'display name':[list of files]}
-
-		@rtype: int
-		@return: total size that was cleaned
-		"""
-		file_type = 'file'
-		clean_size = 0
-		# clean all entries one by one; sorting helps reading
-		for key in sorted(clean_dict):
-			clean_size += self._clean_files(clean_dict[key], key, file_type)
-		# return total size of deleted or to delete files
-		return clean_size
-
-	def clean_pkgs(self, clean_dict, pkgdir):
-		"""Calculate size of each entry for display, prompt user if needed,
-		delete files if approved and return the total size of files that
-		have been deleted.
-
-		@param clean_dict:  dictionary of  {'display name':[list of files]}
-		@param metadata: package index of type portage.getbinpkg.PackageIndex()
-		@param pkgdir: path to the package directory to be cleaned
-
-		@rtype: int
-		@return: total size that was cleaned
-		"""
-		file_type = 'binary package'
-		clean_size = 0
-		# clean all entries one by one; sorting helps reading
-		for key in sorted(clean_dict):
-			clean_size += self._clean_files(clean_dict[key], key, file_type)
-
-		#  run 'emaint --fix' here
-		if clean_size:
-			index_control = PkgIndex(self.controller)
-			# emaint is not yet importable so call it
-			# print a blank line here for separation
-			print()
-			clean_size += index_control.call_emaint()
-		# return total size of deleted or to delete files
-		return clean_size
-
-
-	def pretend_clean(self, clean_dict):
-		"""Shortcut function that calculates total space savings
-		for the files in clean_dict.
-
-		@param clean_dict: dictionary of {'display name':[list of files]}
-		@rtype: integer
-		@return: total size that would be cleaned
-		"""
-		file_type = 'file'
-		clean_size = 0
-		# tally all entries one by one; sorting helps reading
-		for key in sorted(clean_dict):
-			key_size = self._get_size(clean_dict[key])
-			self.controller(key_size, key, clean_dict[key], file_type)
-			clean_size += key_size
-		return clean_size
-
-	def _get_size(self, key):
-		"""Determine the total size for an entry (may be several files)."""
-		key_size = 0
-		for file_ in key:
-			#print file_
-			# get total size for an entry (may be several files, and
-			# links don't count
-			# ...get its statinfo
-			try:
-				statinfo = os.stat(file_)
-				if statinfo.st_nlink == 1:
-					key_size += statinfo.st_size
-			except EnvironmentError as er:
-				print( pp.error(
-					"Could not get stat info for:" + file_), file=sys.stderr)
-				print( pp.error("Error: %s" %str(er)), file=sys.stderr)
-		return key_size
-
-	def _clean_files(self, files, key, file_type):
-		"""File removal function."""
-		clean_size = 0
-		for file_ in files:
-			#print file_, type(file_)
-			# ...get its statinfo
-			try:
-				statinfo = os.stat(file_)
-			except EnvironmentError as er:
-				if not os.path.exists(os.readlink(file_)):
-					try:
-						os.remove(file_)
-						print( pp.error(
-							"Removed broken symbolic link " + file_), file=sys.stderr)
-						break
-					except EnvironmentError as er:
-						print( pp.error(
-							"Error deleting broken symbolic link " + file_), file=sys.stderr)
-						print( pp.error("Error: %s" %str(er)), file=sys.stderr)
-						break
-				else:
-					print( pp.error(
-						"Could not get stat info for:" + file_), file=sys.stderr)
-					print( pp.error(
-						"Error: %s" %str(er)), file=sys.stderr)
-			if self.controller(statinfo.st_size, key, file_, file_type):
-				# ... try to delete it.
-				try:
-					os.unlink(file_)
-					# only count size if successfully deleted and not a link
-					if statinfo.st_nlink == 1:
-						clean_size += statinfo.st_size
-						try:
-							os.rmdir(os.path.dirname(file_))
-						except OSError:
-							pass
-				except EnvironmentError as er:
-					print( pp.error("Could not delete "+file_), file=sys.stderr)
-					print( pp.error("Error: %s" %str(er)), file=sys.stderr)
-		return clean_size
-
-
-
-
-
-
-
+    """Performs all cleaning actions to distfiles or package directories.
+
+    @param controller: a progress output/user interaction controller function
+                                       which returns a Boolean to control file deletion
+                                       or bypassing/ignoring
+    """
+
+    def __init__(self, controller):
+        self.controller = controller
+
+    def clean_dist(self, clean_dict):
+        """Calculate size of each entry for display, prompt user if needed,
+        delete files if approved and return the total size of files that
+        have been deleted.
+
+        @param clean_dict: dictionary of {'display name':[list of files]}
+
+        @rtype: int
+        @return: total size that was cleaned
+        """
+        file_type = "file"
+        clean_size = 0
+        # clean all entries one by one; sorting helps reading
+        for key in sorted(clean_dict):
+            clean_size += self._clean_files(clean_dict[key], key, file_type)
+        # return total size of deleted or to delete files
+        return clean_size
+
+    def clean_pkgs(self, clean_dict, pkgdir):
+        """Calculate size of each entry for display, prompt user if needed,
+        delete files if approved and return the total size of files that
+        have been deleted.
+
+        @param clean_dict:  dictionary of  {'display name':[list of files]}
+        @param metadata: package index of type portage.getbinpkg.PackageIndex()
+        @param pkgdir: path to the package directory to be cleaned
+
+        @rtype: int
+        @return: total size that was cleaned
+        """
+        file_type = "binary package"
+        clean_size = 0
+        # clean all entries one by one; sorting helps reading
+        for key in sorted(clean_dict):
+            clean_size += self._clean_files(clean_dict[key], key, file_type)
+
+        #  run 'emaint --fix' here
+        if clean_size:
+            index_control = PkgIndex(self.controller)
+            # emaint is not yet importable so call it
+            # print a blank line here for separation
+            print()
+            clean_size += index_control.call_emaint()
+        # return total size of deleted or to delete files
+        return clean_size
+
+    def pretend_clean(self, clean_dict):
+        """Shortcut function that calculates total space savings
+        for the files in clean_dict.
+
+        @param clean_dict: dictionary of {'display name':[list of files]}
+        @rtype: integer
+        @return: total size that would be cleaned
+        """
+        file_type = "file"
+        clean_size = 0
+        # tally all entries one by one; sorting helps reading
+        for key in sorted(clean_dict):
+            key_size = self._get_size(clean_dict[key])
+            self.controller(key_size, key, clean_dict[key], file_type)
+            clean_size += key_size
+        return clean_size
+
+    def _get_size(self, key):
+        """Determine the total size for an entry (may be several files)."""
+        key_size = 0
+        for file_ in key:
+            # print file_
+            # get total size for an entry (may be several files, and
+            # links don't count
+            # ...get its statinfo
+            try:
+                statinfo = os.stat(file_)
+                if statinfo.st_nlink == 1:
+                    key_size += statinfo.st_size
+            except EnvironmentError as er:
+                print(pp.error("Could not get stat info for:" + file_), file=sys.stderr)
+                print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+        return key_size
+
+    def _clean_files(self, files, key, file_type):
+        """File removal function."""
+        clean_size = 0
+        for file_ in files:
+            # print file_, type(file_)
+            # ...get its statinfo
+            try:
+                statinfo = os.stat(file_)
+            except EnvironmentError as er:
+                if not os.path.exists(os.readlink(file_)):
+                    try:
+                        os.remove(file_)
+                        print(
+                            pp.error("Removed broken symbolic link " + file_),
+                            file=sys.stderr,
+                        )
+                        break
+                    except EnvironmentError as er:
+                        print(
+                            pp.error("Error deleting broken symbolic link " + file_),
+                            file=sys.stderr,
+                        )
+                        print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+                        break
+                else:
+                    print(
+                        pp.error("Could not get stat info for:" + file_),
+                        file=sys.stderr,
+                    )
+                    print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+            if self.controller(statinfo.st_size, key, file_, file_type):
+                # ... try to delete it.
+                try:
+                    os.unlink(file_)
+                    # only count size if successfully deleted and not a link
+                    if statinfo.st_nlink == 1:
+                        clean_size += statinfo.st_size
+                        try:
+                            os.rmdir(os.path.dirname(file_))
+                        except OSError:
+                            pass
+                except EnvironmentError as er:
+                    print(pp.error("Could not delete " + file_), file=sys.stderr)
+                    print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+        return clean_size

diff --git a/pym/gentoolkit/eclean/cli.py b/pym/gentoolkit/eclean/cli.py
index e31fde9..bb6deeb 100644
--- a/pym/gentoolkit/eclean/cli.py
+++ b/pym/gentoolkit/eclean/cli.py
@@ -4,10 +4,11 @@
 # Distributed under the terms of the GNU General Public License v2
 
 
-__author__ = "Thomas de Grenier de Latour (tgl), " + \
-	"modular re-write by: Brian Dolbec (dol-sen)"
-__email__ = "degrenier@easyconnect.fr, " + \
-	"brian.dolbec@gmail.com"
+__author__ = (
+    "Thomas de Grenier de Latour (tgl), "
+    + "modular re-write by: Brian Dolbec (dol-sen)"
+)
+__email__ = "degrenier@easyconnect.fr, " + "brian.dolbec@gmail.com"
 __version__ = "git"
 __productname__ = "eclean"
 __description__ = "A cleaning tool for Gentoo distfiles and binaries."
@@ -23,494 +24,654 @@ import portage
 from portage.output import white, yellow, turquoise, green
 
 import gentoolkit.pprinter as pp
-from gentoolkit.eclean.search import (DistfilesSearch,
-	findPackages, port_settings, pkgdir)
-from gentoolkit.eclean.exclude import (parseExcludeFile,
-	ParseExcludeFileException)
+from gentoolkit.eclean.search import (
+    DistfilesSearch,
+    findPackages,
+    port_settings,
+    pkgdir,
+)
+from gentoolkit.eclean.exclude import parseExcludeFile, ParseExcludeFileException
 from gentoolkit.eclean.clean import CleanUp
 from gentoolkit.eclean.output import OutputControl
-#from gentoolkit.eclean.dbapi import Dbapi
+
+# from gentoolkit.eclean.dbapi import Dbapi
 from gentoolkit.eprefix import EPREFIX
 
+
 def printVersion():
-	"""Output the version info."""
-	print( "%s (%s) - %s" \
-			% (__productname__, __version__, __description__))
-	print()
-	print("Author: %s <%s>" % (__author__,__email__))
-	print("Copyright 2003-2009 Gentoo Foundation")
-	print("Distributed under the terms of the GNU General Public License v2")
+    """Output the version info."""
+    print("%s (%s) - %s" % (__productname__, __version__, __description__))
+    print()
+    print("Author: %s <%s>" % (__author__, __email__))
+    print("Copyright 2003-2009 Gentoo Foundation")
+    print("Distributed under the terms of the GNU General Public License v2")
 
 
 def printUsage(_error=None, help=None):
-	"""Print help message. May also print partial help to stderr if an
-	error from {'options','actions'} is specified."""
-
-	out = sys.stdout
-	if _error:
-		out = sys.stderr
-	if not _error in ('actions', 'global-options', \
-			'packages-options', 'distfiles-options', \
-			'merged-packages-options', 'merged-distfiles-options', \
-			'time', 'size'):
-		_error = None
-	if not _error and not help: help = 'all'
-	if _error == 'time':
-		print( pp.error("Wrong time specification"), file=out)
-		print( "Time specification should be an integer followed by a"+
-				" single letter unit.", file=out)
-		print( "Available units are: y (years), m (months), w (weeks), "+
-				"d (days) and h (hours).", file=out)
-		print( "For instance: \"1y\" is \"one year\", \"2w\" is \"two"+
-				" weeks\", etc. ", file=out)
-		return
-	if _error == 'size':
-		print( pp.error("Wrong size specification"), file=out)
-		print( "Size specification should be an integer followed by a"+
-				" single letter unit.", file=out)
-		print( "Available units are: G, M, K and B.", file=out)
-		print("For instance: \"10M\" is \"ten megabytes\", \"200K\" "+
-				"is \"two hundreds kilobytes\", etc.", file=out)
-		return
-	if _error in ('global-options', 'packages-options', 'distfiles-options', \
-			'merged-packages-options', 'merged-distfiles-options',):
-		print( pp.error("Wrong option on command line."), file=out)
-		print( file=out)
-	elif _error == 'actions':
-		print( pp.error("Wrong or missing action name on command line."), file=out)
-		print( file=out)
-	print( white("Usage:"), file=out)
-	if _error in ('actions','global-options', 'packages-options', \
-			'distfiles-options') or help == 'all':
-		print( " "+turquoise(__productname__),
-			yellow("[global-option] ..."),
-			green("<action>"),
-			yellow("[action-option] ..."), file=out)
-	if _error == 'merged-distfiles-options' or help in ('all','distfiles'):
-		print( " "+turquoise(__productname__+'-dist'),
-			yellow("[global-option, distfiles-option] ..."), file=out)
-	if _error == 'merged-packages-options' or help in ('all','packages'):
-		print( " "+turquoise(__productname__+'-pkg'),
-			yellow("[global-option, packages-option] ..."), file=out)
-	if _error in ('global-options', 'actions'):
-		print( " "+turquoise(__productname__),
-			yellow("[--help, --version]"), file=out)
-	if help == 'all':
-		print( " "+turquoise(__productname__+"(-dist,-pkg)"),
-			yellow("[--help, --version]"), file=out)
-	if _error == 'merged-packages-options' or help == 'packages':
-		print( " "+turquoise(__productname__+'-pkg'),
-			yellow("[--help, --version]"), file=out)
-	if _error == 'merged-distfiles-options' or help == 'distfiles':
-		print( " "+turquoise(__productname__+'-dist'),
-			yellow("[--help, --version]"), file=out)
-	print(file=out)
-	if _error in ('global-options', 'merged-packages-options', \
-	'merged-distfiles-options') or help:
-		print( "Available global", yellow("options")+":", file=out)
-		print( yellow(" -C, --nocolor")+
-			"             - turn off colors on output", file=out)
-		print( yellow(" -d, --deep")+
-			"                - only keep the minimum for a reinstallation", file=out)
-		print( yellow(" -e, --exclude-file=<path>")+
-			" - path to the exclusion file", file=out)
-		print( yellow(" -i, --interactive")+
-			"         - ask confirmation before deletions", file=out)
-		print( yellow(" -n, --package-names")+
-			"       - protect all versions (when --deep)", file=out)
-		print( yellow(" -p, --pretend")+
-			"             - only display what would be cleaned", file=out)
-		print( yellow(" -q, --quiet")+
-			"               - be as quiet as possible", file=out)
-		print( yellow(" -t, --time-limit=<time>")+
-			"   - don't delete files modified since "+yellow("<time>"), file=out)
-		print( "   "+yellow("<time>"), "is a duration: \"1y\" is"+
-				" \"one year\", \"2w\" is \"two weeks\", etc. ", file=out)
-		print( "   "+"Units are: y (years), m (months), w (weeks), "+
-				"d (days) and h (hours).", file=out)
-		print( yellow(" -h, --help")+ \
-			"                - display the help screen", file=out)
-		print( yellow(" -V, --version")+
-			"             - display version info", file=out)
-		print( file=out)
-	if _error == 'actions' or help == 'all':
-		print( "Available", green("actions")+":", file=out)
-		print( green(" packages")+
-			"     - clean outdated binary packages from PKGDIR", file=out)
-		print( green(" distfiles")+
-			"    - clean outdated packages sources files from DISTDIR", file=out)
-		print( file=out)
-	if _error in ('packages-options','merged-packages-options') \
-	or help in ('all','packages'):
-		print( "Available", yellow("options"),"for the",
-				green("packages"),"action:", file=out)
-		print( yellow("     --changed-deps")+
-			"               - delete packages for which ebuild dependencies have changed", file=out)
-		print( yellow(" -i, --ignore-failure")+
-			"             - ignore failure to locate PKGDIR", file=out)
-		print( file=out)
-	if _error in ('distfiles-options', 'merged-distfiles-options') \
-	or help in ('all','distfiles'):
-		print("Available", yellow("options"),"for the",
-				green("distfiles"),"action:", file=out)
-		print( yellow(" -f, --fetch-restricted")+
-			"   - protect fetch-restricted files (when --deep)", file=out)
-		print( yellow(" -s, --size-limit=<size>")+
-			"  - don't delete distfiles bigger than "+yellow("<size>"), file=out)
-		print( "   "+yellow("<size>"), "is a size specification: "+
-				"\"10M\" is \"ten megabytes\", \"200K\" is", file=out)
-		print( "   "+"\"two hundreds kilobytes\", etc.  Units are: "+
-				"G, M, K and B.", file=out)
-		print( file=out)
-	print( "More detailed instruction can be found in",
-			turquoise("`man %s`" % __productname__), file=out)
+    """Print help message. May also print partial help to stderr if an
+    error from {'options','actions'} is specified."""
+
+    out = sys.stdout
+    if _error:
+        out = sys.stderr
+    if not _error in (
+        "actions",
+        "global-options",
+        "packages-options",
+        "distfiles-options",
+        "merged-packages-options",
+        "merged-distfiles-options",
+        "time",
+        "size",
+    ):
+        _error = None
+    if not _error and not help:
+        help = "all"
+    if _error == "time":
+        print(pp.error("Wrong time specification"), file=out)
+        print(
+            "Time specification should be an integer followed by a"
+            + " single letter unit.",
+            file=out,
+        )
+        print(
+            "Available units are: y (years), m (months), w (weeks), "
+            + "d (days) and h (hours).",
+            file=out,
+        )
+        print(
+            'For instance: "1y" is "one year", "2w" is "two' + ' weeks", etc. ',
+            file=out,
+        )
+        return
+    if _error == "size":
+        print(pp.error("Wrong size specification"), file=out)
+        print(
+            "Size specification should be an integer followed by a"
+            + " single letter unit.",
+            file=out,
+        )
+        print("Available units are: G, M, K and B.", file=out)
+        print(
+            'For instance: "10M" is "ten megabytes", "200K" '
+            + 'is "two hundreds kilobytes", etc.',
+            file=out,
+        )
+        return
+    if _error in (
+        "global-options",
+        "packages-options",
+        "distfiles-options",
+        "merged-packages-options",
+        "merged-distfiles-options",
+    ):
+        print(pp.error("Wrong option on command line."), file=out)
+        print(file=out)
+    elif _error == "actions":
+        print(pp.error("Wrong or missing action name on command line."), file=out)
+        print(file=out)
+    print(white("Usage:"), file=out)
+    if (
+        _error in ("actions", "global-options", "packages-options", "distfiles-options")
+        or help == "all"
+    ):
+        print(
+            " " + turquoise(__productname__),
+            yellow("[global-option] ..."),
+            green("<action>"),
+            yellow("[action-option] ..."),
+            file=out,
+        )
+    if _error == "merged-distfiles-options" or help in ("all", "distfiles"):
+        print(
+            " " + turquoise(__productname__ + "-dist"),
+            yellow("[global-option, distfiles-option] ..."),
+            file=out,
+        )
+    if _error == "merged-packages-options" or help in ("all", "packages"):
+        print(
+            " " + turquoise(__productname__ + "-pkg"),
+            yellow("[global-option, packages-option] ..."),
+            file=out,
+        )
+    if _error in ("global-options", "actions"):
+        print(" " + turquoise(__productname__), yellow("[--help, --version]"), file=out)
+    if help == "all":
+        print(
+            " " + turquoise(__productname__ + "(-dist,-pkg)"),
+            yellow("[--help, --version]"),
+            file=out,
+        )
+    if _error == "merged-packages-options" or help == "packages":
+        print(
+            " " + turquoise(__productname__ + "-pkg"),
+            yellow("[--help, --version]"),
+            file=out,
+        )
+    if _error == "merged-distfiles-options" or help == "distfiles":
+        print(
+            " " + turquoise(__productname__ + "-dist"),
+            yellow("[--help, --version]"),
+            file=out,
+        )
+    print(file=out)
+    if (
+        _error
+        in ("global-options", "merged-packages-options", "merged-distfiles-options")
+        or help
+    ):
+        print("Available global", yellow("options") + ":", file=out)
+        print(
+            yellow(" -C, --nocolor") + "             - turn off colors on output",
+            file=out,
+        )
+        print(
+            yellow(" -d, --deep")
+            + "                - only keep the minimum for a reinstallation",
+            file=out,
+        )
+        print(
+            yellow(" -e, --exclude-file=<path>") + " - path to the exclusion file",
+            file=out,
+        )
+        print(
+            yellow(" -i, --interactive")
+            + "         - ask confirmation before deletions",
+            file=out,
+        )
+        print(
+            yellow(" -n, --package-names")
+            + "       - protect all versions (when --deep)",
+            file=out,
+        )
+        print(
+            yellow(" -p, --pretend")
+            + "             - only display what would be cleaned",
+            file=out,
+        )
+        print(
+            yellow(" -q, --quiet") + "               - be as quiet as possible",
+            file=out,
+        )
+        print(
+            yellow(" -t, --time-limit=<time>")
+            + "   - don't delete files modified since "
+            + yellow("<time>"),
+            file=out,
+        )
+        print(
+            "   " + yellow("<time>"),
+            'is a duration: "1y" is' + ' "one year", "2w" is "two weeks", etc. ',
+            file=out,
+        )
+        print(
+            "   "
+            + "Units are: y (years), m (months), w (weeks), "
+            + "d (days) and h (hours).",
+            file=out,
+        )
+        print(
+            yellow(" -h, --help") + "                - display the help screen",
+            file=out,
+        )
+        print(
+            yellow(" -V, --version") + "             - display version info", file=out
+        )
+        print(file=out)
+    if _error == "actions" or help == "all":
+        print("Available", green("actions") + ":", file=out)
+        print(
+            green(" packages") + "     - clean outdated binary packages from PKGDIR",
+            file=out,
+        )
+        print(
+            green(" distfiles")
+            + "    - clean outdated packages sources files from DISTDIR",
+            file=out,
+        )
+        print(file=out)
+    if _error in ("packages-options", "merged-packages-options") or help in (
+        "all",
+        "packages",
+    ):
+        print(
+            "Available",
+            yellow("options"),
+            "for the",
+            green("packages"),
+            "action:",
+            file=out,
+        )
+        print(
+            yellow("     --changed-deps")
+            + "               - delete packages for which ebuild dependencies have changed",
+            file=out,
+        )
+        print(
+            yellow(" -i, --ignore-failure")
+            + "             - ignore failure to locate PKGDIR",
+            file=out,
+        )
+        print(file=out)
+    if _error in ("distfiles-options", "merged-distfiles-options") or help in (
+        "all",
+        "distfiles",
+    ):
+        print(
+            "Available",
+            yellow("options"),
+            "for the",
+            green("distfiles"),
+            "action:",
+            file=out,
+        )
+        print(
+            yellow(" -f, --fetch-restricted")
+            + "   - protect fetch-restricted files (when --deep)",
+            file=out,
+        )
+        print(
+            yellow(" -s, --size-limit=<size>")
+            + "  - don't delete distfiles bigger than "
+            + yellow("<size>"),
+            file=out,
+        )
+        print(
+            "   " + yellow("<size>"),
+            "is a size specification: " + '"10M" is "ten megabytes", "200K" is',
+            file=out,
+        )
+        print(
+            "   " + '"two hundreds kilobytes", etc.  Units are: ' + "G, M, K and B.",
+            file=out,
+        )
+        print(file=out)
+    print(
+        "More detailed instruction can be found in",
+        turquoise("`man %s`" % __productname__),
+        file=out,
+    )
 
 
 class ParseArgsException(Exception):
-	"""For parseArgs() -> main() communications."""
-	def __init__(self, value):
-		self.value = value # sdfgsdfsdfsd
-	def __str__(self):
-		return repr(self.value)
+    """For parseArgs() -> main() communications."""
+
+    def __init__(self, value):
+        self.value = value  # sdfgsdfsdfsd
+
+    def __str__(self):
+        return repr(self.value)
 
 
 def parseSize(size):
-	"""Convert a file size "Xu" ("X" is an integer, and "u" in
-	[G,M,K,B]) into an integer (file size in Bytes).
-
-	@raise ParseArgsException: in case of failure
-	"""
-	units = {
-		'G': (1024**3),
-		'M': (1024**2),
-		'K': 1024,
-		'B': 1
-	}
-	try:
-		match = re.match(r"^(?P<value>\d+)(?P<unit>[GMKBgmkb])?$",size)
-		size = int(match.group('value'))
-		if match.group('unit'):
-			size *= units[match.group('unit').capitalize()]
-	except:
-		raise ParseArgsException('size')
-	return size
+    """Convert a file size "Xu" ("X" is an integer, and "u" in
+    [G,M,K,B]) into an integer (file size in Bytes).
+
+    @raise ParseArgsException: in case of failure
+    """
+    units = {"G": (1024 ** 3), "M": (1024 ** 2), "K": 1024, "B": 1}
+    try:
+        match = re.match(r"^(?P<value>\d+)(?P<unit>[GMKBgmkb])?$", size)
+        size = int(match.group("value"))
+        if match.group("unit"):
+            size *= units[match.group("unit").capitalize()]
+    except:
+        raise ParseArgsException("size")
+    return size
 
 
 def parseTime(timespec):
-	"""Convert a duration "Xu" ("X" is an int, and "u" a time unit in
-	[Y,M,W,D,H]) into an integer which is a past EPOCH date.
-	Raises ParseArgsException('time') in case of failure.
-	(yep, big approximations inside... who cares?).
-	"""
-	units = {'H' : (60 * 60)}
-	units['D'] = units['H'] * 24
-	units['W'] = units['D'] * 7
-	units['M'] = units['D'] * 30
-	units['Y'] = units['D'] * 365
-	try:
-		# parse the time specification
-		match = re.match(r"^(?P<value>\d+)(?P<unit>[YMWDHymwdh])?$",timespec)
-		value = int(match.group('value'))
-		if not match.group('unit'): unit = 'D'
-		else: unit = match.group('unit').capitalize()
-	except:
-		raise ParseArgsException('time')
-	return time.time() - (value * units[unit])
+    """Convert a duration "Xu" ("X" is an int, and "u" a time unit in
+    [Y,M,W,D,H]) into an integer which is a past EPOCH date.
+    Raises ParseArgsException('time') in case of failure.
+    (yep, big approximations inside... who cares?).
+    """
+    units = {"H": (60 * 60)}
+    units["D"] = units["H"] * 24
+    units["W"] = units["D"] * 7
+    units["M"] = units["D"] * 30
+    units["Y"] = units["D"] * 365
+    try:
+        # parse the time specification
+        match = re.match(r"^(?P<value>\d+)(?P<unit>[YMWDHymwdh])?$", timespec)
+        value = int(match.group("value"))
+        if not match.group("unit"):
+            unit = "D"
+        else:
+            unit = match.group("unit").capitalize()
+    except:
+        raise ParseArgsException("time")
+    return time.time() - (value * units[unit])
 
 
 def parseArgs(options={}):
-	"""Parse the command line arguments. Raise exceptions on
-	errors or non-action modes (help/version). Returns an action, and affect
-	the options dict.
-	"""
-
-	def optionSwitch(option,opts,action=None):
-		"""local function for interpreting command line options
-		and setting options accordingly"""
-		return_code = True
-		do_help = False
-		for o, a in opts:
-			if o in ("-h", "--help"):
-				do_help = True
-			elif o in ("-V", "--version"):
-				raise ParseArgsException('version')
-			elif o in ("-C", "--nocolor"):
-				options['nocolor'] = True
-				pp.output.nocolor()
-			elif o in ("-d", "--deep", "--destructive"):
-				options['destructive'] = True
-			elif o in ("-D", "--deprecated"):
-				options['deprecated'] = True
-			elif o in ("-i", "--interactive") and not options['pretend']:
-				options['interactive'] = True
-			elif o in ("-p", "--pretend"):
-				options['pretend'] = True
-				options['interactive'] = False
-			elif o in ("-q", "--quiet"):
-				options['quiet'] = True
-				options['verbose'] = False
-			elif o in ("-t", "--time-limit"):
-				options['time-limit'] = parseTime(a)
-			elif o in ("-e", "--exclude-file"):
-				print("cli --exclude option")
-				options['exclude-file'] = a
-			elif o in ("-n", "--package-names"):
-				options['package-names'] = True
-			elif o in ("-f", "--fetch-restricted"):
-				options['fetch-restricted'] = True
-			elif o in ("-s", "--size-limit"):
-				options['size-limit'] = parseSize(a)
-			elif o in ("-v", "--verbose") and not options['quiet']:
-					options['verbose'] = True
-			elif o in ("--changed-deps"):
-				options['changed-deps'] = True
-			elif o in ("-i", "--ignore-failure"):
-				options['ignore-failure'] = True
-			else:
-				return_code = False
-		# sanity check of --deep only options:
-		for opt in ('fetch-restricted', 'package-names'):
-			if (not options['destructive']) and options[opt]:
-				if not options['quiet']:
-					print( pp.error(
-						"--%s only makes sense in --deep mode." % opt), file=sys.stderr)
-				options[opt] = False
-		if do_help:
-			if action:
-				raise ParseArgsException('help-'+action)
-			else:
-				raise ParseArgsException('help')
-		return return_code
-
-	# here are the different allowed command line options (getopt args)
-	getopt_options = {'short':{}, 'long':{}}
-	getopt_options['short']['global'] = "CdDipqe:t:nhVv"
-	getopt_options['long']['global'] = ["nocolor", "deep", "destructive",
-		"deprecated", "interactive", "pretend", "quiet", "exclude-file=",
-		"time-limit=", "package-names", "help", "version",  "verbose"]
-	getopt_options['short']['distfiles'] = "fs:"
-	getopt_options['long']['distfiles'] = ["fetch-restricted", "size-limit="]
-	getopt_options['short']['packages'] = "i"
-	getopt_options['long']['packages'] = ["ignore-failure", "changed-deps"]
-	# set default options, except 'nocolor', which is set in main()
-	options['interactive'] = False
-	options['pretend'] = False
-	options['quiet'] = False
-	options['accept_all'] = False
-	options['destructive'] = False
-	options['deprecated'] = False
-	options['time-limit'] = 0
-	options['package-names'] = False
-	options['fetch-restricted'] = False
-	options['size-limit'] = 0
-	options['verbose'] = False
-	options['changed-deps'] = False
-	options['ignore-failure'] = False
-	# if called by a well-named symlink, set the action accordingly:
-	action = None
-	# temp print line to ensure it is the svn/branch code running, etc..
-	#print(  "###### svn/branch/gentoolkit_eclean ####### ==> ", os.path.basename(sys.argv[0]))
-	if os.path.basename(sys.argv[0]).startswith(__productname__+'-pkg') or \
-		os.path.basename(sys.argv[0]).startswith(__productname__+'-packages'):
-			action = 'packages'
-	elif os.path.basename(sys.argv[0]).startswith(__productname__+'-dist') or \
-		os.path.basename(sys.argv[0]).startswith(__productname__+'distfiles'):
-			action = 'distfiles'
-	# prepare for the first getopt
-	if action:
-		short_opts = getopt_options['short']['global'] \
-			+ getopt_options['short'][action]
-		long_opts = getopt_options['long']['global'] \
-			+ getopt_options['long'][action]
-		opts_mode = 'merged-'+action
-	else:
-		short_opts = getopt_options['short']['global']
-		long_opts = getopt_options['long']['global']
-		opts_mode = 'global'
-	# apply getopts to command line, show partial help on failure
-	try:
-		opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
-	except:
-		raise ParseArgsException(opts_mode+'-options')
-	# set options accordingly
-	optionSwitch(options,opts,action=action)
-	# if action was already set, there should be no more args
-	if action and len(args):
-		raise ParseArgsException(opts_mode+'-options')
-	# if action was set, there is nothing left to do
-	if action:
-		return action
-	# So, we are in "eclean --foo action --bar" mode. Parse remaining args...
-	# Only two actions are allowed: 'packages' and 'distfiles'.
-	if not len(args) or not args[0] in ('packages','distfiles'):
-		raise ParseArgsException('actions')
-	action = args.pop(0)
-	# parse the action specific options
-	try:
-		opts, args = getopt.getopt(args, \
-			getopt_options['short'][action], \
-			getopt_options['long'][action])
-	except:
-		raise ParseArgsException(action+'-options')
-	# set options again, for action-specific options
-	optionSwitch(options,opts,action=action)
-	# any remaning args? Then die!
-	if len(args):
-		raise ParseArgsException(action+'-options')
-	# returns the action. Options dictionary is modified by side-effect.
-	return action
-
-
-def doAction(action,options,exclude={}, output=None):
-	"""doAction: execute one action, ie display a few message, call the right
-	find* function, and then call doCleanup with its result."""
-	# define vocabulary for the output
-	if action == 'packages':
-		files_type = "binary packages"
-	else:
-		files_type = "distfiles"
-	saved = {}
-	deprecated = {}
-	# find files to delete, depending on the action
-	if not options['quiet']:
-		output.einfo("Building file list for "+action+" cleaning...")
-	if action == 'packages':
-		clean_me = findPackages(
-			options,
-			exclude=exclude,
-			destructive=options['destructive'],
-			package_names=options['package-names'],
-			time_limit=options['time-limit'],
-			pkgdir=pkgdir,
-			#port_dbapi=Dbapi(portage.db[portage.root]["porttree"].dbapi),
-			#var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
-		)
-	else:
-		# accept defaults
-		engine = DistfilesSearch(output=options['verbose-output'],
-			#portdb=Dbapi(portage.db[portage.root]["porttree"].dbapi),
-			#var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
-		)
-		clean_me, saved, deprecated = engine.findDistfiles(
-			exclude=exclude,
-			destructive=options['destructive'],
-			fetch_restricted=options['fetch-restricted'],
-			package_names=options['package-names'],
-			time_limit=options['time-limit'],
-			size_limit=options['size-limit'],
-			deprecate = options['deprecated']
-		)
-
-	# initialize our cleaner
-	cleaner = CleanUp(output.progress_controller)
-
-	# actually clean files if something was found
-	if clean_me:
-		# verbose pretend message
-		if options['pretend'] and not options['quiet']:
-			output.einfo("Here are the "+files_type+" that would be deleted:")
-		# verbose non-pretend message
-		elif not options['quiet']:
-			output.einfo("Cleaning " + files_type  +"...")
-		# do the cleanup, and get size of deleted files
-		if  options['pretend']:
-			clean_size = cleaner.pretend_clean(clean_me)
-		elif action in ['distfiles']:
-			clean_size = cleaner.clean_dist(clean_me)
-		elif action in ['packages']:
-			clean_size = cleaner.clean_pkgs(clean_me,
-				pkgdir)
-		# vocabulary for final message
-		if options['pretend']:
-			verb = "would be"
-		else:
-			verb = "were"
-		# display freed space
-		if not options['quiet']:
-			output.total('normal', clean_size, len(clean_me), verb, action)
-	# nothing was found
-	elif not options['quiet']:
-		output.einfo("Your "+action+" directory was already clean.")
-	if saved and not options['quiet']:
-		print()
-		print( (pp.emph("   The following ") + yellow("unavailable") +
-			pp.emph(" files were saved from cleaning due to exclusion file entries")))
-		output.set_colors('deprecated')
-		clean_size = cleaner.pretend_clean(saved)
-		output.total('deprecated', clean_size, len(saved), verb, action)
-	if deprecated and not options['quiet']:
-		print()
-		print( (pp.emph("   The following ") + yellow("unavailable") +
-			pp.emph(" installed packages were found")))
-		output.set_colors('deprecated')
-		output.list_pkgs(deprecated)
+    """Parse the command line arguments. Raise exceptions on
+    errors or non-action modes (help/version). Returns an action, and affect
+    the options dict.
+    """
+
+    def optionSwitch(option, opts, action=None):
+        """local function for interpreting command line options
+        and setting options accordingly"""
+        return_code = True
+        do_help = False
+        for o, a in opts:
+            if o in ("-h", "--help"):
+                do_help = True
+            elif o in ("-V", "--version"):
+                raise ParseArgsException("version")
+            elif o in ("-C", "--nocolor"):
+                options["nocolor"] = True
+                pp.output.nocolor()
+            elif o in ("-d", "--deep", "--destructive"):
+                options["destructive"] = True
+            elif o in ("-D", "--deprecated"):
+                options["deprecated"] = True
+            elif o in ("-i", "--interactive") and not options["pretend"]:
+                options["interactive"] = True
+            elif o in ("-p", "--pretend"):
+                options["pretend"] = True
+                options["interactive"] = False
+            elif o in ("-q", "--quiet"):
+                options["quiet"] = True
+                options["verbose"] = False
+            elif o in ("-t", "--time-limit"):
+                options["time-limit"] = parseTime(a)
+            elif o in ("-e", "--exclude-file"):
+                print("cli --exclude option")
+                options["exclude-file"] = a
+            elif o in ("-n", "--package-names"):
+                options["package-names"] = True
+            elif o in ("-f", "--fetch-restricted"):
+                options["fetch-restricted"] = True
+            elif o in ("-s", "--size-limit"):
+                options["size-limit"] = parseSize(a)
+            elif o in ("-v", "--verbose") and not options["quiet"]:
+                options["verbose"] = True
+            elif o in ("--changed-deps"):
+                options["changed-deps"] = True
+            elif o in ("-i", "--ignore-failure"):
+                options["ignore-failure"] = True
+            else:
+                return_code = False
+        # sanity check of --deep only options:
+        for opt in ("fetch-restricted", "package-names"):
+            if (not options["destructive"]) and options[opt]:
+                if not options["quiet"]:
+                    print(
+                        pp.error("--%s only makes sense in --deep mode." % opt),
+                        file=sys.stderr,
+                    )
+                options[opt] = False
+        if do_help:
+            if action:
+                raise ParseArgsException("help-" + action)
+            else:
+                raise ParseArgsException("help")
+        return return_code
+
+    # here are the different allowed command line options (getopt args)
+    getopt_options = {"short": {}, "long": {}}
+    getopt_options["short"]["global"] = "CdDipqe:t:nhVv"
+    getopt_options["long"]["global"] = [
+        "nocolor",
+        "deep",
+        "destructive",
+        "deprecated",
+        "interactive",
+        "pretend",
+        "quiet",
+        "exclude-file=",
+        "time-limit=",
+        "package-names",
+        "help",
+        "version",
+        "verbose",
+    ]
+    getopt_options["short"]["distfiles"] = "fs:"
+    getopt_options["long"]["distfiles"] = ["fetch-restricted", "size-limit="]
+    getopt_options["short"]["packages"] = "i"
+    getopt_options["long"]["packages"] = ["ignore-failure", "changed-deps"]
+    # set default options, except 'nocolor', which is set in main()
+    options["interactive"] = False
+    options["pretend"] = False
+    options["quiet"] = False
+    options["accept_all"] = False
+    options["destructive"] = False
+    options["deprecated"] = False
+    options["time-limit"] = 0
+    options["package-names"] = False
+    options["fetch-restricted"] = False
+    options["size-limit"] = 0
+    options["verbose"] = False
+    options["changed-deps"] = False
+    options["ignore-failure"] = False
+    # if called by a well-named symlink, set the action accordingly:
+    action = None
+    # temp print line to ensure it is the svn/branch code running, etc..
+    # print(  "###### svn/branch/gentoolkit_eclean ####### ==> ", os.path.basename(sys.argv[0]))
+    if os.path.basename(sys.argv[0]).startswith(
+        __productname__ + "-pkg"
+    ) or os.path.basename(sys.argv[0]).startswith(__productname__ + "-packages"):
+        action = "packages"
+    elif os.path.basename(sys.argv[0]).startswith(
+        __productname__ + "-dist"
+    ) or os.path.basename(sys.argv[0]).startswith(__productname__ + "distfiles"):
+        action = "distfiles"
+    # prepare for the first getopt
+    if action:
+        short_opts = getopt_options["short"]["global"] + getopt_options["short"][action]
+        long_opts = getopt_options["long"]["global"] + getopt_options["long"][action]
+        opts_mode = "merged-" + action
+    else:
+        short_opts = getopt_options["short"]["global"]
+        long_opts = getopt_options["long"]["global"]
+        opts_mode = "global"
+    # apply getopts to command line, show partial help on failure
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
+    except:
+        raise ParseArgsException(opts_mode + "-options")
+    # set options accordingly
+    optionSwitch(options, opts, action=action)
+    # if action was already set, there should be no more args
+    if action and len(args):
+        raise ParseArgsException(opts_mode + "-options")
+    # if action was set, there is nothing left to do
+    if action:
+        return action
+    # So, we are in "eclean --foo action --bar" mode. Parse remaining args...
+    # Only two actions are allowed: 'packages' and 'distfiles'.
+    if not len(args) or not args[0] in ("packages", "distfiles"):
+        raise ParseArgsException("actions")
+    action = args.pop(0)
+    # parse the action specific options
+    try:
+        opts, args = getopt.getopt(
+            args, getopt_options["short"][action], getopt_options["long"][action]
+        )
+    except:
+        raise ParseArgsException(action + "-options")
+    # set options again, for action-specific options
+    optionSwitch(options, opts, action=action)
+    # any remaining args? Then die!
+    if len(args):
+        raise ParseArgsException(action + "-options")
+    # returns the action. Options dictionary is modified by side-effect.
+    return action
+
+
+def doAction(action, options, exclude={}, output=None):
+    """doAction: execute one action, ie display a few message, call the right
+    find* function, and then call doCleanup with its result."""
+    # define vocabulary for the output
+    if action == "packages":
+        files_type = "binary packages"
+    else:
+        files_type = "distfiles"
+    saved = {}
+    deprecated = {}
+    # find files to delete, depending on the action
+    if not options["quiet"]:
+        output.einfo("Building file list for " + action + " cleaning...")
+    if action == "packages":
+        clean_me = findPackages(
+            options,
+            exclude=exclude,
+            destructive=options["destructive"],
+            package_names=options["package-names"],
+            time_limit=options["time-limit"],
+            pkgdir=pkgdir,
+            # port_dbapi=Dbapi(portage.db[portage.root]["porttree"].dbapi),
+            # var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
+        )
+    else:
+        # accept defaults
+        engine = DistfilesSearch(
+            output=options["verbose-output"],
+            # portdb=Dbapi(portage.db[portage.root]["porttree"].dbapi),
+            # var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
+        )
+        clean_me, saved, deprecated = engine.findDistfiles(
+            exclude=exclude,
+            destructive=options["destructive"],
+            fetch_restricted=options["fetch-restricted"],
+            package_names=options["package-names"],
+            time_limit=options["time-limit"],
+            size_limit=options["size-limit"],
+            deprecate=options["deprecated"],
+        )
+
+    # initialize our cleaner
+    cleaner = CleanUp(output.progress_controller)
+
+    # actually clean files if something was found
+    if clean_me:
+        # verbose pretend message
+        if options["pretend"] and not options["quiet"]:
+            output.einfo("Here are the " + files_type + " that would be deleted:")
+        # verbose non-pretend message
+        elif not options["quiet"]:
+            output.einfo("Cleaning " + files_type + "...")
+        # do the cleanup, and get size of deleted files
+        if options["pretend"]:
+            clean_size = cleaner.pretend_clean(clean_me)
+        elif action in ["distfiles"]:
+            clean_size = cleaner.clean_dist(clean_me)
+        elif action in ["packages"]:
+            clean_size = cleaner.clean_pkgs(clean_me, pkgdir)
+        # vocabulary for final message
+        if options["pretend"]:
+            verb = "would be"
+        else:
+            verb = "were"
+        # display freed space
+        if not options["quiet"]:
+            output.total("normal", clean_size, len(clean_me), verb, action)
+    # nothing was found
+    elif not options["quiet"]:
+        output.einfo("Your " + action + " directory was already clean.")
+    if saved and not options["quiet"]:
+        print()
+        print(
+            (
+                pp.emph("   The following ")
+                + yellow("unavailable")
+                + pp.emph(
+                    " files were saved from cleaning due to exclusion file entries"
+                )
+            )
+        )
+        output.set_colors("deprecated")
+        clean_size = cleaner.pretend_clean(saved)
+        output.total("deprecated", clean_size, len(saved), verb, action)
+    if deprecated and not options["quiet"]:
+        print()
+        print(
+            (
+                pp.emph("   The following ")
+                + yellow("unavailable")
+                + pp.emph(" installed packages were found")
+            )
+        )
+        output.set_colors("deprecated")
+        output.list_pkgs(deprecated)
 
 
 def main():
-	"""Parse command line and execute all actions."""
-	# set default options
-	options = {}
-	options['nocolor'] = (port_settings.get("NOCOLOR") in ('yes','true')
-		or not sys.stdout.isatty())
-	if options['nocolor']:
-		pp.output.nocolor()
-	# parse command line options and actions
-	try:
-		action = parseArgs(options)
-	# filter exception to know what message to display
-	except ParseArgsException as e:
-		if e.value == 'help':
-			printUsage(help='all')
-			sys.exit(0)
-		elif e.value[:5] == 'help-':
-			printUsage(help=e.value[5:])
-			sys.exit(0)
-		elif e.value == 'version':
-			printVersion()
-			sys.exit(0)
-		else:
-			printUsage(e.value)
-			sys.exit(2)
-	output = OutputControl(options)
-	options['verbose-output'] = lambda x: None
-	if not options['quiet']:
-		if options['verbose']:
-			options['verbose-output'] = output.einfo
-	# parse the exclusion file
-	if not 'exclude-file' in options:
-		# set it to the default exclude file if it exists
-		exclude_file = "%s/etc/%s/%s.exclude" % (EPREFIX,__productname__ , action)
-		if os.path.isfile(exclude_file):
-			options['exclude-file'] = exclude_file
-	if 'exclude-file' in options:
-		try:
-			exclude = parseExcludeFile(options['exclude-file'],
-					options['verbose-output'])
-		except ParseExcludeFileException as e:
-			print( pp.error(str(e)), file=sys.stderr)
-			print( pp.error(
-				"Invalid exclusion file: %s" % options['exclude-file']), file=sys.stderr)
-			print( pp.error(
-				"See format of this file in `man %s`" % __productname__), file=sys.stderr)
-			sys.exit(1)
-	else:
-			exclude = {}
-	# security check for non-pretend mode
-	if not options['pretend'] and portage.secpass == 0:
-		print( pp.error(
-			"Permission denied: you must be root or belong to " +
-			"the portage group."), file=sys.stderr)
-		sys.exit(1)
-	# execute action
-	doAction(action, options, exclude=exclude,
-		output=output)
+    """Parse command line and execute all actions."""
+    # set default options
+    options = {}
+    options["nocolor"] = (
+        port_settings.get("NOCOLOR") in ("yes", "true") or not sys.stdout.isatty()
+    )
+    if options["nocolor"]:
+        pp.output.nocolor()
+    # parse command line options and actions
+    try:
+        action = parseArgs(options)
+    # filter exception to know what message to display
+    except ParseArgsException as e:
+        if e.value == "help":
+            printUsage(help="all")
+            sys.exit(0)
+        elif e.value[:5] == "help-":
+            printUsage(help=e.value[5:])
+            sys.exit(0)
+        elif e.value == "version":
+            printVersion()
+            sys.exit(0)
+        else:
+            printUsage(e.value)
+            sys.exit(2)
+    output = OutputControl(options)
+    options["verbose-output"] = lambda x: None
+    if not options["quiet"]:
+        if options["verbose"]:
+            options["verbose-output"] = output.einfo
+    # parse the exclusion file
+    if not "exclude-file" in options:
+        # set it to the default exclude file if it exists
+        exclude_file = "%s/etc/%s/%s.exclude" % (EPREFIX, __productname__, action)
+        if os.path.isfile(exclude_file):
+            options["exclude-file"] = exclude_file
+    if "exclude-file" in options:
+        try:
+            exclude = parseExcludeFile(
+                options["exclude-file"], options["verbose-output"]
+            )
+        except ParseExcludeFileException as e:
+            print(pp.error(str(e)), file=sys.stderr)
+            print(
+                pp.error("Invalid exclusion file: %s" % options["exclude-file"]),
+                file=sys.stderr,
+            )
+            print(
+                pp.error("See format of this file in `man %s`" % __productname__),
+                file=sys.stderr,
+            )
+            sys.exit(1)
+    else:
+        exclude = {}
+    # security check for non-pretend mode
+    if not options["pretend"] and portage.secpass == 0:
+        print(
+            pp.error(
+                "Permission denied: you must be root or belong to "
+                + "the portage group."
+            ),
+            file=sys.stderr,
+        )
+        sys.exit(1)
+    # execute action
+    doAction(action, options, exclude=exclude, output=output)
 
 
 if __name__ == "__main__":
-	"""actually call main() if launched as a script"""
-	try:
-		main()
-	except KeyboardInterrupt:
-		print( "Aborted.")
-		sys.exit(130)
-	sys.exit(0)
+    """actually call main() if launched as a script"""
+    try:
+        main()
+    except KeyboardInterrupt:
+        print("Aborted.")
+        sys.exit(130)
+    sys.exit(0)

diff --git a/pym/gentoolkit/eclean/exclude.py b/pym/gentoolkit/eclean/exclude.py
index 1da9523..a5c29d4 100644
--- a/pym/gentoolkit/eclean/exclude.py
+++ b/pym/gentoolkit/eclean/exclude.py
@@ -13,254 +13,283 @@ from portage import _encodings, _unicode_encode
 # Misc. shortcuts to some portage stuff:
 listdir = portage.listdir
 
-FILENAME_RE = [re.compile(r'(?P<pkgname>[-a-zA-z0-9\+]+)(?P<ver>-\d+\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z]+)(?P<ver>_\d+\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z_]+)(?P<ver>\d\d+\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z0-9_]+)(?P<ver>-default\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z0-9]+)(?P<ver>_\d\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z0-9\+\.]+)(?P<ver>-\d+\S+)'),
-	re.compile(r'(?P<pkgname>[-a-zA-z0-9\+\.]+)(?P<ver>.\d+\S+)')]
+FILENAME_RE = [
+    re.compile(r"(?P<pkgname>[-a-zA-z0-9\+]+)(?P<ver>-\d+\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z]+)(?P<ver>_\d+\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z_]+)(?P<ver>\d\d+\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z0-9_]+)(?P<ver>-default\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z0-9]+)(?P<ver>_\d\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z0-9\+\.]+)(?P<ver>-\d+\S+)"),
+    re.compile(r"(?P<pkgname>[-a-zA-z0-9\+\.]+)(?P<ver>.\d+\S+)"),
+]
 
 debug_modules = []
 
+
 def dprint(module, message):
-	if module in debug_modules:
-		print(message)
+    if module in debug_modules:
+        print(message)
+
 
 def isValidCP(cp):
-	"""Check whether a string is a valid cat/pkg-name.
+    """Check whether a string is a valid cat/pkg-name.
 
-	This is for 2.0.51 vs. CVS HEAD compatibility, I've not found any function
-	for that which would exists in both. Weird...
+    This is for 2.0.51 vs. CVS HEAD compatibility, I've not found any function
+    for that which would exist in both. Weird...
 
-	@param cp: catageory/package string
-	@rtype: bool
-	"""
+    @param cp: category/package string
+    @rtype: bool
+    """
 
-	if not '/' in cp:
-		return False
-	try:
-		portage.cpv_getkey(cp+"-0")
-	except:
-		return False
-	else:
-		return True
+    if not "/" in cp:
+        return False
+    try:
+        portage.cpv_getkey(cp + "-0")
+    except:
+        return False
+    else:
+        return True
 
 
 class ParseExcludeFileException(Exception):
-	"""For parseExcludeFile() -> main() communication.
+    """For parseExcludeFile() -> main() communication.
+
+    @param value: Error message string
+    """
+
+    def __init__(self, value):
+        self.value = value
 
-	@param value: Error message string
-	"""
-	def __init__(self, value):
-		self.value = value
-	def __str__(self):
-		return repr(self.value)
+    def __str__(self):
+        return repr(self.value)
 
 
 def parseExcludeFile(filepath, output):
-	"""Parses an exclusion file.
-
-	@param filepath: file containing the list of cat/pkg's to exclude
-	@param output: --verbose enabled output method or "lambda x: None"
-
-	@rtype: dict
-	@return: an exclusion dict
-	@raise ParseExcludeFileException: in case of fatal error
-	"""
-
-	exclude = {
-			'categories': {},
-			'packages': {},
-			'anti-packages': {},
-			'filenames': {}
-		}
-	output("Parsing Exclude file: " + filepath)
-	try:
-		file_ = open(_unicode_encode(filepath,
-			encoding=_encodings['fs']), mode="r", encoding=_encodings['content'])
-	except IOError:
-		raise ParseExcludeFileException("Could not open exclusion file: " +
-			filepath)
-	filecontents = file_.readlines()
-	file_.close()
-	cat_re = re.compile(r'^(?P<cat>[a-zA-Z0-9]+-[a-zA-Z0-9]+)(/\*)?$')
-	cp_re = re.compile(r'^(?P<cp>[-a-zA-Z0-9_]+/[-a-zA-Z0-9_]+)$')
-	# used to output the line number for exception error reporting
-	linenum = 0
-	for line in filecontents:
-		# need to increment it here due to continue statements.
-		linenum += 1
-		line = line.strip()
-		if not len(line): # skip blank a line
-			continue
-		if line[0] == '#': # skip a comment line
-			continue
-		#print( "parseExcludeFile: line=", line)
-		try: # category matching
-			cat = cat_re.match(line).group('cat')
-			#print( "parseExcludeFile: found cat=", cat)
-		except:
-			pass
-		else:
-			if not cat in portage.settings.categories:
-				raise ParseExcludeFileException("Invalid category: "+cat +
-					" @line # " + str(linenum))
-			exclude['categories'][cat] = None
-			continue
-		dict_key = 'packages'
-		if line[0] == '!': # reverses category setting
-			dict_key = 'anti-packages'
-			line = line[1:]
-		try: # cat/pkg matching
-			cp = cp_re.match(line).group('cp')
-			#print( "parseExcludeFile: found cp=", cp)
-			if isValidCP(cp):
-				exclude[dict_key][cp] = None
-				continue
-			else:
-				raise ParseExcludeFileException("Invalid cat/pkg: "+cp +
-					" @line # " + str(linenum))
-		except:
-			pass
-		#raise ParseExcludeFileException("Invalid line: "+line)
-		try: # filename matching.
-			exclude['filenames'][line] = re.compile(line)
-			#print( "parseExcludeFile: found filenames", line)
-		except:
-			try:
-				exclude['filenames'][line] = re.compile(re.escape(line))
-				#print( "parseExcludeFile: found escaped filenames", line)
-			except:
-				raise ParseExcludeFileException("Invalid file name/regular " +
-					"expression: @line # " + str(linenum) + " line=" +line)
-	output("Exclude file parsed. Found " +
-		"%d categories, %d packages, %d anti-packages %d filenames"
-		%(len(exclude['categories']), len(exclude['packages']),
-		len(exclude['anti-packages']), len(exclude['filenames'])))
-	#print()
-	#print( "parseExcludeFile: final exclude_dict = ", exclude)
-	#print()
-	return exclude
-
-def cp_all(categories, portdb=portage.portdb ):
-		"""temp function until the new portdb.cp_all([cat,...])
-		behaviour is fully available.
-
-		@param categories: list of categories to get all packages for
-				eg. ['app-portage', 'sys-apps',...]
-		@rtype: list of cat/pkg's  ['foo/bar', 'foo/baz']
-		"""
-		try:
-			cps = portdb.cp_all(categories)
-			# NOTE: the following backup code should be removed
-			# when all available versions of portage have the
-			# categories parameter in cp_all()
-		except:  # new behaviour not available
-			#~ message =  "Exception: eclean.exclude.cp_all() " +\
-				#~ "new portdb.cp_all() behavior not found. using fallback code"
-			#~ print( warn(message), file=sys.stderr)
-			cps = []
-			# XXX: i smell an access to something which is really out of API...
-			_pkg_dir_name_re = re.compile(r'^\w[-+\w]*$')
-			for tree in portdb.porttrees:
-				for cat in categories:
-					for pkg in listdir(os.path.join(tree,cat),
-								EmptyOnError=1, ignorecvs=1, dirsonly=1):
-						if not _pkg_dir_name_re.match(pkg) or pkg == "CVS":
-							continue
-						cps.append(cat+'/'+pkg)
-		#print( "cp_all: new cps list=", cps)
-		return cps
+    """Parses an exclusion file.
+
+    @param filepath: file containing the list of cat/pkg's to exclude
+    @param output: --verbose enabled output method or "lambda x: None"
+
+    @rtype: dict
+    @return: an exclusion dict
+    @raise ParseExcludeFileException: in case of fatal error
+    """
+
+    exclude = {"categories": {}, "packages": {}, "anti-packages": {}, "filenames": {}}
+    output("Parsing Exclude file: " + filepath)
+    try:
+        file_ = open(
+            _unicode_encode(filepath, encoding=_encodings["fs"]),
+            mode="r",
+            encoding=_encodings["content"],
+        )
+    except IOError:
+        raise ParseExcludeFileException("Could not open exclusion file: " + filepath)
+    filecontents = file_.readlines()
+    file_.close()
+    cat_re = re.compile(r"^(?P<cat>[a-zA-Z0-9]+-[a-zA-Z0-9]+)(/\*)?$")
+    cp_re = re.compile(r"^(?P<cp>[-a-zA-Z0-9_]+/[-a-zA-Z0-9_]+)$")
+    # used to output the line number for exception error reporting
+    linenum = 0
+    for line in filecontents:
+        # need to increment it here due to continue statements.
+        linenum += 1
+        line = line.strip()
+        if not len(line):  # skip a blank line
+            continue
+        if line[0] == "#":  # skip a comment line
+            continue
+        # print( "parseExcludeFile: line=", line)
+        try:  # category matching
+            cat = cat_re.match(line).group("cat")
+            # print( "parseExcludeFile: found cat=", cat)
+        except:
+            pass
+        else:
+            if not cat in portage.settings.categories:
+                raise ParseExcludeFileException(
+                    "Invalid category: " + cat + " @line # " + str(linenum)
+                )
+            exclude["categories"][cat] = None
+            continue
+        dict_key = "packages"
+        if line[0] == "!":  # reverses category setting
+            dict_key = "anti-packages"
+            line = line[1:]
+        try:  # cat/pkg matching
+            cp = cp_re.match(line).group("cp")
+            # print( "parseExcludeFile: found cp=", cp)
+            if isValidCP(cp):
+                exclude[dict_key][cp] = None
+                continue
+            else:
+                raise ParseExcludeFileException(
+                    "Invalid cat/pkg: " + cp + " @line # " + str(linenum)
+                )
+        except:
+            pass
+        # raise ParseExcludeFileException("Invalid line: "+line)
+        try:  # filename matching.
+            exclude["filenames"][line] = re.compile(line)
+            # print( "parseExcludeFile: found filenames", line)
+        except:
+            try:
+                exclude["filenames"][line] = re.compile(re.escape(line))
+                # print( "parseExcludeFile: found escaped filenames", line)
+            except:
+                raise ParseExcludeFileException(
+                    "Invalid file name/regular "
+                    + "expression: @line # "
+                    + str(linenum)
+                    + " line="
+                    + line
+                )
+    output(
+        "Exclude file parsed. Found "
+        + "%d categories, %d packages, %d anti-packages %d filenames"
+        % (
+            len(exclude["categories"]),
+            len(exclude["packages"]),
+            len(exclude["anti-packages"]),
+            len(exclude["filenames"]),
+        )
+    )
+    # print()
+    # print( "parseExcludeFile: final exclude_dict = ", exclude)
+    # print()
+    return exclude
+
+
+def cp_all(categories, portdb=portage.portdb):
+    """temp function until the new portdb.cp_all([cat,...])
+    behaviour is fully available.
+
+    @param categories: list of categories to get all packages for
+                    eg. ['app-portage', 'sys-apps',...]
+    @rtype: list of cat/pkg's  ['foo/bar', 'foo/baz']
+    """
+    try:
+        cps = portdb.cp_all(categories)
+        # NOTE: the following backup code should be removed
+        # when all available versions of portage have the
+        # categories parameter in cp_all()
+    except:  # new behaviour not available
+        # ~ message =  "Exception: eclean.exclude.cp_all() " +\
+        # ~ "new portdb.cp_all() behavior not found. using fallback code"
+        # ~ print( warn(message), file=sys.stderr)
+        cps = []
+        # XXX: i smell an access to something which is really out of API...
+        _pkg_dir_name_re = re.compile(r"^\w[-+\w]*$")
+        for tree in portdb.porttrees:
+            for cat in categories:
+                for pkg in listdir(
+                    os.path.join(tree, cat), EmptyOnError=1, ignorecvs=1, dirsonly=1
+                ):
+                    if not _pkg_dir_name_re.match(pkg) or pkg == "CVS":
+                        continue
+                    cps.append(cat + "/" + pkg)
+    # print( "cp_all: new cps list=", cps)
+    return cps
+
 
 def exclDictExpand(exclude):
-	"""Returns a dictionary of all CP/CPV from porttree which match
-	the exclusion dictionary.
-	"""
-	d = {}
-	if 'categories' in exclude:
-		# replace the following cp_all call with
-		# portage.portdb.cp_all([cat1, cat2])
-		# when it is available in all portage versions.
-		cps = cp_all(exclude['categories'])
-		for cp in cps:
-			d[cp] = None
-	if 'packages' in exclude:
-		for cp in exclude['packages']:
-			d[cp] = None
-	if 'anti-packages' in exclude:
-		for cp in exclude['anti-packages']:
-			if cp in d:
-				del d[cp]
-	return d
-
-def exclDictMatchCP(exclude,pkg):
-	"""Checks whether a CP matches the exclusion rules."""
-	if pkg is None:
-		return False
-	if 'anti-packages' in exclude and pkg in exclude['anti-packages']:
-		return False
-	if 'packages' in exclude and pkg in exclude['packages']:
-		return True
-	try:
-		cat = pkg.split('/')[0]
-	except:
-		dprint( "exclude", "exclDictMatchCP: Invalid package name: " +\
-			"%s, Could not determine category" %pkg)
-		cat = ''
-	if 'categories' in exclude and cat in exclude['categories']:
-			return True
-	return False
+    """Returns a dictionary of all CP/CPV from porttree which match
+    the exclusion dictionary.
+    """
+    d = {}
+    if "categories" in exclude:
+        # replace the following cp_all call with
+        # portage.portdb.cp_all([cat1, cat2])
+        # when it is available in all portage versions.
+        cps = cp_all(exclude["categories"])
+        for cp in cps:
+            d[cp] = None
+    if "packages" in exclude:
+        for cp in exclude["packages"]:
+            d[cp] = None
+    if "anti-packages" in exclude:
+        for cp in exclude["anti-packages"]:
+            if cp in d:
+                del d[cp]
+    return d
+
+
+def exclDictMatchCP(exclude, pkg):
+    """Checks whether a CP matches the exclusion rules."""
+    if pkg is None:
+        return False
+    if "anti-packages" in exclude and pkg in exclude["anti-packages"]:
+        return False
+    if "packages" in exclude and pkg in exclude["packages"]:
+        return True
+    try:
+        cat = pkg.split("/")[0]
+    except:
+        dprint(
+            "exclude",
+            "exclDictMatchCP: Invalid package name: "
+            + "%s, Could not determine category" % pkg,
+        )
+        cat = ""
+    if "categories" in exclude and cat in exclude["categories"]:
+        return True
+    return False
+
 
 def exclDictExpandPkgname(exclude):
-	"""Returns a set of all pkgnames  from porttree which match
-	the exclusion dictionary.
-	"""
-	p = set()
-	if 'categories' in exclude:
-		# replace the following cp_all call with
-		# portage.portdb.cp_all([cat1, cat2])
-		# when it is available in all portage versions.
-		cps = cp_all(exclude['categories'])
-		for cp in cps:
-			pkgname = cp.split('/')[1]
-			p.add(pkgname)
-	if 'packages' in exclude:
-		for cp in exclude['packages']:
-			pkgname = cp.split('/')[1]
-			p.add(pkgname)
-	if 'anti-packages' in exclude:
-		for cp in exclude['anti-packages']:
-			if cp in p:
-				p.remove(cp)
-	return p
+    """Returns a set of all pkgnames  from porttree which match
+    the exclusion dictionary.
+    """
+    p = set()
+    if "categories" in exclude:
+        # replace the following cp_all call with
+        # portage.portdb.cp_all([cat1, cat2])
+        # when it is available in all portage versions.
+        cps = cp_all(exclude["categories"])
+        for cp in cps:
+            pkgname = cp.split("/")[1]
+            p.add(pkgname)
+    if "packages" in exclude:
+        for cp in exclude["packages"]:
+            pkgname = cp.split("/")[1]
+            p.add(pkgname)
+    if "anti-packages" in exclude:
+        for cp in exclude["anti-packages"]:
+            if cp in p:
+                p.remove(cp)
+    return p
 
 
 def exclMatchFilename(exclude_names, filename):
-	"""Attempts to split the package name out of a filename
-	and then checks if it matches any exclusion rules.
-
-	This is intended to be run on the cleaning list after all
-	normal checks and removal of protected files.  This will reduce
-	the number of files to perform this last minute check on
-
-	@param exclude_names: a set of pkgnames to exlcude
-	@param filename:
-
-	@rtype: bool
-	"""
-	found = False
-	index = 0
-	while not found and index < len(FILENAME_RE):
-		found = FILENAME_RE[index].match(filename)
-		index += 1
-	if not found:
-		dprint( "exclude", "exclMatchFilename: filename: " +\
-			"%s, Could not determine package name" %filename)
-		return False
-	pkgname = found.group('pkgname')
-	dprint("exclude", "exclMatchFilename: found pkgname = " +
-		"%s, %s, %d, %s" %(pkgname, str(pkgname in exclude_names),
-		index-1, filename))
-	return (pkgname in exclude_names)
+    """Attempts to split the package name out of a filename
+    and then checks if it matches any exclusion rules.
+
+    This is intended to be run on the cleaning list after all
+    normal checks and removal of protected files.  This will reduce
+    the number of files to perform this last minute check on
+
+    @param exclude_names: a set of pkgnames to exclude
+    @param filename:
 
+    @rtype: bool
+    """
+    found = False
+    index = 0
+    while not found and index < len(FILENAME_RE):
+        found = FILENAME_RE[index].match(filename)
+        index += 1
+    if not found:
+        dprint(
+            "exclude",
+            "exclMatchFilename: filename: "
+            + "%s, Could not determine package name" % filename,
+        )
+        return False
+    pkgname = found.group("pkgname")
+    dprint(
+        "exclude",
+        "exclMatchFilename: found pkgname = "
+        + "%s, %s, %d, %s"
+        % (pkgname, str(pkgname in exclude_names), index - 1, filename),
+    )
+    return pkgname in exclude_names

diff --git a/pym/gentoolkit/eclean/output.py b/pym/gentoolkit/eclean/output.py
index e2ed221..62777b7 100644
--- a/pym/gentoolkit/eclean/output.py
+++ b/pym/gentoolkit/eclean/output.py
@@ -10,173 +10,187 @@ from gentoolkit.pprinter import cpv, number
 
 
 class OutputControl:
-	"""Outputs data according to predetermined options and handles any user
-	interaction.
-
-	@param options: dictionary of boolean options as determined in cli.py
-			used here: interactive, pretend, quiet, accept_all, nocolor.
-	"""
-
-	def __init__(self, options):
-		if not options:
-			# set some defaults
-			self.options['interactive'] = False
-			self.options['pretend'] = True
-			self.options['quiet'] = False
-			self.options['accept_all'] = False
-			self.options['nocolor'] = False
-		else:
-			self.options = options
-		self.set_colors("normal")
-
-	def set_colors(self, mode):
-		"""Sets the colors for the progress_controller
-		and prettysize output
-
-		@param mode: string, 1 of ["normal", "deprecated"]
-		"""
-		if mode == "normal":
-			self.pkg_color = cpv        # green
-			self.numbers = number  # turquoise
-			self.brace = blue
-		elif mode == "deprecated":
-			self.pkg_color = yellow
-			self.numbers =  teal # darkgreen
-			self.brace = blue
-
-	def einfo(self, message=""):
-		"""Display an info message depending on a color mode.
-
-		@param message: text string to display
-
-		@outputs to stdout.
-		"""
-		if not  self.options['nocolor']:
-			prefix = " "+green('*')
-		else:
-			prefix = ">>>"
-		print(prefix,message)
-
-	def eprompt(self, message):
-		"""Display a user question depending on a color mode.
-
-		@param message: text string to display
-
-		@output to stdout
-		"""
-		if not self.options['nocolor']:
-			prefix = " "+red('>')+" "
-		else:
-			prefix = "??? "
-		sys.stdout.write(prefix+message)
-		sys.stdout.flush()
-
-	def prettySize(self, size, justify=False, color=None):
-		"""int -> byte/kilo/mega/giga converter. Optionally
-		justify the result. Output is a string.
-
-		@param size: integer
-		@param justify: optional boolean, defaults to False
-		@param color: optional color, defaults to green
-				as defined in portage.output
-
-		@returns a formatted and (escape sequenced)
-				colorized text string
-		"""
-		if color == None:
-			color = self.numbers
-		units = [" G"," M"," K"," B"]
-		# by using 1000 as the changeover, the integer portion
-		# of the number will never be more than 3 digits long
-		# but the true base 2 value of 1024 is used for the actual
-		# calulation to maintain better accuracy.
-		while len(units) and size >= 1000:
-			size = size / 1024.0
-			units.pop()
-		sizestr = "%.1f" %(round(size,1)) + units[-1]
-		if justify:
-			sizestr = " " + self.brace("[ ")  + \
-				color(sizestr.rjust(8)) + self.brace(" ]")
-		return sizestr
-
-	def yesNoAllPrompt(self, message="Do you want to proceed?"):
-		"""Print a prompt until user answer in yes/no/all. Return a
-		boolean for answer, and also may affect the 'accept_all' option.
-
-		@param message: optional different input string from the default
-				message of: "Do you want to proceed?"
-		@outputs to stdout
-		@modifies class var options['accept_all']
-		@rtype: bool
-		"""
-		user_string="xxx"
-		while not user_string.lower() in ["","y","n","a","yes","no","all"]:
-			self.eprompt(message+" [Y/n/a]: ")
-			user_string =  sys.stdin.readline().rstrip('\n')
-			user_string = user_string.strip()
-		if user_string.lower() in ["a","all"]:
-			self.options['accept_all'] = True
-		answer = user_string.lower() in ["","y","a","yes","all"]
-		return answer
-
-	def progress_controller(self, size, key, clean_list, file_type):
-		"""Callback function for doCleanup. It outputs data according to the
-		options configured.
-		Alternatively it handles user interaction for decisions that are
-		required.
-
-		@param size: Integer of the file(s) size
-		@param key: the filename/pkgname currently being processed
-		@param clean_list: list of files being processed.
-		"""
-		if not self.options['quiet']:
-			# pretty print mode
-			print(self.prettySize(size,True), self.pkg_color(key))
-		elif self.options['pretend'] or self.options['interactive']:
-			# file list mode
-			for file_ in clean_list:
-				print(file_)
-		if self.options['pretend']:
-			return False
-		elif not self.options['interactive'] \
-			or self.options['accept_all'] \
-			or self.yesNoAllPrompt("Do you want to delete this " + file_type + "?"):
-			return True
-		return False
-
-	def total(self, mode, size, num_files, verb, action):
-		"""outputs the formatted totals to stdout
-
-		@param mode: sets color and message. 1 of ['normal', 'deprecated']
-		@param size: total space savings
-		@param num_files: total number of files
-		@param verb: string eg. 1 of ["would be", "has been"]
-		@param action: string eg 1 of ['distfiles', 'packages']
-		"""
-		self.set_colors(mode)
-		if mode =="normal":
-			message="Total space from "+red(str(num_files))+" files "+\
-				verb+" freed in the " + action + " directory"
-			print( " ===========")
-			print( self.prettySize(size, True, red), message)
-		elif mode == "deprecated":
-			message = "Total space from "+red(str(num_files))+" package files\n"+\
-				"   Re-run the last command with the -D " +\
-				"option to clean them as well"
-			print( " ===========")
-			print( self.prettySize(size, True, red), message)
-
-	def list_pkgs(self, pkgs):
-		"""outputs the packages to stdout
-
-		@param pkgs: dict. of {cat/pkg-ver: src_uri,}
-		"""
-		indent = ' ' * 12
-		keys = sorted(pkgs)
-		for key in keys:
-			if pkgs[key]:
-				saved = ""
-			else:
-				saved = " ...distfile name(s) not known/saved"
-			print( indent,self.pkg_color(key) + saved)
-		print()
+    """Outputs data according to predetermined options and handles any user
+    interaction.
+
+    @param options: dictionary of boolean options as determined in cli.py
+                    used here: interactive, pretend, quiet, accept_all, nocolor.
+    """
+
+    def __init__(self, options):
+        if not options:
+            # set some defaults
+            self.options["interactive"] = False
+            self.options["pretend"] = True
+            self.options["quiet"] = False
+            self.options["accept_all"] = False
+            self.options["nocolor"] = False
+        else:
+            self.options = options
+        self.set_colors("normal")
+
+    def set_colors(self, mode):
+        """Sets the colors for the progress_controller
+        and prettysize output
+
+        @param mode: string, 1 of ["normal", "deprecated"]
+        """
+        if mode == "normal":
+            self.pkg_color = cpv  # green
+            self.numbers = number  # turquoise
+            self.brace = blue
+        elif mode == "deprecated":
+            self.pkg_color = yellow
+            self.numbers = teal  # darkgreen
+            self.brace = blue
+
+    def einfo(self, message=""):
+        """Display an info message depending on a color mode.
+
+        @param message: text string to display
+
+        @outputs to stdout.
+        """
+        if not self.options["nocolor"]:
+            prefix = " " + green("*")
+        else:
+            prefix = ">>>"
+        print(prefix, message)
+
+    def eprompt(self, message):
+        """Display a user question depending on a color mode.
+
+        @param message: text string to display
+
+        @output to stdout
+        """
+        if not self.options["nocolor"]:
+            prefix = " " + red(">") + " "
+        else:
+            prefix = "??? "
+        sys.stdout.write(prefix + message)
+        sys.stdout.flush()
+
+    def prettySize(self, size, justify=False, color=None):
+        """int -> byte/kilo/mega/giga converter. Optionally
+        justify the result. Output is a string.
+
+        @param size: integer
+        @param justify: optional boolean, defaults to False
+        @param color: optional color, defaults to green
+                        as defined in portage.output
+
+        @returns a formatted and (escape sequenced)
+                        colorized text string
+        """
+        if color == None:
+            color = self.numbers
+        units = [" G", " M", " K", " B"]
+        # by using 1000 as the changeover, the integer portion
+        # of the number will never be more than 3 digits long
+        # but the true base 2 value of 1024 is used for the actual
+        # calculation to maintain better accuracy.
+        while len(units) and size >= 1000:
+            size = size / 1024.0
+            units.pop()
+        sizestr = "%.1f" % (round(size, 1)) + units[-1]
+        if justify:
+            sizestr = (
+                " " + self.brace("[ ") + color(sizestr.rjust(8)) + self.brace(" ]")
+            )
+        return sizestr
+
+    def yesNoAllPrompt(self, message="Do you want to proceed?"):
+        """Print a prompt until user answer in yes/no/all. Return a
+        boolean for answer, and also may affect the 'accept_all' option.
+
+        @param message: optional different input string from the default
+                        message of: "Do you want to proceed?"
+        @outputs to stdout
+        @modifies class var options['accept_all']
+        @rtype: bool
+        """
+        user_string = "xxx"
+        while not user_string.lower() in ["", "y", "n", "a", "yes", "no", "all"]:
+            self.eprompt(message + " [Y/n/a]: ")
+            user_string = sys.stdin.readline().rstrip("\n")
+            user_string = user_string.strip()
+        if user_string.lower() in ["a", "all"]:
+            self.options["accept_all"] = True
+        answer = user_string.lower() in ["", "y", "a", "yes", "all"]
+        return answer
+
+    def progress_controller(self, size, key, clean_list, file_type):
+        """Callback function for doCleanup. It outputs data according to the
+        options configured.
+        Alternatively it handles user interaction for decisions that are
+        required.
+
+        @param size: Integer of the file(s) size
+        @param key: the filename/pkgname currently being processed
+        @param clean_list: list of files being processed.
+        """
+        if not self.options["quiet"]:
+            # pretty print mode
+            print(self.prettySize(size, True), self.pkg_color(key))
+        elif self.options["pretend"] or self.options["interactive"]:
+            # file list mode
+            for file_ in clean_list:
+                print(file_)
+        if self.options["pretend"]:
+            return False
+        elif (
+            not self.options["interactive"]
+            or self.options["accept_all"]
+            or self.yesNoAllPrompt("Do you want to delete this " + file_type + "?")
+        ):
+            return True
+        return False
+
+    def total(self, mode, size, num_files, verb, action):
+        """outputs the formatted totals to stdout
+
+        @param mode: sets color and message. 1 of ['normal', 'deprecated']
+        @param size: total space savings
+        @param num_files: total number of files
+        @param verb: string eg. 1 of ["would be", "has been"]
+        @param action: string eg 1 of ['distfiles', 'packages']
+        """
+        self.set_colors(mode)
+        if mode == "normal":
+            message = (
+                "Total space from "
+                + red(str(num_files))
+                + " files "
+                + verb
+                + " freed in the "
+                + action
+                + " directory"
+            )
+            print(" ===========")
+            print(self.prettySize(size, True, red), message)
+        elif mode == "deprecated":
+            message = (
+                "Total space from "
+                + red(str(num_files))
+                + " package files\n"
+                + "   Re-run the last command with the -D "
+                + "option to clean them as well"
+            )
+            print(" ===========")
+            print(self.prettySize(size, True, red), message)
+
+    def list_pkgs(self, pkgs):
+        """outputs the packages to stdout
+
+        @param pkgs: dict. of {cat/pkg-ver: src_uri,}
+        """
+        indent = " " * 12
+        keys = sorted(pkgs)
+        for key in keys:
+            if pkgs[key]:
+                saved = ""
+            else:
+                saved = " ...distfile name(s) not known/saved"
+            print(indent, self.pkg_color(key) + saved)
+        print()

diff --git a/pym/gentoolkit/eclean/pkgindex.py b/pym/gentoolkit/eclean/pkgindex.py
index 46c734d..617b437 100644
--- a/pym/gentoolkit/eclean/pkgindex.py
+++ b/pym/gentoolkit/eclean/pkgindex.py
@@ -14,77 +14,77 @@ import portage
 
 
 class PkgIndex:
-	"""Handle the cleaning of the binpkg Package
-	Index file
-
-	@type output: class
-	@param output: optional output class for printing
-	"""
-
-	def __init__(self, controller=None):
-		self.controller = controller
-		# backup command line call
-		self.emaint_cmd = "%s/usr/sbin/emaint --fix binhost" % EPREFIX
-
-
-	def _get_emaint_binhost(self):
-		"""Obtain a reference to the binhost module class
-
-		@sets: self.binhost to BinhostHandler class
-		@rtype: boolean
-		"""
-                # About noqa below: I don't understand how this code can run at all.
-                # TODO: verify soundness
-		try:
-			self.emaint_control = Modules()  # noqa
-			self.binhost = self.emaint_control._get_class('binhost')
-		except InvalidModuleName as er:  # noqa
-			print( pp.error("Error importing emaint binhost module"), file=sys.stderr)
-			print( pp.error("Original error: " + er), file=sys.stderr)
-		except:
-			return False
-		return True
-
-
-	def _load_modules(self):
-		"""Import the emaint modules and report the success/fail of them
-		"""
-		try:
-			from emaint.module import Modules  # noqa
-			from emaint.main import TaskHandler  # noqa
-		except ImportError:
-			return False
-		return True
-
-
-	def clean_pkgs_index(self,):
-		"""This will clean the binpkgs packages index file"""
-		go = self._load_modules()
-		if go:
-			if self.get_emaint_binhost():
-				self.taskmaster = TaskHandler(show_progress_bar=True)  # noqa
-				tasks = [self.binhost]
-				self.taskmaster.run_tasks(tasks)
-
-
-	def call_emaint(self):
-		"""Run the stand alone emaint script from
-		a subprocess call.
-
-		@rtype: integer
-		@return: the difference in file size
-		"""
-		file_ = os.path.join(portage.settings['PKGDIR'], 'Packages')
-		statinfo = os.stat(file_)
-		size1 = statinfo.st_size
-		try:
-			retcode = subprocess.call(self.emaint_cmd, shell=True)
-			if retcode < 0:
-				print( pp.error("Child was terminated by signal" + str(-retcode)), file=sys.stderr)
-		except OSError as e:
-			print( pp.error("Execution failed:" + e), file=sys.stderr)
-		print()
-		statinfo = os.stat(file_)
-		clean_size = size1 - statinfo.st_size
-		self.controller(clean_size, "Packages Index", file_, "Index")
-		return clean_size
+    """Handle the cleaning of the binpkg Package
+    Index file
+
+    @type output: class
+    @param output: optional output class for printing
+    """
+
+    def __init__(self, controller=None):
+        self.controller = controller
+        # backup command line call
+        self.emaint_cmd = "%s/usr/sbin/emaint --fix binhost" % EPREFIX
+
+    def _get_emaint_binhost(self):
+        """Obtain a reference to the binhost module class
+
+        @sets: self.binhost to BinhostHandler class
+        @rtype: boolean
+        """
+        # About noqa below: I don't understand how this code can run at all.
+        # TODO: verify soundness
+        try:
+            self.emaint_control = Modules()  # noqa
+            self.binhost = self.emaint_control._get_class("binhost")
+        except InvalidModuleName as er:  # noqa
+            print(pp.error("Error importing emaint binhost module"), file=sys.stderr)
+            print(pp.error("Original error: " + er), file=sys.stderr)
+        except:
+            return False
+        return True
+
+    def _load_modules(self):
+        """Import the emaint modules and report the success/fail of them"""
+        try:
+            from emaint.module import Modules  # noqa
+            from emaint.main import TaskHandler  # noqa
+        except ImportError:
+            return False
+        return True
+
+    def clean_pkgs_index(
+        self,
+    ):
+        """This will clean the binpkgs packages index file"""
+        go = self._load_modules()
+        if go:
+            if self.get_emaint_binhost():
+                self.taskmaster = TaskHandler(show_progress_bar=True)  # noqa
+                tasks = [self.binhost]
+                self.taskmaster.run_tasks(tasks)
+
+    def call_emaint(self):
+        """Run the stand alone emaint script from
+        a subprocess call.
+
+        @rtype: integer
+        @return: the difference in file size
+        """
+        file_ = os.path.join(portage.settings["PKGDIR"], "Packages")
+        statinfo = os.stat(file_)
+        size1 = statinfo.st_size
+        try:
+            retcode = subprocess.call(self.emaint_cmd, shell=True)
+            if retcode < 0:
+                print(
+                    pp.error("Child was terminated by signal" + str(-retcode)),
+                    file=sys.stderr,
+                )
+        except OSError as e:
+            print(pp.error("Execution failed:" + e), file=sys.stderr)
+        print()
+        statinfo = os.stat(file_)
+        clean_size = size1 - statinfo.st_size
+        self.controller(clean_size, "Packages Index", file_, "Index")
+        return clean_size

diff --git a/pym/gentoolkit/eclean/search.py b/pym/gentoolkit/eclean/search.py
index 8f6e52f..cb695c0 100644
--- a/pym/gentoolkit/eclean/search.py
+++ b/pym/gentoolkit/eclean/search.py
@@ -14,8 +14,12 @@ from portage.dep import Atom, use_reduce
 from portage.dep._slot_operator import strip_slots
 
 import gentoolkit.pprinter as pp
-from gentoolkit.eclean.exclude import (exclDictMatchCP, exclDictExpand,
-	exclDictExpandPkgname, exclMatchFilename)
+from gentoolkit.eclean.exclude import (
+    exclDictMatchCP,
+    exclDictExpand,
+    exclDictExpandPkgname,
+    exclMatchFilename,
+)
 
 
 # Misc. shortcuts to some portage stuff:
@@ -23,7 +27,7 @@ port_settings = portage.settings
 pkgdir = port_settings["PKGDIR"]
 
 err = sys.stderr
-deprecated_message=""""Deprecation Warning: Installed package: %s
+deprecated_message = """"Deprecation Warning: Installed package: %s
 	Is no longer in the tree or an installed overlay"""
 DEPRECATED = pp.warn(deprecated_message)
 
@@ -31,574 +35,580 @@ debug_modules = []
 
 
 def dprint(module, message):
-	if module in debug_modules:
-		print(message)
+    if module in debug_modules:
+        print(message)
 
 
 def get_distdir():
-	"""Returns DISTDIR if sane, else barfs."""
+    """Returns DISTDIR if sane, else barfs."""
+
+    d = portage.settings["DISTDIR"]
+    if not os.path.isdir(d):
+        e = pp.error("%s does not appear to be a directory.\n" % d)
+        e += pp.error("Please set DISTDIR to a sane value.\n")
+        e += pp.error("(Check your make.conf file and environment).")
+        print(e, file=sys.stderr)
+        exit(1)
+    return d
 
-	d = portage.settings["DISTDIR"]
-	if not os.path.isdir(d):
-		e = pp.error("%s does not appear to be a directory.\n" % d)
-		e += pp.error("Please set DISTDIR to a sane value.\n")
-		e += pp.error("(Check your make.conf file and environment).")
-		print( e, file=sys.stderr)
-		exit(1)
-	return d
 
 distdir = get_distdir()
 
 
 class DistfilesSearch:
-	"""
-
-		@param output: verbose output method or (lambda x: None) to turn off
-		@param vardb: defaults to portage.db[portage.root]["vartree"].dbapi
-					is overridden for testing.
-		@param portdb: defaults to portage.portdb and is overriden for testing.
-"""
-
-	def __init__(self,
-			output,
-			portdb=portage.portdb,
-			vardb=portage.db[portage.root]["vartree"].dbapi,
-			):
-		self.vardb =vardb
-		self.portdb = portdb
-		self.output = output
-		self.installed_cpvs = None
-
-	def findDistfiles(self,
-			exclude=None,
-			destructive=False,
-			fetch_restricted=False,
-			package_names=False,
-			time_limit=0,
-			size_limit=0,
-			_distdir=distdir,
-			deprecate=False,
-			extra_checks=()
-			):
-		"""Find all obsolete distfiles.
-
-		XXX: what about cvs ebuilds?
-		I should install some to see where it goes...
-
-		@param exclude: an exclusion dict as defined in
-				exclude.parseExcludeFile class.
-		@param destructive: boolean, defaults to False
-		@param fetch_restricted: boolean, defaults to False
-		@param package_names: boolean, defaults to False.
-		@param time_limit: integer time value as returned by parseTime()
-		@param size_limit: integer value of max. file size to keep or 0 to ignore.
-		@param _distdir: path to the distfiles dir being checked, defaults to portage.
-		@param deprecate: bool to control checking the clean dict. files for exclusion
-
-		@rtype: dict
-		@return dict. of package files to clean i.e. {'cat/pkg-ver.tbz2': [filename],}
-		"""
-		if exclude is None:
-			exclude = {}
-		clean_me = {}
-		pkgs = {}
-		saved = {}
-		deprecated = {}
-		installed_included = False
-		# create a big CPV->SRC_URI dict of packages
-		# whose distfiles should be kept
-		if (not destructive) or fetch_restricted:
-			self.output("...non-destructive type search")
-			pkgs, _deprecated = self._non_destructive(destructive, fetch_restricted)
-			deprecated.update(_deprecated)
-			installed_included = True
-		if destructive:
-			self.output("...destructive type search: %d packages already found" %len(pkgs))
-			pkgs, _deprecated = self._destructive(package_names,
-					exclude, pkgs, installed_included)
-			deprecated.update(_deprecated)
-		# gather the files to be cleaned
-		self.output("...checking limits for %d ebuild sources"
-				%len(pkgs))
-
-		checks = self._get_default_checks(size_limit, time_limit, exclude, destructive)
-		checks.extend(extra_checks)
-		clean_me = self._check_limits(_distdir, checks, clean_me)
-		# remove any protected files from the list
-		self.output("...removing protected sources from %s candidates to clean"
-				%len(clean_me))
-		clean_me = self._remove_protected(pkgs, clean_me)
-		if not deprecate and len(exclude) and len(clean_me):
-			self.output("...checking final for exclusion from " +\
-				"%s remaining candidates to clean" %len(clean_me))
-			clean_me, saved = self._check_excludes(exclude, clean_me)
-		return clean_me, saved, deprecated
-
-
-####################### begin _check_limits code block
-
-	def _get_default_checks(self, size_limit, time_limit, excludes, destructive):
-		#checks =[(self._isreg_check_, "is_reg_check")]
-		checks =[self._isreg_check_]
-		if 'filenames' in excludes:
-			#checks.append((partial(self._filenames_check_, excludes), "Filenames_check"))
-			checks.append(partial(self._filenames_check_, excludes))
-		else:
-			self.output("   - skipping exclude filenames check")
-		if size_limit:
-			#checks.append((partial(self._size_check_, size_limit), "size_check"))
-			checks.append(partial(self._size_check_, size_limit))
-		else:
-			self.output("   - skipping size limit check")
-		if time_limit:
-			#print("time_limit = ", time_limit/1000000,"M sec")
-			#checks.append((partial(self._time_check_, time_limit), "time_check"))
-			checks.append(partial(self._time_check_, time_limit))
-		else:
-			self.output("   - skipping time limit check")
-		if destructive:
-			self.output("   - skipping dot files check")
-		else:
-			checks.append(self._dotfile_check_)
-		return checks
-
-
-	def _check_limits(self,
-			_distdir,
-			checks,
-			clean_me=None
-			):
-		"""Checks files if they exceed size and/or time_limits, etc.
-
-		To start with everything is considered dirty and is excluded
-		only if it matches some condition.
-		"""
-		if clean_me is None:
-			clean_me = {}
-		for file in os.listdir(_distdir):
-			filepath = os.path.join(_distdir, file)
-			try:
-				file_stat = os.lstat(filepath)
-			except EnvironmentError:
-				continue
-			is_dirty = False
-			#for check, check_name in checks:
-			for check in checks:
-				should_break, is_dirty = check(file_stat, file)
-				if should_break:
-					break
-
-			if is_dirty:
-				#print( "%s Adding file to clean_list:" %check_name, file)
-				clean_me[file]=[filepath]
-		return clean_me
-
-	@staticmethod
-	def _isreg_check_(file_stat, file):
-		"""check if file is a regular file."""
-		is_reg_file = stat.S_ISREG(file_stat[stat.ST_MODE])
-		return  not is_reg_file, is_reg_file
-
-	@staticmethod
-	def _size_check_(size_limit, file_stat, file):
-		"""checks if the file size exceeds the size_limit"""
-		if (file_stat[stat.ST_SIZE] >= size_limit):
-			#print( "size mismatch ", file, file_stat[stat.ST_SIZE])
-			return True, False
-		return False, True
-
-	@staticmethod
-	def _time_check_(time_limit, file_stat, file):
-		"""checks if the file exceeds the time_limit
-		(think forward, not back, time keeps increasing)"""
-		if (file_stat[stat.ST_MTIME] >= time_limit):
-			#print( "time match too young ", file, file_stat[stat.ST_MTIME]/1000000,"M sec.")
-			return True, False
-		#print( "time match too old", file, file_stat[stat.ST_MTIME]/1000000,"M sec.")
-		return False, True
-
-	@staticmethod
-	def _filenames_check_(exclude, file_stat, file):
-		"""checks if the file matches an exclusion file listing"""
-		# Try to match file name directly
-		if file in exclude['filenames']:
-			return True, False
-		# See if file matches via regular expression matching
-		else:
-			file_match = False
-			for file_entry in exclude['filenames']:
-				if exclude['filenames'][file_entry].match(file):
-					file_match = True
-					break
-		if file_match:
-			#print( "filename match ", file)
-			return True, False
-		return False, True
-
-	@staticmethod
-	def _dotfile_check_(file_stat, file):
-		"""check if file is a regular file."""
-		head, tail = os.path.split(file)
-		if tail:
-			is_dot_file = tail.startswith('.')
-		return  is_dot_file, not is_dot_file
-
-####################### end _check_limits code block
-
-	@staticmethod
-	def _remove_protected(
-			pkgs,
-			clean_me
-			):
-		"""Remove files owned by some protected packages.
-
-		@returns packages to clean
-		@rtype: dictionary
-		"""
-		for cpv in pkgs:
-			uris = pkgs[cpv].split()
-			uris.reverse()
-			while uris:
-				uri = uris.pop()
-				if uris and uris[-1] == "->":
-					operator = uris.pop()  # noqa
-					file = uris.pop()
-				else:
-					file = os.path.basename(uri)
-				if file in clean_me:
-					del clean_me[file]
-			# no need to waste IO time if there is nothing left to clean
-			if not len(clean_me):
-				return clean_me
-		return clean_me
-
-	def _non_destructive(self,
-			destructive,
-			fetch_restricted,
-			pkgs_ = None,
-			hosts_cpvs=None
-			):
-		"""performs the non-destructive checks
-
-		@param destructive: boolean
-		@param pkgs_: starting dictionary to add to
-				defaults to {}.
-
-		@returns packages and thier SRC_URI's: {cpv: src_uri,}
-		@rtype: dictionary
-		"""
-		if pkgs_ is None:
-			pkgs = {}
-		else:
-			pkgs = pkgs_.copy()
-		deprecated = {}
-		# the following code block was split to optimize for speed
-		# list all CPV from portree (yeah, that takes time...)
-		self.output("   - getting complete ebuild list")
-		cpvs = set(self.portdb.cpv_all())
-		installed_cpvs = set(self.vardb.cpv_all())
-		# now add any installed cpv's that are not in the tree or overlays
-		cpvs.update(installed_cpvs)
-		# Add any installed cpvs from hosts on the network, if any
-		if hosts_cpvs:
-			cpvs.update(hosts_cpvs)
-			installed_cpvs.update(hosts_cpvs)
-		if fetch_restricted and destructive:
-			self.output("   - getting source file names " +
-				"for %d installed ebuilds" %len(installed_cpvs))
-			pkgs, _deprecated = self._unrestricted(pkgs, installed_cpvs)
-			deprecated.update(_deprecated)
-			# remove the installed cpvs then check the remaining for fetch restiction
-			cpvs.difference_update(installed_cpvs)
-			self.output("   - getting fetch-restricted source file names " +
-				"for %d remaining ebuilds" %len(cpvs))
-			pkgs, _deprecated = self._fetch_restricted(pkgs, cpvs)
-			deprecated.update(_deprecated)
-			# save the installed cpv list to re-use in _destructive()
-			self.installed_cpvs = installed_cpvs.copy()
-		else:
-			self.output("   - getting source file names " +
-				"for %d ebuilds" %len(cpvs))
-			pkgs, _deprecated = self._unrestricted(pkgs, cpvs)
-			deprecated.update(_deprecated)
-		return pkgs, deprecated
-
-	def _fetch_restricted(self, pkgs_, cpvs):
-		"""perform fetch restricted non-destructive source
-		filename lookups
-
-		@param pkgs_: starting dictionary to add to
-		@param cpvs: set of (cat/pkg-ver, ...) identifiers
-
-		@return a new pkg dictionary
-		@rtype: dictionary
-		"""
-		if pkgs_ is None:
-			pkgs = {}
-		else:
-			pkgs = pkgs_.copy()
-		deprecated = {}
-		for cpv in cpvs:
-			# get SRC_URI and RESTRICT from aux_get
-			try: # main portdb
-				(src_uri,restrict) = \
-					self.portdb.aux_get(cpv,["SRC_URI","RESTRICT"])
-				# keep fetch-restricted check
-				# inside try so it is bypassed on KeyError
-				if 'fetch' in restrict:
-					pkgs[cpv] = src_uri
-			except KeyError:
-				try: # installed vardb
-					(src_uri,restrict) = \
-						self.vardb.aux_get(cpv,["SRC_URI","RESTRICT"])
-					deprecated[cpv] = src_uri
-					self.output(DEPRECATED %cpv)
-					# keep fetch-restricted check
-					# inside try so it is bypassed on KeyError
-					if 'fetch' in restrict:
-						pkgs[cpv] = src_uri
-				except KeyError:
-					self.output("   - Key Error looking up: " + cpv)
-		return pkgs, deprecated
-
-	def _unrestricted(self, pkgs_, cpvs):
-		"""Perform unrestricted source filenames lookups
-
-		@param pkgs_: starting packages dictionary
-		@param cpvs: set of (cat/pkg-ver, ...) identifiers
-
-		@return a new pkg dictionary
-		@rtype: dictionary
-		"""
-		if pkgs_ is None:
-			pkgs = {}
-		else:
-			pkgs = pkgs_.copy()
-		deprecated = {}
-		for cpv in cpvs:
-			# get SRC_URI from aux_get
-			try:
-				pkgs[cpv] = self.portdb.aux_get(cpv,["SRC_URI"])[0]
-			except KeyError:
-				try: # installed vardb
-					pkgs[cpv] = self.vardb.aux_get(cpv,["SRC_URI"])[0]
-					deprecated[cpv] = pkgs[cpv]
-					self.output(DEPRECATED %cpv)
-				except KeyError:
-					self.output("   - Key Error looking up: " + cpv)
-		return pkgs, deprecated
-
-	def _destructive(self,
-			package_names,
-			exclude,
-			pkgs_=None,
-			installed_included=False
-			):
-		"""Builds on pkgs according to input options
-
-		@param package_names: boolean
-		@param exclude: an exclusion dict as defined in
-				exclude.parseExcludeFile class.
-		@param pkgs: starting dictionary to add to
-				defaults to {}.
-		@param installed_included: bool. pkgs already
-				has the installed cpv's added.
-
-		@returns pkgs: {cpv: src_uri,}
-		"""
-		if pkgs_ is None:
-			pkgs = {}
-		else:
-			pkgs = pkgs_.copy()
-		deprecated = {}
-		pkgset = set()
-		if not installed_included:
-			if not package_names:
-				# list all installed CPV's from vartree
-				#print( "_destructive: getting vardb.cpv_all")
-				if not self.installed_cpvs:
-					pkgset.update(self.vardb.cpv_all())
-				else:
-					pkgset.update(self.installed_cpvs)
-				self.output("   - processing %s installed ebuilds" % len(pkgset))
-			elif package_names:
-				# list all CPV's from portree for CP's in vartree
-				#print( "_destructive: getting vardb.cp_all")
-				cps = self.vardb.cp_all()
-				self.output("   - processing %s installed packages" % len(cps))
-				for package in cps:
-					pkgset.update(self.portdb.cp_list(package))
-		self.output("   - processing excluded")
-		excludes = self._get_excludes(exclude)
-		excludes_length = len(excludes)
-		dprint("excludes", "EXCLUDES LENGTH =%d" %excludes_length)
-		pkgset.update(excludes)
-		pkgs_done = set(list(pkgs))
-		pkgset.difference_update(pkgs_done)
-		self.output(
-			"   - (%d of %d total) additional excluded packages to get source filenames for"
-			%(len(pkgset), excludes_length))
-		#self.output("   - processing %d ebuilds for filenames" %len(pkgset))
-		pkgs, _deprecated = self._unrestricted(pkgs, pkgset)
-		deprecated.update(_deprecated)
-		#self.output("   - done...")
-		return pkgs, deprecated
-
-	def _get_excludes(self, exclude):
-		"""Expands the exclude dictionary into a set of
-		CPV's
-
-		@param exclude: dictionary of exclusion categories,
-			packages to exclude from the cleaning
-
-		@rtype: set
-		@return set of package cpv's
-		"""
-		pkgset = set()
-		for cp in exclDictExpand(exclude):
-			# add packages from the exclude file
-			dprint("excludes", "_GET_EXCLUDES, cp=" + \
-				cp+", "+str(self.portdb.cp_list(cp)))
-			pkgset.update(self.portdb.cp_list(cp))
-		return pkgset
-
-	def _check_excludes(self, exclude, clean_me):
-		"""Performs a last minute check on remaining filenames
-		to see if they should be protected.  Since if the pkg-version
-		was deprecated it would not have been matched to a
-		source filename and removed.
-
-		@param exclude: an exclusion dictionary
-		@param clean_me: the list of filenames for cleaning
-
-		@rtype: dict of packages to clean
-		"""
-		saved = {}
-		pn_excludes = exclDictExpandPkgname(exclude)
-		dprint("excludes", "_check_excludes: made it here ;)")
-		if not pn_excludes:
-			return clean_me, saved
-		dprint("excludes", pn_excludes)
-		for key in list(clean_me):
-			if exclMatchFilename(pn_excludes, key):
-				saved[key] = clean_me[key]
-				del clean_me[key]
-				self.output("   ...Saved excluded package filename: " + key)
-		return clean_me, saved
+    """
+
+    @param output: verbose output method or (lambda x: None) to turn off
+    @param vardb: defaults to portage.db[portage.root]["vartree"].dbapi
+                            is overridden for testing.
+    @param portdb: defaults to portage.portdb and is overridden for testing."""
+
+    def __init__(
+        self,
+        output,
+        portdb=portage.portdb,
+        vardb=portage.db[portage.root]["vartree"].dbapi,
+    ):
+        self.vardb = vardb
+        self.portdb = portdb
+        self.output = output
+        self.installed_cpvs = None
+
+    def findDistfiles(
+        self,
+        exclude=None,
+        destructive=False,
+        fetch_restricted=False,
+        package_names=False,
+        time_limit=0,
+        size_limit=0,
+        _distdir=distdir,
+        deprecate=False,
+        extra_checks=(),
+    ):
+        """Find all obsolete distfiles.
+
+        XXX: what about cvs ebuilds?
+        I should install some to see where it goes...
+
+        @param exclude: an exclusion dict as defined in
+                        exclude.parseExcludeFile class.
+        @param destructive: boolean, defaults to False
+        @param fetch_restricted: boolean, defaults to False
+        @param package_names: boolean, defaults to False.
+        @param time_limit: integer time value as returned by parseTime()
+        @param size_limit: integer value of max. file size to keep or 0 to ignore.
+        @param _distdir: path to the distfiles dir being checked, defaults to portage.
+        @param deprecate: bool to control checking the clean dict. files for exclusion
+
+        @rtype: dict
+        @return dict. of package files to clean i.e. {'cat/pkg-ver.tbz2': [filename],}
+        """
+        if exclude is None:
+            exclude = {}
+        clean_me = {}
+        pkgs = {}
+        saved = {}
+        deprecated = {}
+        installed_included = False
+        # create a big CPV->SRC_URI dict of packages
+        # whose distfiles should be kept
+        if (not destructive) or fetch_restricted:
+            self.output("...non-destructive type search")
+            pkgs, _deprecated = self._non_destructive(destructive, fetch_restricted)
+            deprecated.update(_deprecated)
+            installed_included = True
+        if destructive:
+            self.output(
+                "...destructive type search: %d packages already found" % len(pkgs)
+            )
+            pkgs, _deprecated = self._destructive(
+                package_names, exclude, pkgs, installed_included
+            )
+            deprecated.update(_deprecated)
+        # gather the files to be cleaned
+        self.output("...checking limits for %d ebuild sources" % len(pkgs))
+
+        checks = self._get_default_checks(size_limit, time_limit, exclude, destructive)
+        checks.extend(extra_checks)
+        clean_me = self._check_limits(_distdir, checks, clean_me)
+        # remove any protected files from the list
+        self.output(
+            "...removing protected sources from %s candidates to clean" % len(clean_me)
+        )
+        clean_me = self._remove_protected(pkgs, clean_me)
+        if not deprecate and len(exclude) and len(clean_me):
+            self.output(
+                "...checking final for exclusion from "
+                + "%s remaining candidates to clean" % len(clean_me)
+            )
+            clean_me, saved = self._check_excludes(exclude, clean_me)
+        return clean_me, saved, deprecated
+
+    # begin _check_limits code block
+
+    def _get_default_checks(self, size_limit, time_limit, excludes, destructive):
+        # checks =[(self._isreg_check_, "is_reg_check")]
+        checks = [self._isreg_check_]
+        if "filenames" in excludes:
+            # checks.append((partial(self._filenames_check_, excludes), "Filenames_check"))
+            checks.append(partial(self._filenames_check_, excludes))
+        else:
+            self.output("   - skipping exclude filenames check")
+        if size_limit:
+            # checks.append((partial(self._size_check_, size_limit), "size_check"))
+            checks.append(partial(self._size_check_, size_limit))
+        else:
+            self.output("   - skipping size limit check")
+        if time_limit:
+            # print("time_limit = ", time_limit/1000000,"M sec")
+            # checks.append((partial(self._time_check_, time_limit), "time_check"))
+            checks.append(partial(self._time_check_, time_limit))
+        else:
+            self.output("   - skipping time limit check")
+        if destructive:
+            self.output("   - skipping dot files check")
+        else:
+            checks.append(self._dotfile_check_)
+        return checks
+
+    def _check_limits(self, _distdir, checks, clean_me=None):
+        """Checks files if they exceed size and/or time_limits, etc.
+
+        To start with everything is considered dirty and is excluded
+        only if it matches some condition.
+        """
+        if clean_me is None:
+            clean_me = {}
+        for file in os.listdir(_distdir):
+            filepath = os.path.join(_distdir, file)
+            try:
+                file_stat = os.lstat(filepath)
+            except EnvironmentError:
+                continue
+            is_dirty = False
+            # for check, check_name in checks:
+            for check in checks:
+                should_break, is_dirty = check(file_stat, file)
+                if should_break:
+                    break
+
+            if is_dirty:
+                # print( "%s Adding file to clean_list:" %check_name, file)
+                clean_me[file] = [filepath]
+        return clean_me
+
+    @staticmethod
+    def _isreg_check_(file_stat, file):
+        """check if file is a regular file."""
+        is_reg_file = stat.S_ISREG(file_stat[stat.ST_MODE])
+        return not is_reg_file, is_reg_file
+
+    @staticmethod
+    def _size_check_(size_limit, file_stat, file):
+        """checks if the file size exceeds the size_limit"""
+        if file_stat[stat.ST_SIZE] >= size_limit:
+            # print( "size mismatch ", file, file_stat[stat.ST_SIZE])
+            return True, False
+        return False, True
+
+    @staticmethod
+    def _time_check_(time_limit, file_stat, file):
+        """checks if the file exceeds the time_limit
+        (think forward, not back, time keeps increasing)"""
+        if file_stat[stat.ST_MTIME] >= time_limit:
+            # print( "time match too young ", file, file_stat[stat.ST_MTIME]/1000000,"M sec.")
+            return True, False
+        # print( "time match too old", file, file_stat[stat.ST_MTIME]/1000000,"M sec.")
+        return False, True
+
+    @staticmethod
+    def _filenames_check_(exclude, file_stat, file):
+        """checks if the file matches an exclusion file listing"""
+        # Try to match file name directly
+        if file in exclude["filenames"]:
+            return True, False
+        # See if file matches via regular expression matching
+        else:
+            file_match = False
+            for file_entry in exclude["filenames"]:
+                if exclude["filenames"][file_entry].match(file):
+                    file_match = True
+                    break
+        if file_match:
+            # print( "filename match ", file)
+            return True, False
+        return False, True
+
+    @staticmethod
+    def _dotfile_check_(file_stat, file):
+        """check if file is a regular file."""
+        head, tail = os.path.split(file)
+        if tail:
+            is_dot_file = tail.startswith(".")
+        return is_dot_file, not is_dot_file
+
+    # end _check_limits code block
+
+    @staticmethod
+    def _remove_protected(pkgs, clean_me):
+        """Remove files owned by some protected packages.
+
+        @returns packages to clean
+        @rtype: dictionary
+        """
+        for cpv in pkgs:
+            uris = pkgs[cpv].split()
+            uris.reverse()
+            while uris:
+                uri = uris.pop()
+                if uris and uris[-1] == "->":
+                    operator = uris.pop()  # noqa
+                    file = uris.pop()
+                else:
+                    file = os.path.basename(uri)
+                if file in clean_me:
+                    del clean_me[file]
+            # no need to waste IO time if there is nothing left to clean
+            if not len(clean_me):
+                return clean_me
+        return clean_me
+
+    def _non_destructive(
+        self, destructive, fetch_restricted, pkgs_=None, hosts_cpvs=None
+    ):
+        """performs the non-destructive checks
+
+        @param destructive: boolean
+        @param pkgs_: starting dictionary to add to
+                        defaults to {}.
+
+        @returns packages and their SRC_URI's: {cpv: src_uri,}
+        @rtype: dictionary
+        """
+        if pkgs_ is None:
+            pkgs = {}
+        else:
+            pkgs = pkgs_.copy()
+        deprecated = {}
+        # the following code block was split to optimize for speed
+        # list all CPV from portree (yeah, that takes time...)
+        self.output("   - getting complete ebuild list")
+        cpvs = set(self.portdb.cpv_all())
+        installed_cpvs = set(self.vardb.cpv_all())
+        # now add any installed cpv's that are not in the tree or overlays
+        cpvs.update(installed_cpvs)
+        # Add any installed cpvs from hosts on the network, if any
+        if hosts_cpvs:
+            cpvs.update(hosts_cpvs)
+            installed_cpvs.update(hosts_cpvs)
+        if fetch_restricted and destructive:
+            self.output(
+                "   - getting source file names "
+                + "for %d installed ebuilds" % len(installed_cpvs)
+            )
+            pkgs, _deprecated = self._unrestricted(pkgs, installed_cpvs)
+            deprecated.update(_deprecated)
+            # remove the installed cpvs then check the remaining for fetch restriction
+            cpvs.difference_update(installed_cpvs)
+            self.output(
+                "   - getting fetch-restricted source file names "
+                + "for %d remaining ebuilds" % len(cpvs)
+            )
+            pkgs, _deprecated = self._fetch_restricted(pkgs, cpvs)
+            deprecated.update(_deprecated)
+            # save the installed cpv list to re-use in _destructive()
+            self.installed_cpvs = installed_cpvs.copy()
+        else:
+            self.output(
+                "   - getting source file names " + "for %d ebuilds" % len(cpvs)
+            )
+            pkgs, _deprecated = self._unrestricted(pkgs, cpvs)
+            deprecated.update(_deprecated)
+        return pkgs, deprecated
+
+    def _fetch_restricted(self, pkgs_, cpvs):
+        """perform fetch restricted non-destructive source
+        filename lookups
+
+        @param pkgs_: starting dictionary to add to
+        @param cpvs: set of (cat/pkg-ver, ...) identifiers
+
+        @return a new pkg dictionary
+        @rtype: dictionary
+        """
+        if pkgs_ is None:
+            pkgs = {}
+        else:
+            pkgs = pkgs_.copy()
+        deprecated = {}
+        for cpv in cpvs:
+            # get SRC_URI and RESTRICT from aux_get
+            try:  # main portdb
+                (src_uri, restrict) = self.portdb.aux_get(cpv, ["SRC_URI", "RESTRICT"])
+                # keep fetch-restricted check
+                # inside try so it is bypassed on KeyError
+                if "fetch" in restrict:
+                    pkgs[cpv] = src_uri
+            except KeyError:
+                try:  # installed vardb
+                    (src_uri, restrict) = self.vardb.aux_get(
+                        cpv, ["SRC_URI", "RESTRICT"]
+                    )
+                    deprecated[cpv] = src_uri
+                    self.output(DEPRECATED % cpv)
+                    # keep fetch-restricted check
+                    # inside try so it is bypassed on KeyError
+                    if "fetch" in restrict:
+                        pkgs[cpv] = src_uri
+                except KeyError:
+                    self.output("   - Key Error looking up: " + cpv)
+        return pkgs, deprecated
+
+    def _unrestricted(self, pkgs_, cpvs):
+        """Perform unrestricted source filenames lookups
+
+        @param pkgs_: starting packages dictionary
+        @param cpvs: set of (cat/pkg-ver, ...) identifiers
+
+        @return a new pkg dictionary
+        @rtype: dictionary
+        """
+        if pkgs_ is None:
+            pkgs = {}
+        else:
+            pkgs = pkgs_.copy()
+        deprecated = {}
+        for cpv in cpvs:
+            # get SRC_URI from aux_get
+            try:
+                pkgs[cpv] = self.portdb.aux_get(cpv, ["SRC_URI"])[0]
+            except KeyError:
+                try:  # installed vardb
+                    pkgs[cpv] = self.vardb.aux_get(cpv, ["SRC_URI"])[0]
+                    deprecated[cpv] = pkgs[cpv]
+                    self.output(DEPRECATED % cpv)
+                except KeyError:
+                    self.output("   - Key Error looking up: " + cpv)
+        return pkgs, deprecated
+
+    def _destructive(
+        self, package_names, exclude, pkgs_=None, installed_included=False
+    ):
+        """Builds on pkgs according to input options
+
+        @param package_names: boolean
+        @param exclude: an exclusion dict as defined in
+                        exclude.parseExcludeFile class.
+        @param pkgs: starting dictionary to add to
+                        defaults to {}.
+        @param installed_included: bool. pkgs already
+                        has the installed cpv's added.
+
+        @returns pkgs: {cpv: src_uri,}
+        """
+        if pkgs_ is None:
+            pkgs = {}
+        else:
+            pkgs = pkgs_.copy()
+        deprecated = {}
+        pkgset = set()
+        if not installed_included:
+            if not package_names:
+                # list all installed CPV's from vartree
+                # print( "_destructive: getting vardb.cpv_all")
+                if not self.installed_cpvs:
+                    pkgset.update(self.vardb.cpv_all())
+                else:
+                    pkgset.update(self.installed_cpvs)
+                self.output("   - processing %s installed ebuilds" % len(pkgset))
+            elif package_names:
+                # list all CPV's from portree for CP's in vartree
+                # print( "_destructive: getting vardb.cp_all")
+                cps = self.vardb.cp_all()
+                self.output("   - processing %s installed packages" % len(cps))
+                for package in cps:
+                    pkgset.update(self.portdb.cp_list(package))
+        self.output("   - processing excluded")
+        excludes = self._get_excludes(exclude)
+        excludes_length = len(excludes)
+        dprint("excludes", "EXCLUDES LENGTH =%d" % excludes_length)
+        pkgset.update(excludes)
+        pkgs_done = set(list(pkgs))
+        pkgset.difference_update(pkgs_done)
+        self.output(
+            "   - (%d of %d total) additional excluded packages to get source filenames for"
+            % (len(pkgset), excludes_length)
+        )
+        # self.output("   - processing %d ebuilds for filenames" %len(pkgset))
+        pkgs, _deprecated = self._unrestricted(pkgs, pkgset)
+        deprecated.update(_deprecated)
+        # self.output("   - done...")
+        return pkgs, deprecated
+
+    def _get_excludes(self, exclude):
+        """Expands the exclude dictionary into a set of
+        CPV's
+
+        @param exclude: dictionary of exclusion categories,
+                packages to exclude from the cleaning
+
+        @rtype: set
+        @return set of package cpv's
+        """
+        pkgset = set()
+        for cp in exclDictExpand(exclude):
+            # add packages from the exclude file
+            dprint(
+                "excludes",
+                "_GET_EXCLUDES, cp=" + cp + ", " + str(self.portdb.cp_list(cp)),
+            )
+            pkgset.update(self.portdb.cp_list(cp))
+        return pkgset
+
+    def _check_excludes(self, exclude, clean_me):
+        """Performs a last minute check on remaining filenames
+        to see if they should be protected.  Since if the pkg-version
+        was deprecated it would not have been matched to a
+        source filename and removed.
+
+        @param exclude: an exclusion dictionary
+        @param clean_me: the list of filenames for cleaning
+
+        @rtype: dict of packages to clean
+        """
+        saved = {}
+        pn_excludes = exclDictExpandPkgname(exclude)
+        dprint("excludes", "_check_excludes: made it here ;)")
+        if not pn_excludes:
+            return clean_me, saved
+        dprint("excludes", pn_excludes)
+        for key in list(clean_me):
+            if exclMatchFilename(pn_excludes, key):
+                saved[key] = clean_me[key]
+                del clean_me[key]
+                self.output("   ...Saved excluded package filename: " + key)
+        return clean_me, saved
 
 
 def _deps_equal(deps_a, eapi_a, deps_b, eapi_b, uselist=None):
-	"""Compare two dependency lists given a set of USE flags"""
-	if deps_a == deps_b: return True
+    """Compare two dependency lists given a set of USE flags"""
+    if deps_a == deps_b:
+        return True
 
-	deps_a = use_reduce(deps_a, uselist=uselist, eapi=eapi_a, token_class=Atom)
-	deps_b = use_reduce(deps_b, uselist=uselist, eapi=eapi_b, token_class=Atom)
-	strip_slots(deps_a)
-	strip_slots(deps_b)
-	return deps_a == deps_b
+    deps_a = use_reduce(deps_a, uselist=uselist, eapi=eapi_a, token_class=Atom)
+    deps_b = use_reduce(deps_b, uselist=uselist, eapi=eapi_b, token_class=Atom)
+    strip_slots(deps_a)
+    strip_slots(deps_b)
+    return deps_a == deps_b
 
 
 def findPackages(
-		options,
-		exclude=None,
-		destructive=False,
-		time_limit=0,
-		package_names=False,
-		pkgdir=None,
-		port_dbapi=portage.db[portage.root]["porttree"].dbapi,
-		var_dbapi=portage.db[portage.root]["vartree"].dbapi
-	):
-	"""Find obsolete binary packages.
-
-	@param options: dict of options determined at runtime
-	@type  options: dict
-	@param exclude: exclusion dict (as defined in the exclude.parseExcludeFile class)
-	@type  exclude: dict, optional
-	@param destructive: binpkg is obsolete if not installed (default: `False`)
-	@type  destructive: bool, optional
-	@param time_limit: exclude binpkg if newer than time value as returned by parseTime()
-	@type  time_limit: int, optional
-	@param package_names: exclude all binpkg versions if package is installed
-						  (used with `destructive=True`) (default: `False`)
-	@type  package_names: bool, optional
-	@param pkgdir: path to the binpkg cache (PKGDIR)
-	@type  pkgdir: str
-	@param port_dbapi: defaults to portage.db[portage.root]["porttree"].dbapi
-					   Can be overridden for tests.
-	@param  var_dbapi: defaults to portage.db[portage.root]["vartree"].dbapi
-					   Can be overridden for tests.
-
-	@return binary packages to remove. e.g. {'cat/pkg-ver': [filepath]}
-	@rtype: dict
-	"""
-	if exclude is None:
-		exclude = {}
-
-	# Access test, os.walk does not error for "no read permission"
-	try:
-		test = os.listdir(pkgdir)
-		del test
-	except EnvironmentError as er:
-		if options['ignore-failure']:
-			exit(0)
-		print(pp.error("Error accessing PKGDIR."), file=sys.stderr)
-		print(pp.error("(Check your make.conf file and environment)."), file=sys.stderr)
-		print(pp.error("Error: %s" % str(er)), file=sys.stderr)
-		exit(1)
-
-	# Create a dictionary of all installed packages
-	if destructive and package_names:
-		installed = dict.fromkeys(var_dbapi.cp_all())
-	else:
-		installed = {}
-
-	# Dictionary of binary packages to clean. Organized as cpv->[pkgs] in order
-	# to support FEATURES=binpkg-multi-instance.
-	dead_binpkgs = {}
-
-	bin_dbapi = portage.binarytree(pkgdir=pkgdir, settings=var_dbapi.settings).dbapi
-	for cpv in bin_dbapi.cpv_all():
-		cp = portage.cpv_getkey(cpv)
-
-		# Exclude per --exclude-file=...
-		if exclDictMatchCP(exclude, cp):
-			continue
-
-		# Exclude if binpkg is newer than --time-limit=...
-		if time_limit:
-			mtime = int(bin_dbapi.aux_get(cpv, ['_mtime_'])[0])
-			if mtime >= time_limit:
-				continue
-
-		# Exclude if binpkg exists in the porttree and not --deep
-		if not destructive and port_dbapi.cpv_exists(cpv):
-			if not options['changed-deps']:
-				continue
-
-			dep_keys = ('RDEPEND', 'PDEPEND')
-			keys = ('EAPI', 'USE') + dep_keys
-			binpkg_metadata = dict(zip(keys, bin_dbapi.aux_get(cpv, keys)))
-			ebuild_metadata = dict(zip(keys, port_dbapi.aux_get(cpv, keys)))
-
-			if _deps_equal(' '.join(binpkg_metadata[key] for key in dep_keys), binpkg_metadata['EAPI'],
-				' '.join(ebuild_metadata[key] for key in dep_keys), ebuild_metadata['EAPI'],
-				frozenset(binpkg_metadata['USE'].split())):
-				continue
-
-		if destructive and var_dbapi.cpv_exists(cpv):
-			# Exclude if an instance of the package is installed due to
-			# the --package-names option.
-			if cp in installed and port_dbapi.cpv_exists(cpv):
-				continue
-
-			# Exclude if BUILD_TIME of binpkg is same as vartree
-			buildtime = var_dbapi.aux_get(cpv, ['BUILD_TIME'])[0]
-			if buildtime == bin_dbapi.aux_get(cpv, ['BUILD_TIME'])[0]:
-				continue
-
-		binpkg_path = bin_dbapi.bintree.getname(cpv)
-		dead_binpkgs.setdefault(cpv, []).append(binpkg_path)
-
-	return dead_binpkgs
+    options,
+    exclude=None,
+    destructive=False,
+    time_limit=0,
+    package_names=False,
+    pkgdir=None,
+    port_dbapi=portage.db[portage.root]["porttree"].dbapi,
+    var_dbapi=portage.db[portage.root]["vartree"].dbapi,
+):
+    """Find obsolete binary packages.
+
+    @param options: dict of options determined at runtime
+    @type  options: dict
+    @param exclude: exclusion dict (as defined in the exclude.parseExcludeFile class)
+    @type  exclude: dict, optional
+    @param destructive: binpkg is obsolete if not installed (default: `False`)
+    @type  destructive: bool, optional
+    @param time_limit: exclude binpkg if newer than time value as returned by parseTime()
+    @type  time_limit: int, optional
+    @param package_names: exclude all binpkg versions if package is installed
+                                              (used with `destructive=True`) (default: `False`)
+    @type  package_names: bool, optional
+    @param pkgdir: path to the binpkg cache (PKGDIR)
+    @type  pkgdir: str
+    @param port_dbapi: defaults to portage.db[portage.root]["porttree"].dbapi
+                                       Can be overridden for tests.
+    @param  var_dbapi: defaults to portage.db[portage.root]["vartree"].dbapi
+                                       Can be overridden for tests.
+
+    @return binary packages to remove. e.g. {'cat/pkg-ver': [filepath]}
+    @rtype: dict
+    """
+    if exclude is None:
+        exclude = {}
+
+    # Access test, os.walk does not error for "no read permission"
+    try:
+        test = os.listdir(pkgdir)
+        del test
+    except EnvironmentError as er:
+        if options["ignore-failure"]:
+            exit(0)
+        print(pp.error("Error accessing PKGDIR."), file=sys.stderr)
+        print(pp.error("(Check your make.conf file and environment)."), file=sys.stderr)
+        print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+        exit(1)
+
+    # Create a dictionary of all installed packages
+    if destructive and package_names:
+        installed = dict.fromkeys(var_dbapi.cp_all())
+    else:
+        installed = {}
+
+    # Dictionary of binary packages to clean. Organized as cpv->[pkgs] in order
+    # to support FEATURES=binpkg-multi-instance.
+    dead_binpkgs = {}
+
+    bin_dbapi = portage.binarytree(pkgdir=pkgdir, settings=var_dbapi.settings).dbapi
+    for cpv in bin_dbapi.cpv_all():
+        cp = portage.cpv_getkey(cpv)
+
+        # Exclude per --exclude-file=...
+        if exclDictMatchCP(exclude, cp):
+            continue
+
+        # Exclude if binpkg is newer than --time-limit=...
+        if time_limit:
+            mtime = int(bin_dbapi.aux_get(cpv, ["_mtime_"])[0])
+            if mtime >= time_limit:
+                continue
+
+        # Exclude if binpkg exists in the porttree and not --deep
+        if not destructive and port_dbapi.cpv_exists(cpv):
+            if not options["changed-deps"]:
+                continue
+
+            dep_keys = ("RDEPEND", "PDEPEND")
+            keys = ("EAPI", "USE") + dep_keys
+            binpkg_metadata = dict(zip(keys, bin_dbapi.aux_get(cpv, keys)))
+            ebuild_metadata = dict(zip(keys, port_dbapi.aux_get(cpv, keys)))
+
+            if _deps_equal(
+                " ".join(binpkg_metadata[key] for key in dep_keys),
+                binpkg_metadata["EAPI"],
+                " ".join(ebuild_metadata[key] for key in dep_keys),
+                ebuild_metadata["EAPI"],
+                frozenset(binpkg_metadata["USE"].split()),
+            ):
+                continue
+
+        if destructive and var_dbapi.cpv_exists(cpv):
+            # Exclude if an instance of the package is installed due to
+            # the --package-names option.
+            if cp in installed and port_dbapi.cpv_exists(cpv):
+                continue
+
+            # Exclude if BUILD_TIME of binpkg is same as vartree
+            buildtime = var_dbapi.aux_get(cpv, ["BUILD_TIME"])[0]
+            if buildtime == bin_dbapi.aux_get(cpv, ["BUILD_TIME"])[0]:
+                continue
+
+        binpkg_path = bin_dbapi.bintree.getname(cpv)
+        dead_binpkgs.setdefault(cpv, []).append(binpkg_path)
+
+    return dead_binpkgs
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/ekeyword/ekeyword.py b/pym/gentoolkit/ekeyword/ekeyword.py
index da0fd58..13b93ad 100755
--- a/pym/gentoolkit/ekeyword/ekeyword.py
+++ b/pym/gentoolkit/ekeyword/ekeyword.py
@@ -51,7 +51,7 @@ import portage
 from portage.output import colorize, nocolor
 
 
-__version__ = 'git'
+__version__ = "git"
 
 # Operation object that describes how to perform a change.
 # Args:
@@ -62,432 +62,507 @@ __version__ = 'git'
 #      '^': Delete |arch| so it isn't listed at all
 #  arch: The required arch to update
 #  ref_arch: Set |arch| status to this arch (ignoring |op|)
-Op = collections.namedtuple('Op', ('op', 'arch', 'ref_arch'))
+Op = collections.namedtuple("Op", ("op", "arch", "ref_arch"))
 
 
 def warning(msg):
-	"""Write |msg| as a warning to stderr"""
-	print('warning: %s' % msg, file=sys.stderr)
+    """Write |msg| as a warning to stderr"""
+    print("warning: %s" % msg, file=sys.stderr)
 
 
 def keyword_to_arch(keyword):
-	"""Given a keyword, strip it down to its arch value
+    """Given a keyword, strip it down to its arch value
 
-	When an ARCH shows up in KEYWORDS, it may have prefixes like ~ or -.
-	Strip all that cruft off to get back to the ARCH.
-	"""
-	return keyword.lstrip('-~')
+    When an ARCH shows up in KEYWORDS, it may have prefixes like ~ or -.
+    Strip all that cruft off to get back to the ARCH.
+    """
+    return keyword.lstrip("-~")
 
 
 def sort_keywords(arches):
-	"""Sort |arches| list in the order developers expect
+    """Sort |arches| list in the order developers expect
 
-	This is vaguely defined because it is kind of vaguely defined once you get
-	past the basic (Linux-only) keywords.
+    This is vaguely defined because it is kind of vaguely defined once you get
+    past the basic (Linux-only) keywords.
 
-	Args:
-	  arches: An iterable of ARCH values.
+    Args:
+      arches: An iterable of ARCH values.
 
-	Returns:
-	  A sorted list of |arches|
-	"""
-	keywords = []
+    Returns:
+      A sorted list of |arches|
+    """
+    keywords = []
 
-	# Globs always come first.
-	for g in ('-*', '*', '~*'):
-		if g in arches:
-			arches.remove(g)
-			keywords.append(g)
+    # Globs always come first.
+    for g in ("-*", "*", "~*"):
+        if g in arches:
+            arches.remove(g)
+            keywords.append(g)
 
-	def arch_key(keyword):
-		"""Callback for python sorting functions
+    def arch_key(keyword):
+        """Callback for python sorting functions
 
-		Used to turn a Gentoo keyword into a sortable form.
-		"""
-		# Sort independent of leading marker (~ or -).
-		arch = keyword_to_arch(keyword)
+        Used to turn a Gentoo keyword into a sortable form.
+        """
+        # Sort independent of leading marker (~ or -).
+        arch = keyword_to_arch(keyword)
 
-		# A keyword may have a "-" in it.  We split on that and sort
-		# by the two resulting items.  The part after the hyphen is
-		# the primary key.
-		if '-' in arch:
-			arch, plat = arch.split('-', 1)
-		else:
-			arch, plat = arch, ''
+        # A keyword may have a "-" in it.  We split on that and sort
+        # by the two resulting items.  The part after the hyphen is
+        # the primary key.
+        if "-" in arch:
+            arch, plat = arch.split("-", 1)
+        else:
+            arch, plat = arch, ""
 
-		return (plat, arch)
+        return (plat, arch)
 
-	keywords += sorted(arches, key=arch_key)
+    keywords += sorted(arches, key=arch_key)
 
-	return keywords
+    return keywords
 
 
-def diff_keywords(old_keywords, new_keywords, style='color-inline'):
-	"""Show pretty diff between list of keywords
+def diff_keywords(old_keywords, new_keywords, style="color-inline"):
+    """Show pretty diff between list of keywords
 
-	Args:
-	  old_keywords: The old set of KEYWORDS
-	  new_keywords: The new set of KEYWORDS
-	  style: The diff style
+    Args:
+      old_keywords: The old set of KEYWORDS
+      new_keywords: The new set of KEYWORDS
+      style: The diff style
 
-	Returns:
-	  A string containing the diff output ready to shown to the user
-	"""
-	def show_diff(s):
-		output = ''
+    Returns:
+      A string containing the diff output ready to be shown to the user
+    """
 
-		for tag, i0, i1, j0, j1 in s.get_opcodes():
+    def show_diff(s):
+        output = ""
 
-			if tag == 'equal':
-				output += s.a[i0:i1]
+        for tag, i0, i1, j0, j1 in s.get_opcodes():
 
-			if tag in ('delete', 'replace'):
-				o = s.a[i0:i1]
-				if style == 'color-inline':
-					o = colorize('bg_darkred', o)
-				else:
-					o = '-{%s}' % o
-				output += o
+            if tag == "equal":
+                output += s.a[i0:i1]
 
-			if tag in ('insert', 'replace'):
-				o = s.b[j0:j1]
-				if style == 'color-inline':
-					o = colorize('bg_darkgreen', o)
-				else:
-					o = '+{%s}' % o
-				output += o
+            if tag in ("delete", "replace"):
+                o = s.a[i0:i1]
+                if style == "color-inline":
+                    o = colorize("bg_darkred", o)
+                else:
+                    o = "-{%s}" % o
+                output += o
 
-		return output
+            if tag in ("insert", "replace"):
+                o = s.b[j0:j1]
+                if style == "color-inline":
+                    o = colorize("bg_darkgreen", o)
+                else:
+                    o = "+{%s}" % o
+                output += o
 
-	sold = str(' '.join(old_keywords))
-	snew = str(' '.join(new_keywords))
-	s = difflib.SequenceMatcher(str.isspace, sold, snew, autojunk=False)
-	return show_diff(s)
+        return output
+
+    sold = str(" ".join(old_keywords))
+    snew = str(" ".join(new_keywords))
+    s = difflib.SequenceMatcher(str.isspace, sold, snew, autojunk=False)
+    return show_diff(s)
 
 
 def process_keywords(keywords, ops, arch_status=None):
-	"""Process |ops| for |keywords|"""
-	new_keywords = set(keywords).copy()
-
-	# Process each op one at a time.
-	for op, oarch, refarch in ops:
-		# Figure out which keywords we need to modify.
-		if oarch == 'all':
-			if arch_status is None:
-				raise ValueError('unable to process "all" w/out profiles.desc')
-			old_arches = set([keyword_to_arch(a) for a in new_keywords])
-			if op is None:
-				# Process just stable keywords.
-				arches = [k for k, v in arch_status.items()
-					if v[1] == 'arch' and k in old_arches]
-			else:
-				# Process all possible keywords.  We use the arch_status as a
-				# master list.  If it lacks some keywords, then we might miss
-				# somethings here, but not much we can do.
-				arches = list(old_arches)
-
-			# We ignore the glob arch as we never want to tweak it.
-			if '*' in arches:
-				arches.remove('*')
-
-			# For keywords that are explicitly disabled, do not update.  When
-			# people use `ekeyword ~all ...` or `ekeyword all ...`, they rarely
-			# (if ever) want to change a '-sparc' to 'sparc' or '-sparc' to
-			# '~sparc'.  We force people to explicitly do `ekeyword sparc ...`
-			# in these cases.
-			arches = [x for x in arches if '-' + x not in new_keywords]
-		else:
-			arches = [oarch]
-
-		if refarch:
-			# Figure out the state for this arch based on the reference arch.
-			# TODO: Add support for "all" keywords.
-			# XXX: Should this ignore the '-' state ?  Does it make sense to
-			#      sync e.g. "s390" to "-ppc" ?
-			refkeyword = [x for x in new_keywords if refarch == keyword_to_arch(x)]
-			if not refkeyword:
-				op = '^'
-			elif refkeyword[0].startswith('~'):
-				op = '~'
-			elif refkeyword[0].startswith('-'):
-				op = '-'
-			else:
-				op = None
-
-		# Finally do the actual update of the keywords list.
-		for arch in arches:
-			new_keywords -= set(['%s%s' % (x, arch) for x in ('', '~', '-')])
-
-			if op is None:
-				new_keywords.add(arch)
-			elif op in ('~', '-'):
-				new_keywords.add('%s%s' % (op, arch))
-			elif op == '^':
-				# Already deleted.  Whee.
-				pass
-			else:
-				raise ValueError('unknown operation %s' % op)
-
-	return new_keywords
-
-
-def process_content(ebuild, data, ops, arch_status=None, verbose=0,
-                    quiet=0, style='color-inline'):
-	"""Process |ops| for |data|"""
-	# Set up the user display style based on verbose/quiet settings.
-	if verbose > 1:
-		disp_name = ebuild
-		def logit(msg):
-			print('%s: %s' % (disp_name, msg))
-	elif quiet > 1:
-		def logit(_msg):
-			pass
-	else:
-		# Chop the full path and the .ebuild suffix.
-		disp_name = os.path.basename(ebuild)[:-7]
-		def logit(msg):
-			print('%s: %s' % (disp_name, msg))
-
-	# Match any KEYWORDS= entry that isn't commented out.
-	keywords_re = re.compile(r'^([^#]*\bKEYWORDS=)([\'"])(.*)(\2)(.*)')
-	updated = False
-	content = []
-
-	# Walk each line of the ebuild looking for KEYWORDS to process.
-	for line in data:
-		m = keywords_re.match(line)
-		if not m:
-			content.append(line)
-			continue
-
-		# Ok, we've got it, now let's process things.
-		old_keywords_original = m.group(3).split()  # preserve original order
-		old_keywords = set(old_keywords_original)
-		new_keywords = process_keywords(
-			old_keywords, ops, arch_status=arch_status)
-
-		were_sorted_already = (
-				old_keywords_original == sort_keywords(old_keywords_original))
-
-		# Finally let's present the results to the user.
-		if (new_keywords != old_keywords) or \
-				(not ops and not were_sorted_already) or verbose:
-			# Only do the diff work if something actually changed.
-			updated = True
-
-			if not ops:
-				# We're sorting only so we want to compare with the
-				# unsorted original (or changes in order will not show)
-				old_keywords = old_keywords_original
-			else:
-				# We changed keywords so let's diff sorted versions
-				# so that keywords changes are easy to spot
-				old_keywords = sort_keywords(old_keywords)
-
-			new_keywords = sort_keywords(new_keywords)
-			line = '%s"%s"%s\n' % (m.group(1), ' '.join(new_keywords),
-			                       m.group(5))
-			if style in ('color-inline', 'inline'):
-				logit(diff_keywords(old_keywords, new_keywords, style=style))
-			else:
-				if style == 'long-multi':
-					logit(' '.join(['%*s' % (len(keyword_to_arch(x)) + 1, x)
-					                for x in old_keywords]))
-					logit(' '.join(['%*s' % (len(keyword_to_arch(x)) + 1, x)
-					                for x in new_keywords]))
-				else:
-					deleted_keywords = [x for x in old_keywords
-					                    if x not in new_keywords]
-					logit('--- %s' % ' '.join(deleted_keywords))
-					added_keywords = [x for x in new_keywords
-					                  if x not in old_keywords]
-					logit('+++ %s' % ' '.join(added_keywords))
-
-		content.append(line)
-
-	if not updated:
-		logit('no updates')
-
-	return updated, content
-
-
-def process_ebuild(ebuild, ops, arch_status=None, verbose=0, quiet=0,
-                   dry_run=False, style='color-inline', manifest=False):
-	"""Process |ops| for |ebuild|
-
-	Args:
-	  ebuild: The ebuild file to operate on & update in place
-	  ops: An iterable of operations (Op objects) to perform on |ebuild|
-	  arch_status: A dict mapping default arches to their stability; see the
-	               load_profile_data function for more details
-	  verbose: Be verbose; show various status messages
-	  quiet: Be quiet; only show errors
-	  dry_run: Do not make any changes to |ebuild|; show what would be done
-	  style: The diff style
-
-	Returns:
-	  Whether any updates were processed
-	"""
-	with io.open(ebuild, encoding='utf8') as f:
-		updated, content = process_content(
-			ebuild, f, ops, arch_status=arch_status,
-			verbose=verbose, quiet=quiet, style=style)
-		if updated and not dry_run:
-			with io.open(ebuild, 'w', encoding='utf8') as f:
-				f.writelines(content)
-			if manifest:
-				subprocess.check_call(['ebuild', ebuild, 'manifest'])
-	return updated
+    """Process |ops| for |keywords|"""
+    new_keywords = set(keywords).copy()
+
+    # Process each op one at a time.
+    for op, oarch, refarch in ops:
+        # Figure out which keywords we need to modify.
+        if oarch == "all":
+            if arch_status is None:
+                raise ValueError('unable to process "all" w/out profiles.desc')
+            old_arches = set([keyword_to_arch(a) for a in new_keywords])
+            if op is None:
+                # Process just stable keywords.
+                arches = [
+                    k
+                    for k, v in arch_status.items()
+                    if v[1] == "arch" and k in old_arches
+                ]
+            else:
+                # Process all possible keywords.  We use the arch_status as a
+                # master list.  If it lacks some keywords, then we might miss
+                # something here, but not much we can do.
+                arches = list(old_arches)
+
+            # We ignore the glob arch as we never want to tweak it.
+            if "*" in arches:
+                arches.remove("*")
+
+            # For keywords that are explicitly disabled, do not update.  When
+            # people use `ekeyword ~all ...` or `ekeyword all ...`, they rarely
+            # (if ever) want to change a '-sparc' to 'sparc' or '-sparc' to
+            # '~sparc'.  We force people to explicitly do `ekeyword sparc ...`
+            # in these cases.
+            arches = [x for x in arches if "-" + x not in new_keywords]
+        else:
+            arches = [oarch]
+
+        if refarch:
+            # Figure out the state for this arch based on the reference arch.
+            # TODO: Add support for "all" keywords.
+            # XXX: Should this ignore the '-' state ?  Does it make sense to
+            #      sync e.g. "s390" to "-ppc" ?
+            refkeyword = [x for x in new_keywords if refarch == keyword_to_arch(x)]
+            if not refkeyword:
+                op = "^"
+            elif refkeyword[0].startswith("~"):
+                op = "~"
+            elif refkeyword[0].startswith("-"):
+                op = "-"
+            else:
+                op = None
+
+        # Finally do the actual update of the keywords list.
+        for arch in arches:
+            new_keywords -= set(["%s%s" % (x, arch) for x in ("", "~", "-")])
+
+            if op is None:
+                new_keywords.add(arch)
+            elif op in ("~", "-"):
+                new_keywords.add("%s%s" % (op, arch))
+            elif op == "^":
+                # Already deleted.  Whee.
+                pass
+            else:
+                raise ValueError("unknown operation %s" % op)
+
+    return new_keywords
+
+
+def process_content(
+    ebuild, data, ops, arch_status=None, verbose=0, quiet=0, style="color-inline"
+):
+    """Process |ops| for |data|"""
+    # Set up the user display style based on verbose/quiet settings.
+    if verbose > 1:
+        disp_name = ebuild
+
+        def logit(msg):
+            print("%s: %s" % (disp_name, msg))
+
+    elif quiet > 1:
+
+        def logit(_msg):
+            pass
+
+    else:
+        # Chop the full path and the .ebuild suffix.
+        disp_name = os.path.basename(ebuild)[:-7]
+
+        def logit(msg):
+            print("%s: %s" % (disp_name, msg))
+
+    # Match any KEYWORDS= entry that isn't commented out.
+    keywords_re = re.compile(r'^([^#]*\bKEYWORDS=)([\'"])(.*)(\2)(.*)')
+    updated = False
+    content = []
+
+    # Walk each line of the ebuild looking for KEYWORDS to process.
+    for line in data:
+        m = keywords_re.match(line)
+        if not m:
+            content.append(line)
+            continue
+
+        # Ok, we've got it, now let's process things.
+        old_keywords_original = m.group(3).split()  # preserve original order
+        old_keywords = set(old_keywords_original)
+        new_keywords = process_keywords(old_keywords, ops, arch_status=arch_status)
+
+        were_sorted_already = old_keywords_original == sort_keywords(
+            old_keywords_original
+        )
+
+        # Finally let's present the results to the user.
+        if (
+            (new_keywords != old_keywords)
+            or (not ops and not were_sorted_already)
+            or verbose
+        ):
+            # Only do the diff work if something actually changed.
+            updated = True
+
+            if not ops:
+                # We're sorting only so we want to compare with the
+                # unsorted original (or changes in order will not show)
+                old_keywords = old_keywords_original
+            else:
+                # We changed keywords so let's diff sorted versions
+                # so that keywords changes are easy to spot
+                old_keywords = sort_keywords(old_keywords)
+
+            new_keywords = sort_keywords(new_keywords)
+            line = '%s"%s"%s\n' % (m.group(1), " ".join(new_keywords), m.group(5))
+            if style in ("color-inline", "inline"):
+                logit(diff_keywords(old_keywords, new_keywords, style=style))
+            else:
+                if style == "long-multi":
+                    logit(
+                        " ".join(
+                            [
+                                "%*s" % (len(keyword_to_arch(x)) + 1, x)
+                                for x in old_keywords
+                            ]
+                        )
+                    )
+                    logit(
+                        " ".join(
+                            [
+                                "%*s" % (len(keyword_to_arch(x)) + 1, x)
+                                for x in new_keywords
+                            ]
+                        )
+                    )
+                else:
+                    deleted_keywords = [
+                        x for x in old_keywords if x not in new_keywords
+                    ]
+                    logit("--- %s" % " ".join(deleted_keywords))
+                    added_keywords = [x for x in new_keywords if x not in old_keywords]
+                    logit("+++ %s" % " ".join(added_keywords))
+
+        content.append(line)
+
+    if not updated:
+        logit("no updates")
+
+    return updated, content
+
+
+def process_ebuild(
+    ebuild,
+    ops,
+    arch_status=None,
+    verbose=0,
+    quiet=0,
+    dry_run=False,
+    style="color-inline",
+    manifest=False,
+):
+    """Process |ops| for |ebuild|
+
+    Args:
+      ebuild: The ebuild file to operate on & update in place
+      ops: An iterable of operations (Op objects) to perform on |ebuild|
+      arch_status: A dict mapping default arches to their stability; see the
+                   load_profile_data function for more details
+      verbose: Be verbose; show various status messages
+      quiet: Be quiet; only show errors
+      dry_run: Do not make any changes to |ebuild|; show what would be done
+      style: The diff style
+
+    Returns:
+      Whether any updates were processed
+    """
+    with io.open(ebuild, encoding="utf8") as f:
+        updated, content = process_content(
+            ebuild,
+            f,
+            ops,
+            arch_status=arch_status,
+            verbose=verbose,
+            quiet=quiet,
+            style=style,
+        )
+        if updated and not dry_run:
+            with io.open(ebuild, "w", encoding="utf8") as f:
+                f.writelines(content)
+            if manifest:
+                subprocess.check_call(["ebuild", ebuild, "manifest"])
+    return updated
 
 
 def portage_settings():
-	"""Return the portage settings we care about."""
-	# Portage creates the db member on the fly which confuses the linter.
-	return portage.db[portage.root]['vartree'].settings
+    """Return the portage settings we care about."""
+    # Portage creates the db member on the fly which confuses the linter.
+    return portage.db[portage.root]["vartree"].settings
 
 
 def arg_to_op(arg):
-	"""Convert a command line |arg| to an Op"""
-	arch_prefixes = ('-', '~', '^')
+    """Convert a command line |arg| to an Op"""
+    arch_prefixes = ("-", "~", "^")
 
-	op = None
-	arch = arg
-	refarch = None
+    op = None
+    arch = arg
+    refarch = None
 
-	if arg and arg[0] in arch_prefixes:
-		op, arch = arg[0], arg[1:]
+    if arg and arg[0] in arch_prefixes:
+        op, arch = arg[0], arg[1:]
 
-	if '=' in arch:
-		if not op is None:
-			raise ValueError('Cannot use an op and a refarch')
-		arch, refarch = arch.split('=', 1)
+    if "=" in arch:
+        if not op is None:
+            raise ValueError("Cannot use an op and a refarch")
+        arch, refarch = arch.split("=", 1)
 
-	return Op(op, arch, refarch)
+    return Op(op, arch, refarch)
 
 
 def ignorable_arg(arg, quiet=0):
-	"""Whether it's ok to ignore this argument"""
-	if os.path.isdir(arg):
-		if not quiet:
-			warning('ignoring directory %s' % arg)
-		return True
-
-	WHITELIST = (
-		'Manifest',
-		'metadata.xml',
-	)
-	base = os.path.basename(arg)
-	if (base.startswith('ChangeLog') or
-	    base in WHITELIST or
-	    base.startswith('.') or
-	    base.endswith('~')):
-		if not quiet:
-			warning('ignoring file: %s' % arg)
-		return True
-
-	return False
+    """Whether it's ok to ignore this argument"""
+    if os.path.isdir(arg):
+        if not quiet:
+            warning("ignoring directory %s" % arg)
+        return True
+
+    WHITELIST = (
+        "Manifest",
+        "metadata.xml",
+    )
+    base = os.path.basename(arg)
+    if (
+        base.startswith("ChangeLog")
+        or base in WHITELIST
+        or base.startswith(".")
+        or base.endswith("~")
+    ):
+        if not quiet:
+            warning("ignoring file: %s" % arg)
+        return True
+
+    return False
 
 
 def args_to_work(args, arch_status=None, _repo=None, quiet=0):
-	"""Process |args| into a list of work itmes (ebuild/arches to update)"""
-	work = []
-	todo_arches = []
-	last_todo_arches = []
-
-	for arg in args:
-		if arg.endswith('.ebuild'):
-			if not todo_arches:
-				todo_arches = last_todo_arches
-			work.append([arg, todo_arches])
-			last_todo_arches = todo_arches
-			todo_arches = []
-		else:
-			op = arg_to_op(arg)
-			if not arch_status or op.arch in arch_status:
-				todo_arches.append(op)
-			elif not ignorable_arg(arg, quiet=quiet):
-				raise ValueError('unknown arch/argument: %s' % arg)
-
-	if todo_arches:
-		raise ValueError('missing ebuilds to process!')
-
-	return work
+    """Process |args| into a list of work itmes (ebuild/arches to update)"""
+    work = []
+    todo_arches = []
+    last_todo_arches = []
+
+    for arg in args:
+        if arg.endswith(".ebuild"):
+            if not todo_arches:
+                todo_arches = last_todo_arches
+            work.append([arg, todo_arches])
+            last_todo_arches = todo_arches
+            todo_arches = []
+        else:
+            op = arg_to_op(arg)
+            if not arch_status or op.arch in arch_status:
+                todo_arches.append(op)
+            elif not ignorable_arg(arg, quiet=quiet):
+                raise ValueError("unknown arch/argument: %s" % arg)
+
+    if todo_arches:
+        raise ValueError("missing ebuilds to process!")
+
+    return work
 
 
 def get_parser():
-	"""Return an argument parser for ekeyword"""
-	parser = argparse.ArgumentParser(
-		description=__doc__,
-		formatter_class=argparse.RawDescriptionHelpFormatter)
-	parser.add_argument('-m', '--manifest', default=False, action='store_true',
-		help='Run `ebuild manifest` on the ebuild after modifying it')
-	parser.add_argument('-n', '--dry-run', default=False, action='store_true',
-		help='Show what would be changed, but do not commit')
-	parser.add_argument('-v', '--verbose', action='count', default=0,
-		help='Be verbose while processing things')
-	parser.add_argument('-q', '--quiet', action='count', default=0,
-		help='Be quiet while processing things (only show errors)')
-	parser.add_argument('--format', default='auto', dest='style',
-		choices=('auto', 'color-inline', 'inline', 'short-multi', 'long-multi'),
-		help='Select output format for showing differences')
-	parser.add_argument('-V', '--version', action='version', version=__version__,
-		help='Show version information')
-	return parser
+    """Return an argument parser for ekeyword"""
+    parser = argparse.ArgumentParser(
+        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+    parser.add_argument(
+        "-m",
+        "--manifest",
+        default=False,
+        action="store_true",
+        help="Run `ebuild manifest` on the ebuild after modifying it",
+    )
+    parser.add_argument(
+        "-n",
+        "--dry-run",
+        default=False,
+        action="store_true",
+        help="Show what would be changed, but do not commit",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=0,
+        help="Be verbose while processing things",
+    )
+    parser.add_argument(
+        "-q",
+        "--quiet",
+        action="count",
+        default=0,
+        help="Be quiet while processing things (only show errors)",
+    )
+    parser.add_argument(
+        "--format",
+        default="auto",
+        dest="style",
+        choices=("auto", "color-inline", "inline", "short-multi", "long-multi"),
+        help="Select output format for showing differences",
+    )
+    parser.add_argument(
+        "-V",
+        "--version",
+        action="version",
+        version=__version__,
+        help="Show version information",
+    )
+    return parser
 
 
 def main(argv):
-	if argv is None:
-		argv = sys.argv[1:]
-
-	# Extract the args ourselves.  This is to allow things like -hppa
-	# without tripping over the -h/--help flags.  We can't use the
-	# parse_known_args function either.
-	# This sucks and really wish we didn't need to do this ...
-	parse_args = []
-	work_args = []
-	while argv:
-		arg = argv.pop(0)
-		if arg.startswith('--'):
-			if arg == '--':
-				work_args += argv
-				break
-			else:
-				parse_args.append(arg)
-			# Handle flags that take arguments.
-			if arg in ('--format',):
-				if argv:
-					parse_args.append(argv.pop(0))
-		elif len(arg) == 2 and arg[0] == '-':
-			parse_args.append(arg)
-		else:
-			work_args.append(arg)
-
-	parser = get_parser()
-	opts = parser.parse_args(parse_args)
-	if not work_args:
-		parser.error('need ebuilds to process')
-
-	if opts.style == 'auto':
-		if not portage_settings().get('NOCOLOR', 'false').lower() in ('no', 'false'):
-			nocolor()
-			opts.style = 'short'
-		else:
-			opts.style = 'color-inline'
-
-	arch_status = load_profile_data()
-	try:
-		work = args_to_work(work_args, arch_status=arch_status, quiet=opts.quiet)
-	except ValueError as e:
-		parser.error(e)
-
-	for ebuild, ops in work:
-		process_ebuild(ebuild, ops, arch_status=arch_status,
-		               verbose=opts.verbose, quiet=opts.quiet,
-		               dry_run=opts.dry_run, style=opts.style,
-		               manifest=opts.manifest)
-
-	return os.EX_OK
-
-
-if __name__ == '__main__':
-	sys.exit(main(sys.argv[1:]))
+    if argv is None:
+        argv = sys.argv[1:]
+
+    # Extract the args ourselves.  This is to allow things like -hppa
+    # without tripping over the -h/--help flags.  We can't use the
+    # parse_known_args function either.
+    # This sucks and really wish we didn't need to do this ...
+    parse_args = []
+    work_args = []
+    while argv:
+        arg = argv.pop(0)
+        if arg.startswith("--"):
+            if arg == "--":
+                work_args += argv
+                break
+            else:
+                parse_args.append(arg)
+            # Handle flags that take arguments.
+            if arg in ("--format",):
+                if argv:
+                    parse_args.append(argv.pop(0))
+        elif len(arg) == 2 and arg[0] == "-":
+            parse_args.append(arg)
+        else:
+            work_args.append(arg)
+
+    parser = get_parser()
+    opts = parser.parse_args(parse_args)
+    if not work_args:
+        parser.error("need ebuilds to process")
+
+    if opts.style == "auto":
+        if not portage_settings().get("NOCOLOR", "false").lower() in ("no", "false"):
+            nocolor()
+            opts.style = "short"
+        else:
+            opts.style = "color-inline"
+
+    arch_status = load_profile_data()
+    try:
+        work = args_to_work(work_args, arch_status=arch_status, quiet=opts.quiet)
+    except ValueError as e:
+        parser.error(e)
+
+    for ebuild, ops in work:
+        process_ebuild(
+            ebuild,
+            ops,
+            arch_status=arch_status,
+            verbose=opts.verbose,
+            quiet=opts.quiet,
+            dry_run=opts.dry_run,
+            style=opts.style,
+            manifest=opts.manifest,
+        )
+
+    return os.EX_OK
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))

diff --git a/pym/gentoolkit/ekeyword/test_ekeyword.py b/pym/gentoolkit/ekeyword/test_ekeyword.py
index 0763783..3495aff 100755
--- a/pym/gentoolkit/ekeyword/test_ekeyword.py
+++ b/pym/gentoolkit/ekeyword/test_ekeyword.py
@@ -15,368 +15,368 @@ from unittest import mock
 from gentoolkit.ekeyword import ekeyword
 
 
-TESTDIR = os.path.join(os.path.dirname(__file__), 'tests')
+TESTDIR = os.path.join(os.path.dirname(__file__), "tests")
 
 
 class TestSortKeywords(unittest.TestCase):
-	"""Tests for sort_keywords"""
+    """Tests for sort_keywords"""
 
-	def _test(self, input_data, exp_data):
-		"""Sort |input_data| and make sure it matches |exp_data|"""
-		output_data = ekeyword.sort_keywords(input_data.split())
-		self.assertEqual(exp_data.split(), output_data)
+    def _test(self, input_data, exp_data):
+        """Sort |input_data| and make sure it matches |exp_data|"""
+        output_data = ekeyword.sort_keywords(input_data.split())
+        self.assertEqual(exp_data.split(), output_data)
 
-	def testNull(self):
-		"""Verify whitespace is collapsed"""
-		self._test('', '')
-		self._test('   		 ', '')
+    def testNull(self):
+        """Verify whitespace is collapsed"""
+        self._test("", "")
+        self._test("   		 ", "")
 
-	def testGlob(self):
-		"""Verify globs get sorted before all others"""
-		self._test('* arm', '* arm')
-		self._test('arm -* x86', '-* arm x86')
-		self._test('hppa ~* amd64', '~* amd64 hppa')
+    def testGlob(self):
+        """Verify globs get sorted before all others"""
+        self._test("* arm", "* arm")
+        self._test("arm -* x86", "-* arm x86")
+        self._test("hppa ~* amd64", "~* amd64 hppa")
 
-	def testMixedPlatform(self):
-		"""Verify core arches get sorted before all w/suffix"""
-		self._test('arm-linux alpha amd64-fbsd hppa',
-		           'alpha hppa amd64-fbsd arm-linux')
+    def testMixedPlatform(self):
+        """Verify core arches get sorted before all w/suffix"""
+        self._test("arm-linux alpha amd64-fbsd hppa", "alpha hppa amd64-fbsd arm-linux")
 
-	def testPrefixes(self):
-		"""Verify -/~ and such get ignored for sorting"""
-		self._test('-hppa arm ~alpha -* ~arm-linux',
-		           '-* ~alpha arm -hppa ~arm-linux')
+    def testPrefixes(self):
+        """Verify -/~ and such get ignored for sorting"""
+        self._test("-hppa arm ~alpha -* ~arm-linux", "-* ~alpha arm -hppa ~arm-linux")
 
-	def testPlatform(self):
-		"""Verify we sort based on platform first"""
-		self._test('x86-linux ppc-macos x86-fbsd amd64-linux amd64-fbsd',
-		           'amd64-fbsd x86-fbsd amd64-linux x86-linux ppc-macos')
+    def testPlatform(self):
+        """Verify we sort based on platform first"""
+        self._test(
+            "x86-linux ppc-macos x86-fbsd amd64-linux amd64-fbsd",
+            "amd64-fbsd x86-fbsd amd64-linux x86-linux ppc-macos",
+        )
 
 
 class TestDiffKeywords(unittest.TestCase):
-	"""Tests for diff_keywords"""
+    """Tests for diff_keywords"""
 
-	def testEmpty(self):
-		"""Test when there is no content to diff"""
-		ret = ekeyword.diff_keywords([], [])
-		self.assertEqual(ret, '')
+    def testEmpty(self):
+        """Test when there is no content to diff"""
+        ret = ekeyword.diff_keywords([], [])
+        self.assertEqual(ret, "")
 
-	def testSame(self):
-		"""Test when there is no difference"""
-		ret = ekeyword.diff_keywords(['a b c'], ['a b c'])
-		self.assertEqual(ret, 'a b c')
+    def testSame(self):
+        """Test when there is no difference"""
+        ret = ekeyword.diff_keywords(["a b c"], ["a b c"])
+        self.assertEqual(ret, "a b c")
 
-	def testInsert(self):
-		"""Test when content is simply added"""
-		ret = ekeyword.diff_keywords(['a'], ['~a'])
-		self.assertNotEqual(ret, '')
+    def testInsert(self):
+        """Test when content is simply added"""
+        ret = ekeyword.diff_keywords(["a"], ["~a"])
+        self.assertNotEqual(ret, "")
 
-	def testDelete(self):
-		"""Test when content is simply deleted"""
-		ret = ekeyword.diff_keywords(['~a'], ['a'])
-		self.assertNotEqual(ret, '')
+    def testDelete(self):
+        """Test when content is simply deleted"""
+        ret = ekeyword.diff_keywords(["~a"], ["a"])
+        self.assertNotEqual(ret, "")
 
-	def testReplace(self):
-		"""Test when some content replaces another"""
-		ret = ekeyword.diff_keywords(['~a'], ['-a'])
-		self.assertNotEqual(ret, '')
+    def testReplace(self):
+        """Test when some content replaces another"""
+        ret = ekeyword.diff_keywords(["~a"], ["-a"])
+        self.assertNotEqual(ret, "")
 
-	def _testSmokeStyle(self, style):
-		return ekeyword.diff_keywords(
-			['~a', 'b', '-abcde'],
-			['a', '-b', '-abxde'], style=style)
+    def _testSmokeStyle(self, style):
+        return ekeyword.diff_keywords(
+            ["~a", "b", "-abcde"], ["a", "-b", "-abxde"], style=style
+        )
 
-	def testSmokeStyleColor(self):
-		"""Run a full smoke test for color-inline style"""
-		ret = self._testSmokeStyle('color-inline')
-		self.assertNotEqual(ret, '')
+    def testSmokeStyleColor(self):
+        """Run a full smoke test for color-inline style"""
+        ret = self._testSmokeStyle("color-inline")
+        self.assertNotEqual(ret, "")
 
-	def testSmokeStyleNoColor(self):
-		"""Run a full smoke test for non-color-inline style"""
-		self._testSmokeStyle('nocolor')
+    def testSmokeStyleNoColor(self):
+        """Run a full smoke test for non-color-inline style"""
+        self._testSmokeStyle("nocolor")
 
 
 class TestProcessKeywords(unittest.TestCase):
-	"""Tests for process_keywords"""
-
-	def _test(self, keywords, ops, exp, arch_status=None):
-		# This func doesn't return sorted results (which is fine),
-		# so do so ourselves to get stable tests.
-		ret = ekeyword.process_keywords(
-			keywords.split(), ops, arch_status=arch_status)
-		self.assertEqual(sorted(ret), sorted(exp.split()))
-
-	def testAdd(self):
-		ops = (
-			ekeyword.Op(None, 'arm', None),
-			ekeyword.Op('~', 's390', None),
-			ekeyword.Op('-', 'sh', None),
-		)
-		self._test('moo', ops, 'arm ~s390 -sh moo')
-
-	def testModify(self):
-		ops = (
-			ekeyword.Op(None, 'arm', None),
-			ekeyword.Op('~', 's390', None),
-			ekeyword.Op('-', 'sh', None),
-		)
-		self._test('~arm s390 ~sh moo', ops, 'arm ~s390 -sh moo')
-
-	def testDelete(self):
-		ops = (
-			ekeyword.Op('^', 'arm', None),
-			ekeyword.Op('^', 's390', None),
-			ekeyword.Op('^', 'x86', None),
-		)
-		self._test('arm -s390 ~x86 bar', ops, 'bar')
-
-	def testSync(self):
-		ops = (
-			ekeyword.Op('=', 'arm64', 'arm'),
-			ekeyword.Op('=', 'ppc64', 'ppc'),
-			ekeyword.Op('=', 'amd64', 'x86'),
-			ekeyword.Op('=', 'm68k', 'mips'),
-			ekeyword.Op('=', 'ia64', 'alpha'),
-			ekeyword.Op('=', 'sh', 'sparc'),
-			ekeyword.Op('=', 's390', 's390x'),
-			ekeyword.Op('=', 'boo', 'moo'),
-		)
-		self._test(
-			'arm64 arm '
-			'~ppc64 ~ppc '
-			'~amd64 x86 '
-			'm68k ~mips '
-			'-ia64 alpha '
-			'sh -sparc '
-			's390 '
-			'moo ',
-			ops,
-			'arm64 arm ~ppc64 ~ppc amd64 x86 ~m68k ~mips ia64 alpha '
-			'-sh -sparc boo moo')
-
-	def testAllNoStatus(self):
-		ops = (
-			ekeyword.Op(None, 'all', None),
-		)
-		self.assertRaises(ValueError, self._test, '', ops, '')
-
-	def testAllStable(self):
-		ops = (
-			ekeyword.Op(None, 'all', None),
-		)
-		arch_status = {
-			'alpha': ('stable', '~arch'),
-			'arm':   ('stable', 'arch'),
-			'm68k':  ('exp', '~arch'),
-			's390':  ('exp', 'arch'),
-		}
-		self._test('* ~alpha ~arm ~m68k ~mips ~s390 ~arm-linux', ops,
-		           '* ~alpha arm ~m68k ~mips s390 ~arm-linux', arch_status)
-
-	def testAllUnstable(self):
-		ops = (
-			ekeyword.Op('~', 'all', None),
-		)
-		arch_status = {
-			'alpha': ('stable', '~arch'),
-			'arm':   ('stable', 'arch'),
-			'm68k':  ('exp', '~arch'),
-			's390':  ('exp', 'arch'),
-		}
-		self._test('-* ~* * alpha arm m68k s390 arm-linux', ops,
-		           '-* ~* * ~alpha ~arm ~m68k ~s390 ~arm-linux', arch_status)
-
-	def testAllMultiUnstableStable(self):
-		ops = (
-			ekeyword.Op('~', 'all', None),
-			ekeyword.Op(None, 'all', None),
-		)
-		arch_status = {
-			'alpha': ('stable', '~arch'),
-			'arm':   ('stable', 'arch'),
-			'm68k':  ('exp', '~arch'),
-			's390':  ('exp', 'arch'),
-		}
-		self._test('-* ~* * alpha arm m68k s390', ops,
-		           '-* ~* * ~alpha arm ~m68k s390', arch_status)
-
-	def testAllDisabled(self):
-		"""Make sure ~all does not change -arch to ~arch"""
-		ops = (
-			ekeyword.Op('~', 'all', None),
-		)
-		self._test('alpha -sparc ~x86', ops,
-		           '~alpha -sparc ~x86', {})
+    """Tests for process_keywords"""
+
+    def _test(self, keywords, ops, exp, arch_status=None):
+        # This func doesn't return sorted results (which is fine),
+        # so do so ourselves to get stable tests.
+        ret = ekeyword.process_keywords(keywords.split(), ops, arch_status=arch_status)
+        self.assertEqual(sorted(ret), sorted(exp.split()))
+
+    def testAdd(self):
+        ops = (
+            ekeyword.Op(None, "arm", None),
+            ekeyword.Op("~", "s390", None),
+            ekeyword.Op("-", "sh", None),
+        )
+        self._test("moo", ops, "arm ~s390 -sh moo")
+
+    def testModify(self):
+        ops = (
+            ekeyword.Op(None, "arm", None),
+            ekeyword.Op("~", "s390", None),
+            ekeyword.Op("-", "sh", None),
+        )
+        self._test("~arm s390 ~sh moo", ops, "arm ~s390 -sh moo")
+
+    def testDelete(self):
+        ops = (
+            ekeyword.Op("^", "arm", None),
+            ekeyword.Op("^", "s390", None),
+            ekeyword.Op("^", "x86", None),
+        )
+        self._test("arm -s390 ~x86 bar", ops, "bar")
+
+    def testSync(self):
+        ops = (
+            ekeyword.Op("=", "arm64", "arm"),
+            ekeyword.Op("=", "ppc64", "ppc"),
+            ekeyword.Op("=", "amd64", "x86"),
+            ekeyword.Op("=", "m68k", "mips"),
+            ekeyword.Op("=", "ia64", "alpha"),
+            ekeyword.Op("=", "sh", "sparc"),
+            ekeyword.Op("=", "s390", "s390x"),
+            ekeyword.Op("=", "boo", "moo"),
+        )
+        self._test(
+            "arm64 arm "
+            "~ppc64 ~ppc "
+            "~amd64 x86 "
+            "m68k ~mips "
+            "-ia64 alpha "
+            "sh -sparc "
+            "s390 "
+            "moo ",
+            ops,
+            "arm64 arm ~ppc64 ~ppc amd64 x86 ~m68k ~mips ia64 alpha "
+            "-sh -sparc boo moo",
+        )
+
+    def testAllNoStatus(self):
+        ops = (ekeyword.Op(None, "all", None),)
+        self.assertRaises(ValueError, self._test, "", ops, "")
+
+    def testAllStable(self):
+        ops = (ekeyword.Op(None, "all", None),)
+        arch_status = {
+            "alpha": ("stable", "~arch"),
+            "arm": ("stable", "arch"),
+            "m68k": ("exp", "~arch"),
+            "s390": ("exp", "arch"),
+        }
+        self._test(
+            "* ~alpha ~arm ~m68k ~mips ~s390 ~arm-linux",
+            ops,
+            "* ~alpha arm ~m68k ~mips s390 ~arm-linux",
+            arch_status,
+        )
+
+    def testAllUnstable(self):
+        ops = (ekeyword.Op("~", "all", None),)
+        arch_status = {
+            "alpha": ("stable", "~arch"),
+            "arm": ("stable", "arch"),
+            "m68k": ("exp", "~arch"),
+            "s390": ("exp", "arch"),
+        }
+        self._test(
+            "-* ~* * alpha arm m68k s390 arm-linux",
+            ops,
+            "-* ~* * ~alpha ~arm ~m68k ~s390 ~arm-linux",
+            arch_status,
+        )
+
+    def testAllMultiUnstableStable(self):
+        ops = (
+            ekeyword.Op("~", "all", None),
+            ekeyword.Op(None, "all", None),
+        )
+        arch_status = {
+            "alpha": ("stable", "~arch"),
+            "arm": ("stable", "arch"),
+            "m68k": ("exp", "~arch"),
+            "s390": ("exp", "arch"),
+        }
+        self._test(
+            "-* ~* * alpha arm m68k s390",
+            ops,
+            "-* ~* * ~alpha arm ~m68k s390",
+            arch_status,
+        )
+
+    def testAllDisabled(self):
+        """Make sure ~all does not change -arch to ~arch"""
+        ops = (ekeyword.Op("~", "all", None),)
+        self._test("alpha -sparc ~x86", ops, "~alpha -sparc ~x86", {})
 
 
 class TestProcessContent(unittest.TestCase):
-	"""Tests for process_content"""
-
-	def _testKeywords(self, line):
-		ops = (
-			ekeyword.Op(None, 'arm', None),
-			ekeyword.Op('~', 'sparc', None),
-		)
-		return ekeyword.process_content(
-			'file', ['%s\n' % line], ops, quiet=True)
-
-	def testKeywords(self):
-		"""Basic KEYWORDS mod"""
-		updated, ret = self._testKeywords('KEYWORDS=""')
-		self.assertTrue(updated)
-		self.assertEqual(ret, ['KEYWORDS="arm ~sparc"\n'])
-
-	def testKeywordsIndented(self):
-		"""Test KEYWORDS indented by space"""
-		updated, ret = self._testKeywords(' 	 	KEYWORDS=""')
-		self.assertTrue(updated)
-		self.assertEqual(ret, [' 	 	KEYWORDS="arm ~sparc"\n'])
-
-	def testKeywordsSingleQuote(self):
-		"""Test single quoted KEYWORDS"""
-		updated, ret = self._testKeywords("KEYWORDS=' '")
-		self.assertTrue(updated)
-		self.assertEqual(ret, ['KEYWORDS="arm ~sparc"\n'])
-
-	def testKeywordsComment(self):
-		"""Test commented out KEYWORDS"""
-		updated, ret = self._testKeywords('# KEYWORDS=""')
-		self.assertFalse(updated)
-		self.assertEqual(ret, ['# KEYWORDS=""\n'])
-
-	def testKeywordsCode(self):
-		"""Test code leading KEYWORDS"""
-		updated, ret = self._testKeywords('[[ ${PV} ]] && KEYWORDS=""')
-		self.assertTrue(updated)
-		self.assertEqual(ret, ['[[ ${PV} ]] && KEYWORDS="arm ~sparc"\n'])
-
-	def testKeywordsEmpty(self):
-		"""Test KEYWORDS not set at all"""
-		updated, ret = self._testKeywords(' KEYWORDS=')
-		self.assertFalse(updated)
-		self.assertEqual(ret, [' KEYWORDS=\n'])
-
-	def _testSmoke(self, style='color-inline', verbose=0, quiet=0):
-		ops = (
-			ekeyword.Op(None, 'arm', None),
-			ekeyword.Op('~', 'sparc', None),
-		)
-		ekeyword.process_content(
-			'asdf', ['KEYWORDS="arm"'], ops, verbose=verbose,
-			quiet=quiet, style=style)
-
-	def testSmokeQuiet(self):
-		"""Smoke test for quiet mode"""
-		self._testSmoke(quiet=10)
-
-	def testSmokeVerbose(self):
-		"""Smoke test for verbose mode"""
-		self._testSmoke(verbose=10)
-
-	def testSmokeStyleColor(self):
-		"""Smoke test for color-inline style"""
-		self._testSmoke('color-inline')
-
-	def testSmokeStyleInline(self):
-		"""Smoke test for inline style"""
-		self._testSmoke('inline')
-
-	def testSmokeStyleShortMulti(self):
-		"""Smoke test for short-multi style"""
-		self._testSmoke('short-multi')
-
-	def testSmokeStyleLongMulti(self):
-		"""Smoke test for long-multi style"""
-		self._testSmoke('long-multi')
+    """Tests for process_content"""
+
+    def _testKeywords(self, line):
+        ops = (
+            ekeyword.Op(None, "arm", None),
+            ekeyword.Op("~", "sparc", None),
+        )
+        return ekeyword.process_content("file", ["%s\n" % line], ops, quiet=True)
+
+    def testKeywords(self):
+        """Basic KEYWORDS mod"""
+        updated, ret = self._testKeywords('KEYWORDS=""')
+        self.assertTrue(updated)
+        self.assertEqual(ret, ['KEYWORDS="arm ~sparc"\n'])
+
+    def testKeywordsIndented(self):
+        """Test KEYWORDS indented by space"""
+        updated, ret = self._testKeywords(' 	 	KEYWORDS=""')
+        self.assertTrue(updated)
+        self.assertEqual(ret, [' 	 	KEYWORDS="arm ~sparc"\n'])
+
+    def testKeywordsSingleQuote(self):
+        """Test single quoted KEYWORDS"""
+        updated, ret = self._testKeywords("KEYWORDS=' '")
+        self.assertTrue(updated)
+        self.assertEqual(ret, ['KEYWORDS="arm ~sparc"\n'])
+
+    def testKeywordsComment(self):
+        """Test commented out KEYWORDS"""
+        updated, ret = self._testKeywords('# KEYWORDS=""')
+        self.assertFalse(updated)
+        self.assertEqual(ret, ['# KEYWORDS=""\n'])
+
+    def testKeywordsCode(self):
+        """Test code leading KEYWORDS"""
+        updated, ret = self._testKeywords('[[ ${PV} ]] && KEYWORDS=""')
+        self.assertTrue(updated)
+        self.assertEqual(ret, ['[[ ${PV} ]] && KEYWORDS="arm ~sparc"\n'])
+
+    def testKeywordsEmpty(self):
+        """Test KEYWORDS not set at all"""
+        updated, ret = self._testKeywords(" KEYWORDS=")
+        self.assertFalse(updated)
+        self.assertEqual(ret, [" KEYWORDS=\n"])
+
+    def _testSmoke(self, style="color-inline", verbose=0, quiet=0):
+        ops = (
+            ekeyword.Op(None, "arm", None),
+            ekeyword.Op("~", "sparc", None),
+        )
+        ekeyword.process_content(
+            "asdf", ['KEYWORDS="arm"'], ops, verbose=verbose, quiet=quiet, style=style
+        )
+
+    def testSmokeQuiet(self):
+        """Smoke test for quiet mode"""
+        self._testSmoke(quiet=10)
+
+    def testSmokeVerbose(self):
+        """Smoke test for verbose mode"""
+        self._testSmoke(verbose=10)
+
+    def testSmokeStyleColor(self):
+        """Smoke test for color-inline style"""
+        self._testSmoke("color-inline")
+
+    def testSmokeStyleInline(self):
+        """Smoke test for inline style"""
+        self._testSmoke("inline")
+
+    def testSmokeStyleShortMulti(self):
+        """Smoke test for short-multi style"""
+        self._testSmoke("short-multi")
+
+    def testSmokeStyleLongMulti(self):
+        """Smoke test for long-multi style"""
+        self._testSmoke("long-multi")
 
 
 class TestProcessEbuild(unittest.TestCase):
-	"""Tests for process_ebuild
-
-	This is fairly light as most code is in process_content.
-	"""
-
-	def _process_ebuild(self, *args, **kwargs):
-		"""Set up a writable copy of an ebuild for process_ebuild()"""
-		with tempfile.NamedTemporaryFile() as tmp:
-			with open(tmp.name, 'wb') as fw:
-				with open(os.path.join(TESTDIR, 'process-1.ebuild'), 'rb') as f:
-					orig_content = f.read()
-					fw.write(orig_content)
-			ekeyword.process_ebuild(tmp.name, *args, **kwargs)
-			with open(tmp.name, 'rb') as f:
-				return (orig_content, f.read())
-
-	def _testSmoke(self, dry_run):
-		ops = (
-			ekeyword.Op(None, 'arm', None),
-			ekeyword.Op('~', 'sparc', None),
-		)
-		orig_content, new_content = self._process_ebuild(ops, dry_run=dry_run)
-		if dry_run:
-			self.assertEqual(orig_content, new_content)
-		else:
-			self.assertNotEqual(orig_content, new_content)
-
-	def testSmokeNotDry(self):
-		self._testSmoke(False)
-
-	def testSmokeDry(self):
-		self._testSmoke(True)
-
-	def testManifestUpdated(self):
-		"""Verify `ebuild ... manifest` runs on updated files"""
-		with mock.patch.object(subprocess, 'check_call') as m:
-			self._process_ebuild((ekeyword.Op('~', 'arm', None),),
-			                     manifest=True)
-		m.assert_called_once_with(['ebuild', mock.ANY, 'manifest'])
-
-	def testManifestNotUpdated(self):
-		"""Verify we don't run `ebuild ... manifest` on unmodified files"""
-		with mock.patch.object(subprocess, 'check_call') as m:
-			self._process_ebuild((ekeyword.Op(None, 'arm', None),),
-			                     manifest=True)
-		self.assertEqual(m.call_count, 0)
+    """Tests for process_ebuild
+
+    This is fairly light as most code is in process_content.
+    """
+
+    def _process_ebuild(self, *args, **kwargs):
+        """Set up a writable copy of an ebuild for process_ebuild()"""
+        with tempfile.NamedTemporaryFile() as tmp:
+            with open(tmp.name, "wb") as fw:
+                with open(os.path.join(TESTDIR, "process-1.ebuild"), "rb") as f:
+                    orig_content = f.read()
+                    fw.write(orig_content)
+            ekeyword.process_ebuild(tmp.name, *args, **kwargs)
+            with open(tmp.name, "rb") as f:
+                return (orig_content, f.read())
+
+    def _testSmoke(self, dry_run):
+        ops = (
+            ekeyword.Op(None, "arm", None),
+            ekeyword.Op("~", "sparc", None),
+        )
+        orig_content, new_content = self._process_ebuild(ops, dry_run=dry_run)
+        if dry_run:
+            self.assertEqual(orig_content, new_content)
+        else:
+            self.assertNotEqual(orig_content, new_content)
+
+    def testSmokeNotDry(self):
+        self._testSmoke(False)
+
+    def testSmokeDry(self):
+        self._testSmoke(True)
+
+    def testManifestUpdated(self):
+        """Verify `ebuild ... manifest` runs on updated files"""
+        with mock.patch.object(subprocess, "check_call") as m:
+            self._process_ebuild((ekeyword.Op("~", "arm", None),), manifest=True)
+        m.assert_called_once_with(["ebuild", mock.ANY, "manifest"])
+
+    def testManifestNotUpdated(self):
+        """Verify we don't run `ebuild ... manifest` on unmodified files"""
+        with mock.patch.object(subprocess, "check_call") as m:
+            self._process_ebuild((ekeyword.Op(None, "arm", None),), manifest=True)
+        self.assertEqual(m.call_count, 0)
 
 
 class TestArgToOps(unittest.TestCase):
-	"""Tests for arg_to_op()"""
+    """Tests for arg_to_op()"""
 
-	def _test(self, arg, op):
-		self.assertEqual(ekeyword.arg_to_op(arg), ekeyword.Op(*op))
+    def _test(self, arg, op):
+        self.assertEqual(ekeyword.arg_to_op(arg), ekeyword.Op(*op))
 
-	def testStable(self):
-		self._test('arm', (None, 'arm', None))
+    def testStable(self):
+        self._test("arm", (None, "arm", None))
 
-	def testUnstable(self):
-		self._test('~ppc64', ('~', 'ppc64', None))
+    def testUnstable(self):
+        self._test("~ppc64", ("~", "ppc64", None))
 
-	def testDisabled(self):
-		self._test('-sparc', ('-', 'sparc', None))
+    def testDisabled(self):
+        self._test("-sparc", ("-", "sparc", None))
 
-	def testDeleted(self):
-		self._test('^x86-fbsd', ('^', 'x86-fbsd', None))
+    def testDeleted(self):
+        self._test("^x86-fbsd", ("^", "x86-fbsd", None))
 
-	def testSync(self):
-		self._test('s390=x86', (None, 's390', 'x86'))
+    def testSync(self):
+        self._test("s390=x86", (None, "s390", "x86"))
 
 
 class TestMain(unittest.TestCase):
-	"""Tests for the main entry point"""
+    """Tests for the main entry point"""
 
-	def testSmoke(self):
-		ekeyword.main(['arm', '--dry-run', os.path.join(TESTDIR, 'process-1.ebuild')])
+    def testSmoke(self):
+        ekeyword.main(["arm", "--dry-run", os.path.join(TESTDIR, "process-1.ebuild")])
 
-	def testVersion(self):
-		with self.assertRaises(SystemExit) as e:
-			ekeyword.main(['--version', '--dry-run'])
-		self.assertEqual(e.exception.code, os.EX_OK)
+    def testVersion(self):
+        with self.assertRaises(SystemExit) as e:
+            ekeyword.main(["--version", "--dry-run"])
+        self.assertEqual(e.exception.code, os.EX_OK)
 
-	def testEmptyString(self):
-		with self.assertRaises(SystemExit) as e:
-			ekeyword.main(['', os.path.join(TESTDIR, 'process-1.ebuild')])
-		self.assertNotEqual(e.exception.code, os.EX_OK)
+    def testEmptyString(self):
+        with self.assertRaises(SystemExit) as e:
+            ekeyword.main(["", os.path.join(TESTDIR, "process-1.ebuild")])
+        self.assertNotEqual(e.exception.code, os.EX_OK)
 
 
-if __name__ == '__main__':
-	unittest.main()
+if __name__ == "__main__":
+    unittest.main()

diff --git a/pym/gentoolkit/enalyze/__init__.py b/pym/gentoolkit/enalyze/__init__.py
index 6a40215..f884dcf 100644
--- a/pym/gentoolkit/enalyze/__init__.py
+++ b/pym/gentoolkit/enalyze/__init__.py
@@ -11,23 +11,19 @@
 # Move to Imports section after Python 2.6 is stable
 
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 # version is dynamically set by distutils sdist
 __version__ = "git"
 __productname__ = "enalyze"
-__authors__ = (
-	'Brian Dolbec, <brian.dolbec@gmail.com>'
-
-)
+__authors__ = "Brian Dolbec, <brian.dolbec@gmail.com>"
 
 # make an exportable copy of the info for help output
 MODULE_INFO = {
-	"__docformat__": __docformat__,
-	"__doc__": __doc__,
-	"__version__": __version__,
-	"__productname__": __productname__,
-	"__authors__": __authors__
-
+    "__docformat__": __docformat__,
+    "__doc__": __doc__,
+    "__version__": __version__,
+    "__productname__": __productname__,
+    "__authors__": __authors__,
 }
 
 import errno
@@ -39,86 +35,85 @@ import portage
 import gentoolkit as gen
 from gentoolkit import errors
 from gentoolkit import pprinter as pp
-from gentoolkit.base import (initialize_configuration, split_arguments,
-	parse_global_options, print_help)
+from gentoolkit.base import (
+    initialize_configuration,
+    split_arguments,
+    parse_global_options,
+    print_help,
+)
 
 
-NAME_MAP = {
-	'a': 'analyze',
-	'r': 'rebuild'
-}
+NAME_MAP = {"a": "analyze", "r": "rebuild"}
 
 FORMATTED_OPTIONS = (
-		("    (a)nalyze",
-		"analyzes the installed PKG database USE flag or keyword useage"),
-		("    (r)ebuild",
-		"analyzes the Installed PKG database and generates files suitable"),
-		("  ",
-		"to replace corrupted or missing /etc/portage/package.* files")
-	)
+    ("    (a)nalyze", "analyzes the installed PKG database USE flag or keyword useage"),
+    (
+        "    (r)ebuild",
+        "analyzes the Installed PKG database and generates files suitable",
+    ),
+    ("  ", "to replace corrupted or missing /etc/portage/package.* files"),
+)
+
 
 def expand_module_name(module_name):
-	"""Returns one of the values of NAME_MAP or raises KeyError"""
+    """Returns one of the values of NAME_MAP or raises KeyError"""
 
-	if module_name == 'list':
-		# list is a Python builtin type, so we must rename our module
-		return 'list_'
-	elif module_name in NAME_MAP.values():
-		return module_name
-	else:
-		return NAME_MAP[module_name]
+    if module_name == "list":
+        # list is a Python builtin type, so we must rename our module
+        return "list_"
+    elif module_name in NAME_MAP.values():
+        return module_name
+    else:
+        return NAME_MAP[module_name]
 
 
 def main():
-	"""Parse input and run the program."""
-
-	short_opts = "hqCNV"
-	long_opts = (
-		'help', 'quiet', 'nocolor', 'no-color', 'no-pipe', 'version', 'debug'
-	)
-
-	initialize_configuration()
-
-	try:
-		global_opts, args = getopt(sys.argv[1:], short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(" \n")
-		sys.stderr.write(pp.error("Global %s\n" % err))
-		print_help(MODULE_INFO, FORMATTED_OPTIONS, with_description=False)
-		sys.exit(2)
-
-	# Parse global options
-	need_help = parse_global_options(global_opts, args, MODULE_INFO, FORMATTED_OPTIONS)
-
-	if gen.CONFIG['quiet']:
-		gen.CONFIG['verbose'] = False
-
-	try:
-		module_name, module_args = split_arguments(args)
-	except IndexError:
-		print_help(MODULE_INFO,  FORMATTED_OPTIONS)
-		sys.exit(2)
-
-	if need_help:
-		module_args.append('--help')
-
-	try:
-		expanded_module_name = expand_module_name(module_name)
-	except KeyError:
-		sys.stderr.write(pp.error("Unknown module '%s'" % module_name))
-		print_help(MODULE_INFO, FORMATTED_OPTIONS, with_description=False)
-		sys.exit(2)
-
-	try:
-		loaded_module = __import__(
-			expanded_module_name, globals(), locals(), [], 1
-		)
-		loaded_module.main(module_args)
-	except portage.exception.AmbiguousPackageName as err:
-		raise errors.GentoolkitAmbiguousPackage(err.args[0])
-	except IOError as err:
-		if err.errno != errno.EPIPE:
-			raise
-
-if __name__ == '__main__':
-	main()
+    """Parse input and run the program."""
+
+    short_opts = "hqCNV"
+    long_opts = ("help", "quiet", "nocolor", "no-color", "no-pipe", "version", "debug")
+
+    initialize_configuration()
+
+    try:
+        global_opts, args = getopt(sys.argv[1:], short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(" \n")
+        sys.stderr.write(pp.error("Global %s\n" % err))
+        print_help(MODULE_INFO, FORMATTED_OPTIONS, with_description=False)
+        sys.exit(2)
+
+    # Parse global options
+    need_help = parse_global_options(global_opts, args, MODULE_INFO, FORMATTED_OPTIONS)
+
+    if gen.CONFIG["quiet"]:
+        gen.CONFIG["verbose"] = False
+
+    try:
+        module_name, module_args = split_arguments(args)
+    except IndexError:
+        print_help(MODULE_INFO, FORMATTED_OPTIONS)
+        sys.exit(2)
+
+    if need_help:
+        module_args.append("--help")
+
+    try:
+        expanded_module_name = expand_module_name(module_name)
+    except KeyError:
+        sys.stderr.write(pp.error("Unknown module '%s'" % module_name))
+        print_help(MODULE_INFO, FORMATTED_OPTIONS, with_description=False)
+        sys.exit(2)
+
+    try:
+        loaded_module = __import__(expanded_module_name, globals(), locals(), [], 1)
+        loaded_module.main(module_args)
+    except portage.exception.AmbiguousPackageName as err:
+        raise errors.GentoolkitAmbiguousPackage(err.args[0])
+    except IOError as err:
+        if err.errno != errno.EPIPE:
+            raise
+
+
+if __name__ == "__main__":
+    main()

diff --git a/pym/gentoolkit/enalyze/analyze.py b/pym/gentoolkit/enalyze/analyze.py
index 30ffa93..3a09145 100644
--- a/pym/gentoolkit/enalyze/analyze.py
+++ b/pym/gentoolkit/enalyze/analyze.py
@@ -21,456 +21,502 @@ import portage
 
 
 def gather_flags_info(
-		cpvs=None,
-		system_flags=None,
-		include_unset=False,
-		target="USE",
-		use_portage=False,
-		#  override-able for testing
-		_get_flags=get_flags,
-		_get_used=get_installed_use
-		):
-	"""Analyze the installed pkgs USE flags for frequency of use
-
-	@type cpvs: list
-	@param cpvs: optional list of [cat/pkg-ver,...] to analyze or
-			defaults to entire installed pkg db
-	@type: system_flags: list
-	@param system_flags: the current default USE flags as defined
-			by portage.settings["USE"].split()
-	@type include_unset: bool
-	@param include_unset: controls the inclusion of unset USE flags in the report.
-	@type target: string
-	@param target: the environment variable being analyzed
-			one of ["USE", "PKGUSE"]
-	@type _get_flags: function
-	@param _get_flags: ovride-able for testing,
-			defaults to gentoolkit.enalyze.lib.get_flags
-	@param _get_used: ovride-able for testing,
-			defaults to gentoolkit.enalyze.lib.get_installed_use
-	@rtype dict. {flag:{"+":[cat/pkg-ver,...], "-":[cat/pkg-ver,...], "unset":[]}
-	"""
-	if cpvs is None:
-		cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-	# pass them in to override for tests
-	flags = FlagAnalyzer(system_flags,
-		filter_defaults=False,
-		target=target,
-		_get_flags=_get_flags,
-		_get_used=get_installed_use
-	)
-	flag_users = {}
-	for cpv in cpvs:
-		if cpv.startswith("virtual"):
-			continue
-		if use_portage:
-			plus, minus, unset = flags.analyse_cpv(cpv)
-		else:
-			pkg = Package(cpv)
-			plus, minus, unset = flags.analyse_pkg(pkg)
-		for flag in plus:
-			if flag in flag_users:
-				flag_users[flag]["+"].append(cpv)
-			else:
-				flag_users[flag] = {"+": [cpv], "-": []}
-		for flag in minus:
-			if flag in flag_users:
-				flag_users[flag]["-"].append(cpv)
-			else:
-				flag_users[flag] = {"+":[], "-": [cpv]}
-		if include_unset:
-			for flag in unset:
-				if flag in flag_users:
-					if "unset" in flag_users[flag]:
-						flag_users[flag]["unset"].append(cpv)
-					else:
-						flag_users[flag]["unset"] = [cpv]
-				else:
-					flag_users[flag] = {"+": [], "-": [], "unset": [cpv]}
-	return flag_users
+    cpvs=None,
+    system_flags=None,
+    include_unset=False,
+    target="USE",
+    use_portage=False,
+    #  override-able for testing
+    _get_flags=get_flags,
+    _get_used=get_installed_use,
+):
+    """Analyze the installed pkgs USE flags for frequency of use
+
+    @type cpvs: list
+    @param cpvs: optional list of [cat/pkg-ver,...] to analyze or
+                    defaults to entire installed pkg db
+    @type: system_flags: list
+    @param system_flags: the current default USE flags as defined
+                    by portage.settings["USE"].split()
+    @type include_unset: bool
+    @param include_unset: controls the inclusion of unset USE flags in the report.
+    @type target: string
+    @param target: the environment variable being analyzed
+                    one of ["USE", "PKGUSE"]
+    @type _get_flags: function
+    @param _get_flags: ovride-able for testing,
+                    defaults to gentoolkit.enalyze.lib.get_flags
+    @param _get_used: ovride-able for testing,
+                    defaults to gentoolkit.enalyze.lib.get_installed_use
+    @rtype dict. {flag:{"+":[cat/pkg-ver,...], "-":[cat/pkg-ver,...], "unset":[]}
+    """
+    if cpvs is None:
+        cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+    # pass them in to override for tests
+    flags = FlagAnalyzer(
+        system_flags,
+        filter_defaults=False,
+        target=target,
+        _get_flags=_get_flags,
+        _get_used=get_installed_use,
+    )
+    flag_users = {}
+    for cpv in cpvs:
+        if cpv.startswith("virtual"):
+            continue
+        if use_portage:
+            plus, minus, unset = flags.analyse_cpv(cpv)
+        else:
+            pkg = Package(cpv)
+            plus, minus, unset = flags.analyse_pkg(pkg)
+        for flag in plus:
+            if flag in flag_users:
+                flag_users[flag]["+"].append(cpv)
+            else:
+                flag_users[flag] = {"+": [cpv], "-": []}
+        for flag in minus:
+            if flag in flag_users:
+                flag_users[flag]["-"].append(cpv)
+            else:
+                flag_users[flag] = {"+": [], "-": [cpv]}
+        if include_unset:
+            for flag in unset:
+                if flag in flag_users:
+                    if "unset" in flag_users[flag]:
+                        flag_users[flag]["unset"].append(cpv)
+                    else:
+                        flag_users[flag]["unset"] = [cpv]
+                else:
+                    flag_users[flag] = {"+": [], "-": [], "unset": [cpv]}
+    return flag_users
 
 
 def gather_keywords_info(
-		cpvs=None,
-		system_keywords=None,
-		use_portage=False,
-		#  override-able for testing
-		keywords=portage.settings["ACCEPT_KEYWORDS"],
-		analyser = None
-		):
-	"""Analyze the installed pkgs 'keywords' for frequency of use
-
-	@param cpvs: optional list of [cat/pkg-ver,...] to analyze or
-			defaults to entire installed pkg db
-	@param system_keywords: list of the system keywords
-	@param keywords: user defined list of keywords to check and report on
-			or reports on all relevant keywords found to have been used.
-	@param _get_kwds: overridable function for testing
-	@param _get_used: overridable function for testing
-	@rtype dict. {keyword:{"stable":[cat/pkg-ver,...], "testing":[cat/pkg-ver,...]}
-	"""
-	if cpvs is None:
-		cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-	keyword_users = {}
-	for cpv in cpvs:
-		if cpv.startswith("virtual"):
-			continue
-		if use_portage:
-			keyword = analyser.get_inst_keyword_cpv(cpv)
-		else:
-			pkg = Package(cpv)
-			keyword = analyser.get_inst_keyword_pkg(pkg)
-		#print "returned keyword =", cpv, keyword, keyword[0]
-		key = keyword[0]
-		if key in ["~", "-"]:
-			_kwd = keyword[1:]
-			if _kwd in keyword_users:
-				if key in ["~"]:
-					keyword_users[_kwd]["testing"].append(cpv)
-				elif key in ["-"]:
-					#print "adding cpv to missing:", cpv
-					keyword_users[_kwd]["missing"].append(cpv)
-			else:
-				if key in ["~"]:
-					keyword_users[_kwd] = {"stable": [],
-						"testing": [cpv], "missing": []}
-				elif key in ["-"]:
-					keyword_users[_kwd] = {"stable": [],
-						"testing": [], "missing": [cpv]}
-				else:
-					keyword_users[_kwd] = {"stable": [cpv],
-						"testing": [], "missing": []}
-		elif keyword in keyword_users:
-				keyword_users[keyword]["stable"].append(cpv)
-		else:
-				keyword_users[keyword] = {
-					"stable": [cpv],
-					"testing": [],
-					"missing": []
-					}
-	return keyword_users
+    cpvs=None,
+    system_keywords=None,
+    use_portage=False,
+    #  override-able for testing
+    keywords=portage.settings["ACCEPT_KEYWORDS"],
+    analyser=None,
+):
+    """Analyze the installed pkgs 'keywords' for frequency of use
+
+    @param cpvs: optional list of [cat/pkg-ver,...] to analyze or
+                    defaults to entire installed pkg db
+    @param system_keywords: list of the system keywords
+    @param keywords: user defined list of keywords to check and report on
+                    or reports on all relevant keywords found to have been used.
+    @param _get_kwds: overridable function for testing
+    @param _get_used: overridable function for testing
+    @rtype dict. {keyword:{"stable":[cat/pkg-ver,...], "testing":[cat/pkg-ver,...]}
+    """
+    if cpvs is None:
+        cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+    keyword_users = {}
+    for cpv in cpvs:
+        if cpv.startswith("virtual"):
+            continue
+        if use_portage:
+            keyword = analyser.get_inst_keyword_cpv(cpv)
+        else:
+            pkg = Package(cpv)
+            keyword = analyser.get_inst_keyword_pkg(pkg)
+        # print "returned keyword =", cpv, keyword, keyword[0]
+        key = keyword[0]
+        if key in ["~", "-"]:
+            _kwd = keyword[1:]
+            if _kwd in keyword_users:
+                if key in ["~"]:
+                    keyword_users[_kwd]["testing"].append(cpv)
+                elif key in ["-"]:
+                    # print "adding cpv to missing:", cpv
+                    keyword_users[_kwd]["missing"].append(cpv)
+            else:
+                if key in ["~"]:
+                    keyword_users[_kwd] = {
+                        "stable": [],
+                        "testing": [cpv],
+                        "missing": [],
+                    }
+                elif key in ["-"]:
+                    keyword_users[_kwd] = {
+                        "stable": [],
+                        "testing": [],
+                        "missing": [cpv],
+                    }
+                else:
+                    keyword_users[_kwd] = {
+                        "stable": [cpv],
+                        "testing": [],
+                        "missing": [],
+                    }
+        elif keyword in keyword_users:
+            keyword_users[keyword]["stable"].append(cpv)
+        else:
+            keyword_users[keyword] = {"stable": [cpv], "testing": [], "missing": []}
+    return keyword_users
 
 
 class Analyse(ModuleBase):
-	"""Installed db analysis tool to query the installed databse
-	and produce/output stats for USE flags or keywords/mask.
-	The 'rebuild' action output is in the form suitable for file type output
-	to create a new package.use, package.keywords, package.unmask
-	type files in the event of needing to rebuild the
-	/etc/portage/* user configs
-	"""
-	def __init__(self):
-		ModuleBase.__init__(self)
-		self.command_name = "enalyze"
-		self.module_name = "analyze"
-		self.options = {
-			"flags": False,
-			"keywords": False,
-			"packages": False,
-			"unset": False,
-			"verbose": False,
-			"quiet": False,
-			'prefix': False,
-			'portage': True,
-			"width": 80,
-			"prepend": "",
-		}
-		self.module_opts = {
-			"-f": ("flags", "boolean", True),
-			"--flags": ("flags", "boolean", True),
-			"-k": ("keywords", "boolean", True),
-			"--keywords": ("keywords", "boolean", True),
-			"-u": ("unset", "boolean", True),
-			"--unset": ("unset", "boolean", True),
-			"-v": ("verbose", "boolean", True),
-			"--verbose": ("verbose", "boolean", True),
-			"-p": ("prefix", "boolean", True),
-			"--prefix": ("prefix", "boolean", True),
-			"-P": ("prepend", "char", None),
-			"--prepend": ("prepend", "char", None),
-			"-G": ("portage", "boolean", False),
-			"--portage": ("portage", "boolean", False),
-			"-W": ("width", "int", 80),
-			"--width": ("width", "int", 80),
-		}
-		self.formatted_options = [
-			("  -h, --help",  "Outputs this useage message"),
-			("  -u, --unset",
-			"Additionally include any unset USE flags and the packages"),
-			("", "that could use them"),
-			("  -v, --verbose",
-			"Used in the analyze action to output more detailed information"),
-			("  -p, --prefix",
-			"Used for testing purposes only, runs report using " +
-			"a prefix keyword and 'prefix' USE flag"),
-			("  -P, --prepend",
-			"Prepend the string to any list output.  " +
-			"ie: prepend '* ' to the ""front of each package being listed."
-			"This is useful for generating preformatted wiki text."),
-			#(" -G, --portage",
-			#"Use portage directly instead of gentoolkit's Package " +
-			#"object for some operations. Usually a little faster."),
-			("  -W, --width",
-			"Format the output to wrap at 'WIDTH' ie: long line output"),
-		]
-		self.formatted_args = [
-			("  use",
-			"Causes the action to analyze the installed packages USE flags"),
-			("  pkguse",
-			"Causes the action to analyze the installed packages PKGUSE flags"),
-			("  ",
-			"These are flags that have been set in /etc/portage/package.use"),
-			("  keywords",
-			"Causes the action to analyze the installed packages keywords"),
-			("  packages",
-			"Causes the action to analyze the installed packages and the"),
-			("  ",
-			"USE flags they were installed with"),
-			("  unmask",
-			"Causes the action to analyze the installed packages"),
-			("  ",
-			"for those that need to be unmasked")
-		]
-		self.short_opts = "huvpGP:W:"
-		self.long_opts = ("help", "unset", "verbose", "prefix", "prepend=",
-						"width=") #, "portage")
-		self.need_queries = True
-		self.arg_spec = "Target"
-		self.arg_options = ['use', 'pkguse','keywords', 'packages', 'unmask']
-		self.arg_option = False
-		self.warning = (
-			"   CAUTION",
-			"This is beta software and some features/options are incomplete,",
-			"some features may change in future releases includig its name.",
-			"Feedback will be appreciated, http://bugs.gentoo.org")
-
-
-	def run(self, input_args, quiet=False):
-		"""runs the module
-
-		@param input_args: input arguments to be parsed
-		"""
-		query = self.main_setup(input_args)
-		query = self.validate_query(query)
-		self.set_quiet(quiet)
-		if query in ["use", "pkguse"]:
-			self.analyse_flags(query)
-		elif query in ["keywords"]:
-			self.analyse_keywords()
-		elif query in ["packages"]:
-			self.analyse_packages()
-		elif query in ["unmask"]:
-			self.analyse_unmask()
-
-	def analyse_flags(self, target):
-		"""This will scan the installed packages db and analyze the
-		USE flags used for installation and produce a report on how
-		they were used.
-
-		@type target: string
-		@param target: the target to be analyzed, one of ["use", "pkguse"]
-		"""
-		system_use = portage.settings["USE"].split()
-
-		self.printer = AnalysisPrinter(
-				"use",
-				self.options["verbose"],
-				system_use,
-				width=self.options["width"],
-				prepend=self.options["prepend"])
-		if self.options["verbose"]:
-			cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-			#cpvs = get_installed_cpvs()
-			#print "Total number of installed ebuilds =", len(cpvs)
-			flag_users = gather_flags_info(cpvs, system_use,
-				self.options["unset"], target=target.upper(),
-				use_portage=self.options['portage'])
-		else:
-			cpvs = get_installed_cpvs()
-			flag_users = gather_flags_info(cpvs, system_flags=system_use,
-				include_unset=self.options["unset"], target=target.upper(),
-				use_portage=self.options['portage'])
-		#print flag_users
-		flag_keys = sorted(flag_users)
-		if self.options["verbose"]:
-			print(" Flag                                 System  #pkgs   cat/pkg-ver")
-			#blankline = nl
-		elif not self.options['quiet']:
-			print(" Flag                                 System  #pkgs")
-			#blankline = lambda: None
-		for flag in flag_keys:
-			flag_pos = flag_users[flag]["+"]
-			if len(flag_pos):
-				self.printer(flag, "+", flag_pos)
-				#blankline()
-			flag_neg = flag_users[flag]["-"]
-			if len(flag_neg):
-				self.printer(flag, "-", flag_neg)
-				#blankline()
-			if "unset" in flag_users[flag] and flag_users[flag]["unset"]:
-				flag_unset = flag_users[flag]["unset"]
-				self.printer(flag, "unset", flag_unset)
-			#blankline()
-		if not self.options['quiet']:
-			print("===================================================")
-			print("Total number of flags in report =",
-				pp.output.red(str(len(flag_keys))))
-			if self.options["verbose"]:
-				print("Total number of installed ebuilds =",
-					pp.output.red(str(len([x for x in cpvs]))))
-			print()
-
-
-	def analyse_keywords(self, keywords=None):
-		"""This will scan the installed packages db and analyze the
-		keywords used for installation and produce a report on them.
-		"""
-		print()
-		system_keywords = portage.settings["ACCEPT_KEYWORDS"]
-		arch = portage.settings["ARCH"]
-		if self.options["prefix"]:
-			# build a new keyword for testing
-			system_keywords = "~" + arch + "-linux"
-		if self.options["verbose"] or self.options["prefix"]:
-			print("Current system ARCH =", arch)
-			print("Current system ACCEPT_KEYWORDS =", system_keywords)
-		system_keywords = system_keywords.split()
-		self.printer = AnalysisPrinter(
-				"keywords",
-				self.options["verbose"],
-				system_keywords,
-				width=self.options["width"],
-				prepend=self.options["prepend"])
-		self.analyser = KeywordAnalyser( arch, system_keywords, portage.db[portage.root]["vartree"].dbapi)
-		#self.analyser.set_order(portage.settings["USE"].split())
-		# only for testing
-		test_use = portage.settings["USE"].split()
-		if self.options['prefix'] and 'prefix' not in test_use:
-			print("ANALYSE_KEYWORDS() 'prefix' flag not found in system",
-				"USE flags!!!  appending for testing")
-			print()
-			test_use.append('prefix')
-		self.analyser.set_order(test_use)
-		# /end testing
-
-		if self.options["verbose"]:
-			cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-			#print "Total number of installed ebuilds =", len(cpvs)
-			keyword_users = gather_keywords_info(
-				cpvs=cpvs,
-				system_keywords=system_keywords,
-				use_portage=self.options['portage'],
-				keywords=keywords, analyser = self.analyser
-				)
-			blankline = nl
-		else:
-			keyword_users = gather_keywords_info(
-				system_keywords=system_keywords,
-				use_portage=self.options['portage'],
-				keywords=keywords,
-				analyser = self.analyser
-				)
-			blankline = lambda: None
-		#print keyword_users
-		keyword_keys = sorted(keyword_users)
-		if self.options["verbose"]:
-			print(" Keyword               System  #pkgs   cat/pkg-ver")
-		elif not self.options['quiet']:
-			print(" Keyword               System  #pkgs")
-		for keyword in keyword_keys:
-			kwd_stable = keyword_users[keyword]["stable"]
-			if len(kwd_stable):
-				self.printer(keyword, " ", kwd_stable)
-				blankline()
-			kwd_testing = keyword_users[keyword]["testing"]
-			if len(kwd_testing):
-				self.printer(keyword, "~", kwd_testing)
-				blankline()
-			kwd_missing = keyword_users[keyword]["missing"]
-			if len(kwd_missing):
-				self.printer(keyword, "-", kwd_missing)
-				blankline
-		if not self.options['quiet']:
-			if self.analyser.mismatched:
-				print("_________________________________________________")
-				print(("The following packages were found to have a \n" +
-					"different recorded ARCH than the current system ARCH"))
-				for cpv in self.analyser.mismatched:
-					print("\t", pp.cpv(cpv))
-			print("===================================================")
-			print("Total number of keywords in report =",
-				pp.output.red(str(len(keyword_keys))))
-			if self.options["verbose"]:
-				print("Total number of installed ebuilds =",
-					pp.output.red(str(len(cpvs))))
-			print()
-
-
-	def analyse_packages(self):
-		"""This will scan the installed packages db and analyze the
-		USE flags used for installation and produce a report.
-
-		@type target: string
-		@param target: the target to be analyzed, one of ["use", "pkguse"]
-		"""
-		system_use = portage.settings["USE"].split()
-		if self.options["verbose"]:
-			cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-			key_width = 45
-		else:
-			cpvs = get_installed_cpvs()
-			key_width = 1
-
-		self.printer = AnalysisPrinter(
-				"packages",
-				self.options["verbose"],
-				key_width=key_width,
-				width=self.options["width"],
-				prepend=self.options["prepend"])
-
-		cpvs = sorted(cpvs)
-		flags = FlagAnalyzer(
-					system=system_use,
-					filter_defaults=False,
-					target="USE"
-					)
-
-		if self.options["verbose"]:
-			print("   cat/pkg-ver                             USE Flags")
-				#   "app-emulation/emul-linux-x86-sdl-20100915 ...."
-			#blankline = nl
-		elif not self.options['quiet']:
-			print("   cat/pkg-ver                             USE Flags")
-			#blankline = lambda: None
-		for cpv in cpvs:
-			(flag_plus, flag_neg, unset) = flags.analyse_cpv(cpv)
-			if self.options["unset"]:
-				self.printer(cpv, "", (sorted(flag_plus), sorted(flag_neg),
-					sorted(unset)))
-			else:
-				self.printer(cpv, "", (sorted(flag_plus), sorted(flag_neg), []))
-		if not self.options['quiet']:
-			print("===================================================")
-			print("Total number of installed ebuilds =",
-				pp.output.red(str(len([x for x in cpvs]))))
-			print()
-
-
-	def analyse_unmask(self):
-		"""This will scan the installed packages db and analyze the
-		unmasking used for installation and produce a report on them.
-		"""
-		self.not_implemented("unmask")
-
+    """Installed db analysis tool to query the installed databse
+    and produce/output stats for USE flags or keywords/mask.
+    The 'rebuild' action output is in the form suitable for file type output
+    to create a new package.use, package.keywords, package.unmask
+    type files in the event of needing to rebuild the
+    /etc/portage/* user configs
+    """
+
+    def __init__(self):
+        ModuleBase.__init__(self)
+        self.command_name = "enalyze"
+        self.module_name = "analyze"
+        self.options = {
+            "flags": False,
+            "keywords": False,
+            "packages": False,
+            "unset": False,
+            "verbose": False,
+            "quiet": False,
+            "prefix": False,
+            "portage": True,
+            "width": 80,
+            "prepend": "",
+        }
+        self.module_opts = {
+            "-f": ("flags", "boolean", True),
+            "--flags": ("flags", "boolean", True),
+            "-k": ("keywords", "boolean", True),
+            "--keywords": ("keywords", "boolean", True),
+            "-u": ("unset", "boolean", True),
+            "--unset": ("unset", "boolean", True),
+            "-v": ("verbose", "boolean", True),
+            "--verbose": ("verbose", "boolean", True),
+            "-p": ("prefix", "boolean", True),
+            "--prefix": ("prefix", "boolean", True),
+            "-P": ("prepend", "char", None),
+            "--prepend": ("prepend", "char", None),
+            "-G": ("portage", "boolean", False),
+            "--portage": ("portage", "boolean", False),
+            "-W": ("width", "int", 80),
+            "--width": ("width", "int", 80),
+        }
+        self.formatted_options = [
+            ("  -h, --help", "Outputs this useage message"),
+            (
+                "  -u, --unset",
+                "Additionally include any unset USE flags and the packages",
+            ),
+            ("", "that could use them"),
+            (
+                "  -v, --verbose",
+                "Used in the analyze action to output more detailed information",
+            ),
+            (
+                "  -p, --prefix",
+                "Used for testing purposes only, runs report using "
+                + "a prefix keyword and 'prefix' USE flag",
+            ),
+            (
+                "  -P, --prepend",
+                "Prepend the string to any list output.  " + "ie: prepend '* ' to the "
+                "front of each package being listed."
+                "This is useful for generating preformatted wiki text.",
+            ),
+            # (" -G, --portage",
+            # "Use portage directly instead of gentoolkit's Package " +
+            # "object for some operations. Usually a little faster."),
+            (
+                "  -W, --width",
+                "Format the output to wrap at 'WIDTH' ie: long line output",
+            ),
+        ]
+        self.formatted_args = [
+            ("  use", "Causes the action to analyze the installed packages USE flags"),
+            (
+                "  pkguse",
+                "Causes the action to analyze the installed packages PKGUSE flags",
+            ),
+            ("  ", "These are flags that have been set in /etc/portage/package.use"),
+            (
+                "  keywords",
+                "Causes the action to analyze the installed packages keywords",
+            ),
+            (
+                "  packages",
+                "Causes the action to analyze the installed packages and the",
+            ),
+            ("  ", "USE flags they were installed with"),
+            ("  unmask", "Causes the action to analyze the installed packages"),
+            ("  ", "for those that need to be unmasked"),
+        ]
+        self.short_opts = "huvpGP:W:"
+        self.long_opts = (
+            "help",
+            "unset",
+            "verbose",
+            "prefix",
+            "prepend=",
+            "width=",
+        )  # , "portage")
+        self.need_queries = True
+        self.arg_spec = "Target"
+        self.arg_options = ["use", "pkguse", "keywords", "packages", "unmask"]
+        self.arg_option = False
+        self.warning = (
+            "   CAUTION",
+            "This is beta software and some features/options are incomplete,",
+            "some features may change in future releases includig its name.",
+            "Feedback will be appreciated, http://bugs.gentoo.org",
+        )
+
+    def run(self, input_args, quiet=False):
+        """runs the module
+
+        @param input_args: input arguments to be parsed
+        """
+        query = self.main_setup(input_args)
+        query = self.validate_query(query)
+        self.set_quiet(quiet)
+        if query in ["use", "pkguse"]:
+            self.analyse_flags(query)
+        elif query in ["keywords"]:
+            self.analyse_keywords()
+        elif query in ["packages"]:
+            self.analyse_packages()
+        elif query in ["unmask"]:
+            self.analyse_unmask()
+
+    def analyse_flags(self, target):
+        """This will scan the installed packages db and analyze the
+        USE flags used for installation and produce a report on how
+        they were used.
+
+        @type target: string
+        @param target: the target to be analyzed, one of ["use", "pkguse"]
+        """
+        system_use = portage.settings["USE"].split()
+
+        self.printer = AnalysisPrinter(
+            "use",
+            self.options["verbose"],
+            system_use,
+            width=self.options["width"],
+            prepend=self.options["prepend"],
+        )
+        if self.options["verbose"]:
+            cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+            # cpvs = get_installed_cpvs()
+            # print "Total number of installed ebuilds =", len(cpvs)
+            flag_users = gather_flags_info(
+                cpvs,
+                system_use,
+                self.options["unset"],
+                target=target.upper(),
+                use_portage=self.options["portage"],
+            )
+        else:
+            cpvs = get_installed_cpvs()
+            flag_users = gather_flags_info(
+                cpvs,
+                system_flags=system_use,
+                include_unset=self.options["unset"],
+                target=target.upper(),
+                use_portage=self.options["portage"],
+            )
+        # print flag_users
+        flag_keys = sorted(flag_users)
+        if self.options["verbose"]:
+            print(" Flag                                 System  #pkgs   cat/pkg-ver")
+            # blankline = nl
+        elif not self.options["quiet"]:
+            print(" Flag                                 System  #pkgs")
+            # blankline = lambda: None
+        for flag in flag_keys:
+            flag_pos = flag_users[flag]["+"]
+            if len(flag_pos):
+                self.printer(flag, "+", flag_pos)
+                # blankline()
+            flag_neg = flag_users[flag]["-"]
+            if len(flag_neg):
+                self.printer(flag, "-", flag_neg)
+                # blankline()
+            if "unset" in flag_users[flag] and flag_users[flag]["unset"]:
+                flag_unset = flag_users[flag]["unset"]
+                self.printer(flag, "unset", flag_unset)
+            # blankline()
+        if not self.options["quiet"]:
+            print("===================================================")
+            print(
+                "Total number of flags in report =", pp.output.red(str(len(flag_keys)))
+            )
+            if self.options["verbose"]:
+                print(
+                    "Total number of installed ebuilds =",
+                    pp.output.red(str(len([x for x in cpvs]))),
+                )
+            print()
+
+    def analyse_keywords(self, keywords=None):
+        """This will scan the installed packages db and analyze the
+        keywords used for installation and produce a report on them.
+        """
+        print()
+        system_keywords = portage.settings["ACCEPT_KEYWORDS"]
+        arch = portage.settings["ARCH"]
+        if self.options["prefix"]:
+            # build a new keyword for testing
+            system_keywords = "~" + arch + "-linux"
+        if self.options["verbose"] or self.options["prefix"]:
+            print("Current system ARCH =", arch)
+            print("Current system ACCEPT_KEYWORDS =", system_keywords)
+        system_keywords = system_keywords.split()
+        self.printer = AnalysisPrinter(
+            "keywords",
+            self.options["verbose"],
+            system_keywords,
+            width=self.options["width"],
+            prepend=self.options["prepend"],
+        )
+        self.analyser = KeywordAnalyser(
+            arch, system_keywords, portage.db[portage.root]["vartree"].dbapi
+        )
+        # self.analyser.set_order(portage.settings["USE"].split())
+        # only for testing
+        test_use = portage.settings["USE"].split()
+        if self.options["prefix"] and "prefix" not in test_use:
+            print(
+                "ANALYSE_KEYWORDS() 'prefix' flag not found in system",
+                "USE flags!!!  appending for testing",
+            )
+            print()
+            test_use.append("prefix")
+        self.analyser.set_order(test_use)
+        # /end testing
+
+        if self.options["verbose"]:
+            cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+            # print "Total number of installed ebuilds =", len(cpvs)
+            keyword_users = gather_keywords_info(
+                cpvs=cpvs,
+                system_keywords=system_keywords,
+                use_portage=self.options["portage"],
+                keywords=keywords,
+                analyser=self.analyser,
+            )
+            blankline = nl
+        else:
+            keyword_users = gather_keywords_info(
+                system_keywords=system_keywords,
+                use_portage=self.options["portage"],
+                keywords=keywords,
+                analyser=self.analyser,
+            )
+            blankline = lambda: None
+        # print keyword_users
+        keyword_keys = sorted(keyword_users)
+        if self.options["verbose"]:
+            print(" Keyword               System  #pkgs   cat/pkg-ver")
+        elif not self.options["quiet"]:
+            print(" Keyword               System  #pkgs")
+        for keyword in keyword_keys:
+            kwd_stable = keyword_users[keyword]["stable"]
+            if len(kwd_stable):
+                self.printer(keyword, " ", kwd_stable)
+                blankline()
+            kwd_testing = keyword_users[keyword]["testing"]
+            if len(kwd_testing):
+                self.printer(keyword, "~", kwd_testing)
+                blankline()
+            kwd_missing = keyword_users[keyword]["missing"]
+            if len(kwd_missing):
+                self.printer(keyword, "-", kwd_missing)
+                blankline
+        if not self.options["quiet"]:
+            if self.analyser.mismatched:
+                print("_________________________________________________")
+                print(
+                    (
+                        "The following packages were found to have a \n"
+                        + "different recorded ARCH than the current system ARCH"
+                    )
+                )
+                for cpv in self.analyser.mismatched:
+                    print("\t", pp.cpv(cpv))
+            print("===================================================")
+            print(
+                "Total number of keywords in report =",
+                pp.output.red(str(len(keyword_keys))),
+            )
+            if self.options["verbose"]:
+                print(
+                    "Total number of installed ebuilds =", pp.output.red(str(len(cpvs)))
+                )
+            print()
+
+    def analyse_packages(self):
+        """This will scan the installed packages db and analyze the
+        USE flags used for installation and produce a report.
+
+        @type target: string
+        @param target: the target to be analyzed, one of ["use", "pkguse"]
+        """
+        system_use = portage.settings["USE"].split()
+        if self.options["verbose"]:
+            cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+            key_width = 45
+        else:
+            cpvs = get_installed_cpvs()
+            key_width = 1
+
+        self.printer = AnalysisPrinter(
+            "packages",
+            self.options["verbose"],
+            key_width=key_width,
+            width=self.options["width"],
+            prepend=self.options["prepend"],
+        )
+
+        cpvs = sorted(cpvs)
+        flags = FlagAnalyzer(system=system_use, filter_defaults=False, target="USE")
+
+        if self.options["verbose"]:
+            print("   cat/pkg-ver                             USE Flags")
+            #   "app-emulation/emul-linux-x86-sdl-20100915 ...."
+            # blankline = nl
+        elif not self.options["quiet"]:
+            print("   cat/pkg-ver                             USE Flags")
+            # blankline = lambda: None
+        for cpv in cpvs:
+            (flag_plus, flag_neg, unset) = flags.analyse_cpv(cpv)
+            if self.options["unset"]:
+                self.printer(
+                    cpv, "", (sorted(flag_plus), sorted(flag_neg), sorted(unset))
+                )
+            else:
+                self.printer(cpv, "", (sorted(flag_plus), sorted(flag_neg), []))
+        if not self.options["quiet"]:
+            print("===================================================")
+            print(
+                "Total number of installed ebuilds =",
+                pp.output.red(str(len([x for x in cpvs]))),
+            )
+            print()
+
+    def analyse_unmask(self):
+        """This will scan the installed packages db and analyze the
+        unmasking used for installation and produce a report on them.
+        """
+        self.not_implemented("unmask")
 
 
 def main(input_args):
-	"""Common starting method by the analyze master
-	unless all modules are converted to this class method.
+    """Common starting method by the analyze master
+    unless all modules are converted to this class method.
+
+    @param input_args: input args as supplied by equery master module.
+    """
+    query_module = Analyse()
+    query_module.run(input_args, gentoolkit.CONFIG["quiet"])
 
-	@param input_args: input args as supplied by equery master module.
-	"""
-	query_module = Analyse()
-	query_module.run(input_args, gentoolkit.CONFIG['quiet'])
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/enalyze/lib.py b/pym/gentoolkit/enalyze/lib.py
index 50c7d11..6d7cb4e 100644
--- a/pym/gentoolkit/enalyze/lib.py
+++ b/pym/gentoolkit/enalyze/lib.py
@@ -10,349 +10,358 @@
 
 from gentoolkit import errors
 from gentoolkit.keyword import reduce_keywords
-from gentoolkit.flag import (reduce_flags, get_flags, get_all_cpv_use,
-	filter_flags, get_installed_use, defaulted_flags)
-#from gentoolkit.package import Package
+from gentoolkit.flag import (
+    reduce_flags,
+    get_flags,
+    get_all_cpv_use,
+    filter_flags,
+    get_installed_use,
+    defaulted_flags,
+)
+
+# from gentoolkit.package import Package
 
 import portage
 
 
 class FlagAnalyzer:
-	"""Specialty functions for analysing an installed package's
-	USE flags.  Can be used for single or mulitple use without
-	needing to be reset unless the system USE flags are changed.
-
-	@type system: list or set
-	@param system: the default system USE flags.
-	@type _get_flags: function
-	@param _get_flags: Normally defaulted, can be overriden for testing
-	@type _get_used: function
-	@param _get_used: Normally defaulted, can be overriden for testing
-		"""
-	def __init__(self,
-		system,
-		filter_defaults=False,
-		target="USE",
-		_get_flags=get_flags,
-		_get_used=get_installed_use
-	):
-		self.get_flags = _get_flags
-		self.get_used = _get_used
-		self.filter_defaults = filter_defaults
-		self.target = target
-		self.reset(system)
-
-	def reset(self, system):
-		"""Resets the internal system USE flags and use_expand variables
-		to the new setting. The use_expand variable is handled internally.
-
-		@type system: list or set
-		@param system: the default system USE flags.
-		"""
-		self.system = set(system)
-		self.use_expand = portage.settings['USE_EXPAND'].lower().split()
-
-	def analyse_cpv(self, cpv):
-		"""Gets all relavent USE flag info for a cpv and breaks them down
-		into 3 sets, plus (package.use enabled), minus ( package.use disabled),
-		unset.
-
-		@param cpv: string. 'cat/pkg-ver'
-		@rtype tuple of sets
-		@return (plus, minus, unset) sets of USE flags
-		"""
-		installed = set(self.get_used(cpv, self.target))
-		_iuse = self.get_flags(cpv)
-		iuse =  set(reduce_flags(_iuse))
-		iuse_defaults = defaulted_flags(_iuse)
-		return self._analyse(installed, iuse, iuse_defaults)
-
-	def _analyse(self, installed, iuse, iuse_defaults):
-		"""Analyzes the supplied info and returns the flag settings
-		that differ from the defaults
-
-		@type installed: set
-		@param installed: the installed with use flags
-		@type iuse: set
-		@param iuse: the current ebuilds IUSE
-		"""
-		defaults = self.system.intersection(iuse)
-		# update defaults with iuse_defaults
-		defaults.update(iuse_defaults['+'])
-		defaults = defaults.difference(iuse_defaults['-'])
-		usedflags = iuse.intersection(set(installed))
-		if self.filter_defaults:
-			plus = usedflags.difference(defaults)
-		else:
-			plus = usedflags
-		minus = defaults.difference(usedflags)
-		unset = iuse.difference(defaults, plus, minus)
-		cleaned_unset = self.remove_expanding(unset)
-		return (plus, minus, cleaned_unset)
-
-	def analyse_pkg(self, pkg):
-		"""Gets all relevent USE flag info for a pkg and breaks them down
-		into 3 sets, plus (package.use enabled), minus ( package.use disabled),
-		unset.
-
-		@param pkg: gentoolkit.package.Package object
-		@rtype tuple of sets
-		@return (plus, minus, unset) sets of USE flags
-		"""
-		installed = set(self.pkg_used(pkg))
-		#print("installed =", installed)
-		_iuse =  self.pkg_flags(pkg)
-		iuse =  set(reduce_flags(_iuse))
-		iuse_defaults = defaulted_flags(_iuse)
-		#print("iuse =", iuse)
-		return self._analyse(installed, iuse, iuse_defaults)
-
-	def pkg_used(self, pkg):
-		if self.target == "USE":
-			return pkg.use().split()
-		return pkg.environment(self.target).split()
-
-	def pkg_flags(self, pkg):
-		final_use, use_expand_hidden, usemasked, useforced = \
-			get_all_cpv_use(pkg.cpv)
-		flags = pkg.environment("IUSE", prefer_vdb=False).split()
-		return filter_flags(flags, use_expand_hidden, usemasked, useforced)
-
-	def redundant(self, cpv, iuse):
-		"""Checks for redundant settings.
-		future function. Not yet implemented.
-		"""
-		pass
-
-	def remove_expanding(self, flags):
-		"""Remove unwanted USE_EXPAND flags
-		from unset IUSE sets
-
-		@param flags: short list or set of USE flags
-		@rtype set
-		@return USE flags
-		"""
-		_flags = set(flags)
-		for expander in self.use_expand:
-			for flag in flags:
-				if expander in flag:
-					_flags.remove(flag)
-			if not _flags:
-				break
-		return _flags
+    """Specialty functions for analysing an installed package's
+    USE flags.  Can be used for single or multiple use without
+    needing to be reset unless the system USE flags are changed.
+
+    @type system: list or set
+    @param system: the default system USE flags.
+    @type _get_flags: function
+    @param _get_flags: Normally defaulted, can be overridden for testing
+    @type _get_used: function
+    @param _get_used: Normally defaulted, can be overridden for testing
+    """
+
+    def __init__(
+        self,
+        system,
+        filter_defaults=False,
+        target="USE",
+        _get_flags=get_flags,
+        _get_used=get_installed_use,
+    ):
+        self.get_flags = _get_flags
+        self.get_used = _get_used
+        self.filter_defaults = filter_defaults
+        self.target = target
+        self.reset(system)
+
+    def reset(self, system):
+        """Resets the internal system USE flags and use_expand variables
+        to the new setting. The use_expand variable is handled internally.
+
+        @type system: list or set
+        @param system: the default system USE flags.
+        """
+        self.system = set(system)
+        self.use_expand = portage.settings["USE_EXPAND"].lower().split()
+
+    def analyse_cpv(self, cpv):
+        """Gets all relavent USE flag info for a cpv and breaks them down
+        into 3 sets, plus (package.use enabled), minus ( package.use disabled),
+        unset.
+
+        @param cpv: string. 'cat/pkg-ver'
+        @rtype tuple of sets
+        @return (plus, minus, unset) sets of USE flags
+        """
+        installed = set(self.get_used(cpv, self.target))
+        _iuse = self.get_flags(cpv)
+        iuse = set(reduce_flags(_iuse))
+        iuse_defaults = defaulted_flags(_iuse)
+        return self._analyse(installed, iuse, iuse_defaults)
+
+    def _analyse(self, installed, iuse, iuse_defaults):
+        """Analyzes the supplied info and returns the flag settings
+        that differ from the defaults
+
+        @type installed: set
+    @param installed: the installed pkg's USE flags
+        @type iuse: set
+        @param iuse: the current ebuilds IUSE
+        """
+        defaults = self.system.intersection(iuse)
+        # update defaults with iuse_defaults
+        defaults.update(iuse_defaults["+"])
+        defaults = defaults.difference(iuse_defaults["-"])
+        usedflags = iuse.intersection(set(installed))
+        if self.filter_defaults:
+            plus = usedflags.difference(defaults)
+        else:
+            plus = usedflags
+        minus = defaults.difference(usedflags)
+        unset = iuse.difference(defaults, plus, minus)
+        cleaned_unset = self.remove_expanding(unset)
+        return (plus, minus, cleaned_unset)
+
+    def analyse_pkg(self, pkg):
+        """Gets all relevent USE flag info for a pkg and breaks them down
+        into 3 sets, plus (package.use enabled), minus ( package.use disabled),
+        unset.
+
+        @param pkg: gentoolkit.package.Package object
+        @rtype tuple of sets
+        @return (plus, minus, unset) sets of USE flags
+        """
+        installed = set(self.pkg_used(pkg))
+        # print("installed =", installed)
+        _iuse = self.pkg_flags(pkg)
+        iuse = set(reduce_flags(_iuse))
+        iuse_defaults = defaulted_flags(_iuse)
+        # print("iuse =", iuse)
+        return self._analyse(installed, iuse, iuse_defaults)
+
+    def pkg_used(self, pkg):
+        if self.target == "USE":
+            return pkg.use().split()
+        return pkg.environment(self.target).split()
+
+    def pkg_flags(self, pkg):
+        final_use, use_expand_hidden, usemasked, useforced = get_all_cpv_use(pkg.cpv)
+        flags = pkg.environment("IUSE", prefer_vdb=False).split()
+        return filter_flags(flags, use_expand_hidden, usemasked, useforced)
+
+    def redundant(self, cpv, iuse):
+        """Checks for redundant settings.
+        future function. Not yet implemented.
+        """
+        pass
+
+    def remove_expanding(self, flags):
+        """Remove unwanted USE_EXPAND flags
+        from unset IUSE sets
+
+        @param flags: short list or set of USE flags
+        @rtype set
+        @return USE flags
+        """
+        _flags = set(flags)
+        for expander in self.use_expand:
+            for flag in flags:
+                if expander in flag:
+                    _flags.remove(flag)
+            if not _flags:
+                break
+        return _flags
 
 
 class KeywordAnalyser:
-	"""Specialty functions for analysing the installed package db for
-	keyword useage and the packages that used them.
-
-	Note: should be initialized with the internal set_order() before use.
-	See internal set_order() for more details.
-	This class of functions can be used for single cpv checks or
-	used repeatedly for an entire package db.
-
-	@type  arch: string
-	@param arch: the system ARCH setting
-	@type  accept_keywords: list
-	@param accept_keywords: eg. ['x86', '~x86']
-	@type  get_aux: function, defaults to: portage.db[portage.root]["vartree"].dbapi.aux_get
-	@param vardb: vardb class of functions, needed=aux_get()
-		to return => KEYWORDS & USE flags for a cpv
-		= aux_get(cpv, ["KEYWORDS", "USE"])
-	"""
-
-	# parsing order to determine appropriate keyword used for installation
-	normal_order = ['stable', 'testing', 'prefix', 'testing_prefix', 'missing']
-	prefix_order = ['prefix', 'testing_prefix', 'stable', 'testing', 'missing']
-	parse_range = list(range(len(normal_order)))
-
-
-	def __init__(self, arch, accept_keywords, vardb=portage.db[portage.root]["vartree"].dbapi):
-		self.arch = arch
-		self.accept_keywords = accept_keywords
-		self.vardb = vardb
-		self.prefix = ''
-		self.parse_order = None
-		self.check_key = {
-			'stable': self._stable,
-			'testing': self._testing,
-			'prefix': self._prefix,
-			'testing_prefix': self._testing_prefix,
-			'missing': self._missing
-			}
-		self.mismatched = []
-
-	def determine_keyword(self, keywords, used, cpv):
-		"""Determine the keyword from the installed USE flags and
-		the KEYWORDS that was used to install a package.
-
-		@param keywords: list of keywords available to install a pkg
-		@param used: list of USE flalgs recorded for the installed pkg
-		@rtype: string
-		@return a keyword or null string
-		"""
-		used = set(used)
-		kwd = None
-		result = ''
-		if keywords:
-			absolute_kwds = reduce_keywords(keywords)
-			kwd = list(used.intersection(absolute_kwds))
-			#if keywords == ['~ppc64']:
-				#print "Checked keywords for kwd", keywords, used, "kwd =", kwd
-		if not kwd:
-			#print "Checking for kwd against portage.archlist"
-			absolute_kwds = reduce_keywords(keywords)
-			# check for one against archlist then re-check
-			kwd = list(absolute_kwds.intersection(portage.archlist))
-			#print "determined keyword =", kwd
-		if len(kwd) == 1:
-			key = kwd[0]
-			#print "determined keyword =", key
-		elif not kwd:
-			#print "kwd != 1", kwd, cpv
-			result = self._missing(self.keyword, keywords)
-		else: # too many, try to narrow them dowm
-			#print "too many kwd's, trying to match against arch"
-			_kwd = list(set(kwd).intersection(self.arch))
-			key = ''
-			if _kwd:
-				#print "found one! :)", _kwd
-				key = _kwd
-			else: # try re-running the short list against archlist
-				#print "Checking kwd for _kwd against portage.archlist"
-				_kwd = list(set(kwd).intersection(portage.archlist))
-				if _kwd and len(_kwd) == 1:
-					#print "found one! :)", _kwd
-					key = _kwd[0]
-				else:
-					#print " :( didn't work, _kwd =", _kwd, "giving up on:", cpv
-					result = self._missing(self.keyword, keywords)
-		i = 0
-		while not result and i in self.parse_range:
-			parsekey = self.parse_order[i]
-			result = self.check_key[parsekey](key, keywords)
-			i += 1
-		return result
-
-	def _stable(self, key, keywords):
-		"""test for a normal stable keyword"""
-		if key in keywords:
-			return key
-		return ''
-
-	def _testing(self, key, keywords):
-		"""test for a normal testing keyword"""
-		if ("~" + key) in keywords:
-			return "~" + key
-		return ''
-
-	def _prefix(self, key, keywords):
-		"""test for a stable prefix keyword"""
-		if not self.prefix:
-			return ''
-		_key = '-'.join([key, self.prefix])
-		if _key in keywords:
-			#print key, "is in", keywords
-			return _key
-		return ''
-
-	def _testing_prefix(self, key, keywords):
-		"""test for a testing prefix keyword"""
-		if not self.prefix:
-			return ''
-		_key = "~" +'-'.join([key, self.prefix])
-		if _key in keywords:
-			#print key, "is in", keywords
-			return _key
-		return ''
-
-	def _missing(self, key, keywords):
-		"""generates a missing keyword to return"""
-		if self.prefix and key != self.keyword:
-			_key = '-'.join([key, self.prefix])
-		else:
-			_key = '-' + key
-		#print "_missisng :(  _key =", _key
-		return _key
-
-	def get_inst_keyword_cpv(self, cpv):
-		"""Determines the installed with keyword for cpv
-
-		@type cpv: string
-		@param cpv: an installed CAT/PKG-VER
-		@rtype: string
-		@returns a keyword determined to have been used to install cpv
-		"""
-		keywords, used = self.vardb.aux_get(cpv, ["KEYWORDS", "USE"])
-		keywords = keywords.split()
-		used = used.split()
-		return self._parse(keywords, used, cpv=cpv)
-
-	def get_inst_keyword_pkg(self, pkg):
-		"""Determines the installed with keyword for cpv
-
-		@param pkg: gentoolkit.package.Package object
-		@rtype: string
-		@returns a keyword determined to have been used to install cpv
-		"""
-		keywords, used = pkg.environment(["KEYWORDS", "USE"],
-			prefer_vdb=True, fallback=False)
-		keywords = keywords.split()
-		used = used.split()
-		return self._parse(keywords, used, pkg=pkg)
-
-	def _parse(self, keywords, used, pkg=None, cpv=None):
-		if pkg:
-			_cpv = pkg.cpv
-		else:
-			_cpv = cpv
-		if not self.parse_order:
-			self.set_order(used)
-		keyword = self.keyword
-		# sanity check
-		if self.arch not in used:
-			#print "Found a mismatch = ", cpv, self.arch, used
-			self.mismatched.append(_cpv)
-		if keyword in keywords:
-			#print "keyword", keyword, "is in", keywords
-			return keyword
-		elif "~"+keyword in keywords:
-			#print "~keyword", keyword, "is in", keywords
-			return "~"+keyword
-		else:
-			keyword = self.determine_keyword(keywords, used, _cpv)
-			if not keyword:
-				raise errors.GentoolkitUnknownKeyword(_cpv, ' '.join(keywords), used)
-			return keyword
-
-	def set_order(self, used):
-		"""Used to set the parsing order to determine a keyword
-		used for installation.
-
-		This is needed due to the way prefix arch's and keywords
-		work with portage.  It looks for the 'prefix' flag. A positive result
-		sets it to the prefix order and keyword.
-
-		@type used: list
-		@param used: a list of pkg USE flags or the system USE flags"""
-		if 'prefix' in used:
-			#print "SET_ORDER() Setting parse order to prefix"
-			prefix = None
-			self.parse_order = self.prefix_order
-			for key in self.accept_keywords:
-				#print "SET_ORDER()  '"+key+"'"
-				if '-' in key:
-					#print "SET_ORDER()found prefix keyword :", key
-					if self.arch in key:
-						prefix = key.split('-')[1]
-						#print "prefix =", prefix
-						self.prefix = prefix
-			self.keyword = '-'.join([self.arch, prefix])
-		else:
-			#print "SET_ORDER() Setting parse order to normal"
-			self.parse_order = self.normal_order
-			self.keyword = self.arch
-		#print "SET_ORDER() completed: prefix =", self.prefix, ", keyword =", \
-		#   self.keyword, "parse order =",self.parse_order
-		#print
-
+    """Specialty functions for analysing the installed package db for
+    keyword usage and the packages that used them.
+
+    Note: should be initialized with the internal set_order() before use.
+    See internal set_order() for more details.
+    This class of functions can be used for single cpv checks or
+    used repeatedly for an entire package db.
+
+    @type  arch: string
+    @param arch: the system ARCH setting
+    @type  accept_keywords: list
+    @param accept_keywords: eg. ['x86', '~x86']
+    @type  get_aux: function, defaults to: portage.db[portage.root]["vartree"].dbapi.aux_get
+    @param vardb: vardb class of functions, needed=aux_get()
+            to return => KEYWORDS & USE flags for a cpv
+            = aux_get(cpv, ["KEYWORDS", "USE"])
+    """
+
+    # parsing order to determine appropriate keyword used for installation
+    normal_order = ["stable", "testing", "prefix", "testing_prefix", "missing"]
+    prefix_order = ["prefix", "testing_prefix", "stable", "testing", "missing"]
+    parse_range = list(range(len(normal_order)))
+
+    def __init__(
+        self, arch, accept_keywords, vardb=portage.db[portage.root]["vartree"].dbapi
+    ):
+        self.arch = arch
+        self.accept_keywords = accept_keywords
+        self.vardb = vardb
+        self.prefix = ""
+        self.parse_order = None
+        self.check_key = {
+            "stable": self._stable,
+            "testing": self._testing,
+            "prefix": self._prefix,
+            "testing_prefix": self._testing_prefix,
+            "missing": self._missing,
+        }
+        self.mismatched = []
+
+    def determine_keyword(self, keywords, used, cpv):
+        """Determine the keyword from the installed USE flags and
+        the KEYWORDS that was used to install a package.
+
+        @param keywords: list of keywords available to install a pkg
+    @param used: list of USE flags recorded for the installed pkg
+        @rtype: string
+        @return a keyword or null string
+        """
+        used = set(used)
+        kwd = None
+        result = ""
+        if keywords:
+            absolute_kwds = reduce_keywords(keywords)
+            kwd = list(used.intersection(absolute_kwds))
+            # if keywords == ['~ppc64']:
+            # print "Checked keywords for kwd", keywords, used, "kwd =", kwd
+        if not kwd:
+            # print "Checking for kwd against portage.archlist"
+            absolute_kwds = reduce_keywords(keywords)
+            # check for one against archlist then re-check
+            kwd = list(absolute_kwds.intersection(portage.archlist))
+            # print "determined keyword =", kwd
+        if len(kwd) == 1:
+            key = kwd[0]
+            # print "determined keyword =", key
+        elif not kwd:
+            # print "kwd != 1", kwd, cpv
+            result = self._missing(self.keyword, keywords)
+        else:  # too many, try to narrow them down
+            # print "too many kwd's, trying to match against arch"
+            _kwd = list(set(kwd).intersection(self.arch))
+            key = ""
+            if _kwd:
+                # print "found one! :)", _kwd
+                key = _kwd
+            else:  # try re-running the short list against archlist
+                # print "Checking kwd for _kwd against portage.archlist"
+                _kwd = list(set(kwd).intersection(portage.archlist))
+                if _kwd and len(_kwd) == 1:
+                    # print "found one! :)", _kwd
+                    key = _kwd[0]
+                else:
+                    # print " :( didn't work, _kwd =", _kwd, "giving up on:", cpv
+                    result = self._missing(self.keyword, keywords)
+        i = 0
+        while not result and i in self.parse_range:
+            parsekey = self.parse_order[i]
+            result = self.check_key[parsekey](key, keywords)
+            i += 1
+        return result
+
+    def _stable(self, key, keywords):
+        """test for a normal stable keyword"""
+        if key in keywords:
+            return key
+        return ""
+
+    def _testing(self, key, keywords):
+        """test for a normal testing keyword"""
+        if ("~" + key) in keywords:
+            return "~" + key
+        return ""
+
+    def _prefix(self, key, keywords):
+        """test for a stable prefix keyword"""
+        if not self.prefix:
+            return ""
+        _key = "-".join([key, self.prefix])
+        if _key in keywords:
+            # print key, "is in", keywords
+            return _key
+        return ""
+
+    def _testing_prefix(self, key, keywords):
+        """test for a testing prefix keyword"""
+        if not self.prefix:
+            return ""
+        _key = "~" + "-".join([key, self.prefix])
+        if _key in keywords:
+            # print key, "is in", keywords
+            return _key
+        return ""
+
+    def _missing(self, key, keywords):
+        """generates a missing keyword to return"""
+        if self.prefix and key != self.keyword:
+            _key = "-".join([key, self.prefix])
+        else:
+            _key = "-" + key
+        # print "_missisng :(  _key =", _key
+        return _key
+
+    def get_inst_keyword_cpv(self, cpv):
+        """Determines the installed with keyword for cpv
+
+        @type cpv: string
+        @param cpv: an installed CAT/PKG-VER
+        @rtype: string
+        @returns a keyword determined to have been used to install cpv
+        """
+        keywords, used = self.vardb.aux_get(cpv, ["KEYWORDS", "USE"])
+        keywords = keywords.split()
+        used = used.split()
+        return self._parse(keywords, used, cpv=cpv)
+
+    def get_inst_keyword_pkg(self, pkg):
+        """Determines the installed with keyword for cpv
+
+        @param pkg: gentoolkit.package.Package object
+        @rtype: string
+        @returns a keyword determined to have been used to install cpv
+        """
+        keywords, used = pkg.environment(
+            ["KEYWORDS", "USE"], prefer_vdb=True, fallback=False
+        )
+        keywords = keywords.split()
+        used = used.split()
+        return self._parse(keywords, used, pkg=pkg)
+
+    def _parse(self, keywords, used, pkg=None, cpv=None):
+        if pkg:
+            _cpv = pkg.cpv
+        else:
+            _cpv = cpv
+        if not self.parse_order:
+            self.set_order(used)
+        keyword = self.keyword
+        # sanity check
+        if self.arch not in used:
+            # print "Found a mismatch = ", cpv, self.arch, used
+            self.mismatched.append(_cpv)
+        if keyword in keywords:
+            # print "keyword", keyword, "is in", keywords
+            return keyword
+        elif "~" + keyword in keywords:
+            # print "~keyword", keyword, "is in", keywords
+            return "~" + keyword
+        else:
+            keyword = self.determine_keyword(keywords, used, _cpv)
+            if not keyword:
+                raise errors.GentoolkitUnknownKeyword(_cpv, " ".join(keywords), used)
+            return keyword
+
+    def set_order(self, used):
+        """Used to set the parsing order to determine a keyword
+        used for installation.
+
+        This is needed due to the way prefix arch's and keywords
+        work with portage.  It looks for the 'prefix' flag. A positive result
+        sets it to the prefix order and keyword.
+
+        @type used: list
+        @param used: a list of pkg USE flags or the system USE flags"""
+        if "prefix" in used:
+            # print "SET_ORDER() Setting parse order to prefix"
+            prefix = None
+            self.parse_order = self.prefix_order
+            for key in self.accept_keywords:
+                # print "SET_ORDER()  '"+key+"'"
+                if "-" in key:
+                    # print "SET_ORDER()found prefix keyword :", key
+                    if self.arch in key:
+                        prefix = key.split("-")[1]
+                        # print "prefix =", prefix
+                        self.prefix = prefix
+            self.keyword = "-".join([self.arch, prefix])
+        else:
+            # print "SET_ORDER() Setting parse order to normal"
+            self.parse_order = self.normal_order
+            self.keyword = self.arch
+        # print "SET_ORDER() completed: prefix =", self.prefix, ", keyword =", \
+        #   self.keyword, "parse order =",self.parse_order
+        # print

diff --git a/pym/gentoolkit/enalyze/output.py b/pym/gentoolkit/enalyze/output.py
index 993565a..1778304 100644
--- a/pym/gentoolkit/enalyze/output.py
+++ b/pym/gentoolkit/enalyze/output.py
@@ -14,275 +14,282 @@ from gentoolkit import pprinter as pp
 from gentoolkit.formatters import CpvValueWrapper
 from gentoolkit.cpv import split_cpv
 
+
 def nl(lines=1):
-	"""small utility function to print blank lines
+    """small utility function to print blank lines
+
+    @type lines: integer
+    @param lines: optional number of blank lines to print
+            default = 1
+    """
+    print(("\n" * lines))
 
-	@type lines: integer
-	@param lines: optional number of blank lines to print
-		default = 1
-		"""
-	print(('\n' * lines))
 
 class AnalysisPrinter(CpvValueWrapper):
-	"""Printing functions"""
-	def __init__(self, target, verbose=True, references=None, key_width=1,
-				width=None, prepend=''):
-		"""@param references: list of accepted keywords or
-				the system use flags
-				"""
-		self.references = references
-		self.key_width = key_width
-		self.width = width
-		self.prepend = prepend
-		CpvValueWrapper.__init__(self, cpv_width=key_width, width=width)
-		self.set_target(target, verbose)
-
-	def set_target(self, target, verbose=True):
-		if target in ["use"]:
-			if verbose:
-				self.print_fn = self.print_use_verbose
-			else:
-				self.print_fn = self.print_use_quiet
-			self._format_key = self._format_use_keyword
-		elif target in ["keywords"]:
-			if verbose:
-				self.print_fn = self.print_keyword_verbose
-			else:
-				self.print_fn = self.print_keyword_quiet
-			self._format_key = self._format_use_keyword
-		elif target in ["packages"]:
-			if verbose:
-				self.print_fn = self.print_pkg_verbose
-			else:
-				self.print_fn = self.print_pkg_quiet
-			self._format_key = self._format_pkg
-
-	def __call__(self, key, active, data):
-		self._format_key(key, active, data)
-
-	def _format_use_keyword(self, key, active, pkgs):
-		"""Determines the stats for key, formats it and
-		calls the pre-determined print function
-		"""
-		occurred = str(len(pkgs))
-		if active in ["-", "~"]:
-			_key = active + key
-		else:
-			_key = key
-		if _key in self.references:
-			default = "default"
-		else:
-			default = "......."
-		count = ' '*(5-len(occurred)) + occurred
-		pkgs.sort()
-		self.print_fn(key, active, default, count, pkgs)
-
-	def print_use_verbose(self, key, active, default, count, pkgs):
-		"""Verbosely prints a set of use flag info. including the pkgs
-		using them.
-		"""
-		_pkgs = pkgs[:]
-		if active in ["+", "-"]:
-			_key = pp.useflag((active+key), active=="+")
-		else:
-			_key = (" " + key)
-		cpv = _pkgs.pop(0)
-		print(self.prepend + _key,'.'*(35-len(key)), default, pp.number(count),
-			pp.cpv(cpv))
-		while _pkgs:
-			cpv = _pkgs.pop(0)
-			print(' '*52 + pp.cpv(cpv))
-
-	def print_use_quiet(self, key, active, default, count, pkgs):
-		"""Quietly prints a subset set of USE flag info..
-		"""
-		if active in ["+", "-"]:
-			_key = pp.useflag((active+key), active=="+")
-		else:
-			_key = (" " + key)
-		print(self.prepend + _key,'.'*(35-len(key)), default, pp.number(count))
-
-	def print_keyword_verbose(self, key, stability, default, count, pkgs):
-		"""Verbosely prints a set of keywords info. including the pkgs
-		using them.
-		"""
-		_pkgs = pkgs[:]
-		_key = (pp.keyword((stability+key),stable=(stability==" "),
-			hard_masked=stability=="-"))
-		cpv = _pkgs.pop(0)
-		print(self.prepend + _key,'.'*(20-len(key)), default, pp.number(count),
-			pp.cpv(cpv))
-		while _pkgs:
-			cpv = _pkgs.pop(0)
-			print(' '*37 + pp.cpv(cpv))
-
-	def print_keyword_quiet(self, key, stability, default, count, pkgs):
-		"""Quietly prints a subset set of USE flag info..
-		"""
-		_key = (pp.keyword((stability+key), stable=(stability==" "),
-			hard_masked=stability=="-"))
-		print(self.prepend + _key,'.'*(20-len(key)), default, pp.number(count))
-
-	def _format_pkg(self, key, active, flags):
-		"""Determines the stats for key, formats it and
-		calls the pre-determined print function
-		"""
-		(plus, minus, cleaned) = flags
-		_plus = []
-		_minus = []
-		_cleaned = []
-		for flag in plus:
-			_flag = flag.strip()
-			if _flag:
-				_plus.append(_flag)
-		for flag in minus:
-			_flag = flag.strip()
-			if _flag:
-				_minus.append(_flag)
-		for flag in cleaned:
-			_flag = flag.strip()
-			if _flag:
-				_cleaned.append(_flag)
-		#print("cpv=", key, "_plus=", _plus, "_minus=", _minus)
-		self.print_fn(self.prepend + key, (plus, minus, cleaned))
-
-	def print_pkg_verbose(self, cpv, flags):
-		"""Verbosely prints the pkg's use flag info.
-		"""
-		(plus, minus, unset) = flags
-		_flags = []
-		for flag in plus:
-			_flags.append(pp.useflag((flag), True))
-		for flag in minus:
-			_flags.append(pp.useflag(('-' + flag), False))
-		for flag in unset:
-			_flags.append(pp.globaloption('-' + flag))
-
-		print(self._format_values(cpv, ", ".join(_flags)))
-
-
-	def print_pkg_quiet(self, cpv, flags):
-		"""Verbosely prints the pkg's use flag info.
-		"""
-		(plus, minus, unset) = flags
-		_flags = []
-		for flag in plus:
-			_flags.append(pp.useflag((flag), True))
-		for flag in minus:
-			_flags.append(pp.useflag(('-'+flag), False))
-		for flag in unset:
-			_flags.append(pp.globaloption('-' + flag))
-
-		print(self._format_values(cpv, ", ".join(_flags)))
+    """Printing functions"""
+
+    def __init__(
+        self, target, verbose=True, references=None, key_width=1, width=None, prepend=""
+    ):
+        """@param references: list of accepted keywords or
+        the system use flags
+        """
+        self.references = references
+        self.key_width = key_width
+        self.width = width
+        self.prepend = prepend
+        CpvValueWrapper.__init__(self, cpv_width=key_width, width=width)
+        self.set_target(target, verbose)
+
+    def set_target(self, target, verbose=True):
+        if target in ["use"]:
+            if verbose:
+                self.print_fn = self.print_use_verbose
+            else:
+                self.print_fn = self.print_use_quiet
+            self._format_key = self._format_use_keyword
+        elif target in ["keywords"]:
+            if verbose:
+                self.print_fn = self.print_keyword_verbose
+            else:
+                self.print_fn = self.print_keyword_quiet
+            self._format_key = self._format_use_keyword
+        elif target in ["packages"]:
+            if verbose:
+                self.print_fn = self.print_pkg_verbose
+            else:
+                self.print_fn = self.print_pkg_quiet
+            self._format_key = self._format_pkg
+
+    def __call__(self, key, active, data):
+        self._format_key(key, active, data)
+
+    def _format_use_keyword(self, key, active, pkgs):
+        """Determines the stats for key, formats it and
+        calls the pre-determined print function
+        """
+        occurred = str(len(pkgs))
+        if active in ["-", "~"]:
+            _key = active + key
+        else:
+            _key = key
+        if _key in self.references:
+            default = "default"
+        else:
+            default = "......."
+        count = " " * (5 - len(occurred)) + occurred
+        pkgs.sort()
+        self.print_fn(key, active, default, count, pkgs)
+
+    def print_use_verbose(self, key, active, default, count, pkgs):
+        """Verbosely prints a set of use flag info. including the pkgs
+        using them.
+        """
+        _pkgs = pkgs[:]
+        if active in ["+", "-"]:
+            _key = pp.useflag((active + key), active == "+")
+        else:
+            _key = " " + key
+        cpv = _pkgs.pop(0)
+        print(
+            self.prepend + _key,
+            "." * (35 - len(key)),
+            default,
+            pp.number(count),
+            pp.cpv(cpv),
+        )
+        while _pkgs:
+            cpv = _pkgs.pop(0)
+            print(" " * 52 + pp.cpv(cpv))
+
+    def print_use_quiet(self, key, active, default, count, pkgs):
+        """Quietly prints a subset of USE flag info."""
+        if active in ["+", "-"]:
+            _key = pp.useflag((active + key), active == "+")
+        else:
+            _key = " " + key
+        print(self.prepend + _key, "." * (35 - len(key)), default, pp.number(count))
+
+    def print_keyword_verbose(self, key, stability, default, count, pkgs):
+        """Verbosely prints a set of keywords info, including the pkgs
+        using them.
+        """
+        _pkgs = pkgs[:]
+        _key = pp.keyword(
+            (stability + key), stable=(stability == " "), hard_masked=stability == "-"
+        )
+        cpv = _pkgs.pop(0)
+        print(
+            self.prepend + _key,
+            "." * (20 - len(key)),
+            default,
+            pp.number(count),
+            pp.cpv(cpv),
+        )
+        while _pkgs:
+            cpv = _pkgs.pop(0)
+            print(" " * 37 + pp.cpv(cpv))
+
+    def print_keyword_quiet(self, key, stability, default, count, pkgs):
+        """Quietly prints a subset of keyword info."""
+        _key = pp.keyword(
+            (stability + key), stable=(stability == " "), hard_masked=stability == "-"
+        )
+        print(self.prepend + _key, "." * (20 - len(key)), default, pp.number(count))
+
+    def _format_pkg(self, key, active, flags):
+        """Determines the stats for key, formats it and
+        calls the pre-determined print function
+        """
+        (plus, minus, cleaned) = flags
+        _plus = []
+        _minus = []
+        _cleaned = []
+        for flag in plus:
+            _flag = flag.strip()
+            if _flag:
+                _plus.append(_flag)
+        for flag in minus:
+            _flag = flag.strip()
+            if _flag:
+                _minus.append(_flag)
+        for flag in cleaned:
+            _flag = flag.strip()
+            if _flag:
+                _cleaned.append(_flag)
+        # print("cpv=", key, "_plus=", _plus, "_minus=", _minus)
+        self.print_fn(self.prepend + key, (plus, minus, cleaned))
+
+    def print_pkg_verbose(self, cpv, flags):
+        """Verbosely prints the pkg's use flag info."""
+        (plus, minus, unset) = flags
+        _flags = []
+        for flag in plus:
+            _flags.append(pp.useflag((flag), True))
+        for flag in minus:
+            _flags.append(pp.useflag(("-" + flag), False))
+        for flag in unset:
+            _flags.append(pp.globaloption("-" + flag))
+
+        print(self._format_values(cpv, ", ".join(_flags)))
+
+    def print_pkg_quiet(self, cpv, flags):
+        """Quietly prints the pkg's use flag info."""
+        (plus, minus, unset) = flags
+        _flags = []
+        for flag in plus:
+            _flags.append(pp.useflag((flag), True))
+        for flag in minus:
+            _flags.append(pp.useflag(("-" + flag), False))
+        for flag in unset:
+            _flags.append(pp.globaloption("-" + flag))
+
+        print(self._format_values(cpv, ", ".join(_flags)))
 
 
 class RebuildPrinter(CpvValueWrapper):
-	"""Output functions"""
-	def __init__(self, target, pretend=True, exact=False,
-		slot=False, key_width=1, width=None):
-		"""@param references: list of accepted keywords or
-				the system use flags
-		"""
-		self.target = target
-		self.set_target(target)
-		self.pretend = pretend
-		CpvValueWrapper.__init__(self, cpv_width=key_width, width=width)
-		if pretend:
-			self.spacer = '  '
-			self.init_indent = len(self.spacer)
-		else:
-			self.spacer = ''
-		self.exact = exact
-		self.slot = slot
-		self.data = {}
-
-
-	def set_target(self, target):
-		if target in ["use"]:
-			self.print_fn = self.print_use
-		elif target in ["keywords"]:
-			self.print_fn = self.print_keyword
-		elif target in ["unmask"]:
-			self.print_fn = self.print_mask
-		self.lines = [self.header()]
-
-
-	def __call__(self, key, values, cp_count):
-		if self.target in ["keywords", "use"]:
-			self._format_atoms(key, values, cp_count)
-		else:
-			self._format_key(key, values)
-
-
-	def _format_key(self, key, values):
-		"""Determines the stats for key, formats it and
-		calls the pre-determined print function
-		"""
-		if self.exact:
-			_key = "=" + key
-		else:
-			parts = split_cpv(key)
-			_key = '/'.join(parts[:2])
-		values.sort()
-		self.data[_key] = values
-		self.print_fn( _key, values)
-
-	def print_use(self, key, atom=None, values=None):
-		"""Prints a USE flag string.
-		"""
-		if atom and not values:
-			values = atom.use
-		if self.pretend:
-			flags = []
-			for flag in values:
-				flags.append(pp.useflag(flag, (flag[0] != '-')))
-			print(self._format_values(self.spacer+key, ' '.join(flags)))
-		else:
-			line = ' '.join([key, ' '.join(values)])
-			self.lines.append(line)
-
-	def _format_atoms(self, key, atoms, count):
-		"""Determines if there are more than one atom in the values and
-		calls the predetermined print function for each atom.
-		"""
-		#print("_format_atoms(),", key, atoms)
-		if self.exact:
-			for atom in atoms:
-				self.print_fn(str(atom), atom=atom)
-			return
-		#print("_format_atoms(), count =", count)
-		if self.slot or count > 1:
-			for atom in atoms:
-				_key = str(atom.cp) + ":" + atom.slot
-				self.print_fn(_key, atom=atom)
-		else:
-			for atom in atoms:
-				_key = str(atom.cp)
-				self.print_fn(_key, atom=atom)
-		return
-
-	def print_keyword(self, key, atom=None, keyword=None):
-		"""prints a pkg key and a keyword"""
-		#print("print_keyword(),", key, keyword)
-		if atom and not keyword:
-			keyword = atom.keyword
-		if self.pretend:
-			print(self._format_values(key, keyword))
-		else:
-			line = ' '.join([key, keyword])
-			self.lines.append(line)
-
-
-	def print_unmask(self):
-		pass
-
-	def header(self):
-		"""Generates a file header
-		"""
-
-		h=("# This package.%s file was generated by "
-			%self.target +
-			"gentoolkit's 'enalyze rebuild' module\n"
-			"# Date: " + time.asctime() + "\n"
-		)
-		return h
+    """Output functions"""
+
+    def __init__(
+        self, target, pretend=True, exact=False, slot=False, key_width=1, width=None
+    ):
+        """@param references: list of accepted keywords or
+        the system use flags
+        """
+        self.target = target
+        self.set_target(target)
+        self.pretend = pretend
+        CpvValueWrapper.__init__(self, cpv_width=key_width, width=width)
+        if pretend:
+            self.spacer = "  "
+            self.init_indent = len(self.spacer)
+        else:
+            self.spacer = ""
+        self.exact = exact
+        self.slot = slot
+        self.data = {}
+
+    def set_target(self, target):
+        if target in ["use"]:
+            self.print_fn = self.print_use
+        elif target in ["keywords"]:
+            self.print_fn = self.print_keyword
+        elif target in ["unmask"]:
+            self.print_fn = self.print_mask
+        self.lines = [self.header()]
+
+    def __call__(self, key, values, cp_count):
+        if self.target in ["keywords", "use"]:
+            self._format_atoms(key, values, cp_count)
+        else:
+            self._format_key(key, values)
+
+    def _format_key(self, key, values):
+        """Determines the stats for key, formats it and
+        calls the pre-determined print function
+        """
+        if self.exact:
+            _key = "=" + key
+        else:
+            parts = split_cpv(key)
+            _key = "/".join(parts[:2])
+        values.sort()
+        self.data[_key] = values
+        self.print_fn(_key, values)
+
+    def print_use(self, key, atom=None, values=None):
+        """Prints a USE flag string."""
+        if atom and not values:
+            values = atom.use
+        if self.pretend:
+            flags = []
+            for flag in values:
+                flags.append(pp.useflag(flag, (flag[0] != "-")))
+            print(self._format_values(self.spacer + key, " ".join(flags)))
+        else:
+            line = " ".join([key, " ".join(values)])
+            self.lines.append(line)
+
+    def _format_atoms(self, key, atoms, count):
+        """Determines if there are more than one atom in the values and
+        calls the predetermined print function for each atom.
+        """
+        # print("_format_atoms(),", key, atoms)
+        if self.exact:
+            for atom in atoms:
+                self.print_fn(str(atom), atom=atom)
+            return
+        # print("_format_atoms(), count =", count)
+        if self.slot or count > 1:
+            for atom in atoms:
+                _key = str(atom.cp) + ":" + atom.slot
+                self.print_fn(_key, atom=atom)
+        else:
+            for atom in atoms:
+                _key = str(atom.cp)
+                self.print_fn(_key, atom=atom)
+        return
+
+    def print_keyword(self, key, atom=None, keyword=None):
+        """prints a pkg key and a keyword"""
+        # print("print_keyword(),", key, keyword)
+        if atom and not keyword:
+            keyword = atom.keyword
+        if self.pretend:
+            print(self._format_values(key, keyword))
+        else:
+            line = " ".join([key, keyword])
+            self.lines.append(line)
+
+    def print_unmask(self):
+        pass
+
+    def header(self):
+        """Generates a file header"""
+
+        h = (
+            "# This package.%s file was generated by " % self.target
+            + "gentoolkit's 'enalyze rebuild' module\n"
+            "# Date: " + time.asctime() + "\n"
+        )
+        return h

diff --git a/pym/gentoolkit/enalyze/rebuild.py b/pym/gentoolkit/enalyze/rebuild.py
index c0ac8cb..f341680 100644
--- a/pym/gentoolkit/enalyze/rebuild.py
+++ b/pym/gentoolkit/enalyze/rebuild.py
@@ -15,8 +15,12 @@ import os
 import gentoolkit
 from gentoolkit.module_base import ModuleBase
 from gentoolkit import pprinter as pp
-from gentoolkit.enalyze.lib import (get_installed_use, get_flags, FlagAnalyzer,
-	KeywordAnalyser)
+from gentoolkit.enalyze.lib import (
+    get_installed_use,
+    get_flags,
+    FlagAnalyzer,
+    KeywordAnalyser,
+)
 from gentoolkit.enalyze.output import RebuildPrinter
 from gentoolkit.atom import Atom
 from gentoolkit.package import Package
@@ -25,343 +29,389 @@ from gentoolkit.package import Package
 import portage
 from portage import _encodings, _unicode_encode
 
-def cpv_all_diff_use(
-		cpvs=None,
-		system_flags=None,
-		#  override-able for testing
-		_get_flags=get_flags,
-		_get_used=get_installed_use
-		):
-	"""Data gathering and analysis function determines
-	the difference between the current default USE flag settings
-	and the currently installed pkgs recorded USE flag settings
 
-	@type cpvs: list
-	@param cpvs: optional list of [cat/pkg-ver,...] to analyze or
-			defaults to entire installed pkg db
-	@type: system_flags: list
-	@param system_flags: the current default USE flags as defined
-			by portage.settings["USE"].split()
-	@type _get_flags: function
-	@param _get_flags: ovride-able for testing,
-			defaults to gentoolkit.enalyze.lib.get_flags
-	@param _get_used: ovride-able for testing,
-			defaults to gentoolkit.enalyze.lib.get_installed_use
-	@rtype dict. {cpv:['flag1', '-flag2',...]}
-	"""
-	if cpvs is None:
-		cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-	cpvs.sort()
-	data = {}
-	cp_counts = {}
-	# pass them in to override for tests
-	flags = FlagAnalyzer(system_flags,
-		filter_defaults=True,
-		target="USE",
-		_get_flags=_get_flags,
-		_get_used=get_installed_use
-	)
-	for cpv in cpvs:
-		plus, minus, unset = flags.analyse_cpv(cpv)
-		atom = Atom("="+cpv)
-		atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(atom.cpv, ["SLOT"])[0]
-		for flag in minus:
-			plus.add("-"+flag)
-		if len(plus):
-			if atom.cp not in data:
-				data[atom.cp] = []
-			if atom.cp not in cp_counts:
-				cp_counts[atom.cp] = 0
-			atom.use = list(plus)
-			data[atom.cp].append(atom)
-			cp_counts[atom.cp] += 1
-	return data, cp_counts
+def cpv_all_diff_use(
+    cpvs=None,
+    system_flags=None,
+    #  override-able for testing
+    _get_flags=get_flags,
+    _get_used=get_installed_use,
+):
+    """Data gathering and analysis function determines
+    the difference between the current default USE flag settings
+    and the currently installed pkgs recorded USE flag settings
+
+    @type cpvs: list
+    @param cpvs: optional list of [cat/pkg-ver,...] to analyze or
+                    defaults to entire installed pkg db
+    @type: system_flags: list
+    @param system_flags: the current default USE flags as defined
+                    by portage.settings["USE"].split()
+    @type _get_flags: function
+    @param _get_flags: override-able for testing,
+                    defaults to gentoolkit.enalyze.lib.get_flags
+    @param _get_used: override-able for testing,
+                    defaults to gentoolkit.enalyze.lib.get_installed_use
+    @rtype dict. {cpv:['flag1', '-flag2',...]}
+    """
+    if cpvs is None:
+        cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+    cpvs.sort()
+    data = {}
+    cp_counts = {}
+    # pass them in to override for tests
+    flags = FlagAnalyzer(
+        system_flags,
+        filter_defaults=True,
+        target="USE",
+        _get_flags=_get_flags,
+        _get_used=get_installed_use,
+    )
+    for cpv in cpvs:
+        plus, minus, unset = flags.analyse_cpv(cpv)
+        atom = Atom("=" + cpv)
+        atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(
+            atom.cpv, ["SLOT"]
+        )[0]
+        for flag in minus:
+            plus.add("-" + flag)
+        if len(plus):
+            if atom.cp not in data:
+                data[atom.cp] = []
+            if atom.cp not in cp_counts:
+                cp_counts[atom.cp] = 0
+            atom.use = list(plus)
+            data[atom.cp].append(atom)
+            cp_counts[atom.cp] += 1
+    return data, cp_counts
 
 
 def cpv_all_diff_keywords(
-		cpvs=None,
-		system_keywords=None,
-		use_portage=False,
-		#  override-able for testing
-		keywords=portage.settings["ACCEPT_KEYWORDS"],
-		analyser = None
-		):
-	"""Analyze the installed pkgs 'keywords' for difference from ACCEPT_KEYWORDS
-
-	@param cpvs: optional list of [cat/pkg-ver,...] to analyze or
-			defaults to entire installed pkg db
-	@param system_keywords: list of the system keywords
-	@param keywords: user defined list of keywords to check and report on
-			or reports on all relevant keywords found to have been used.
-	@param _get_kwds: overridable function for testing
-	@param _get_used: overridable function for testing
-	@rtype dict. {keyword:{"stable":[cat/pkg-ver,...],
-						   "testing":[cat/pkg-ver,...]}
-	"""
-	if cpvs is None:
-		cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-	keyword_users = {}
-	cp_counts = {}
-	for cpv in cpvs:
-		if cpv.startswith("virtual"):
-			continue
-		if use_portage:
-			keyword = analyser.get_inst_keyword_cpv(cpv)
-		else:
-			pkg = Package(cpv)
-			keyword = analyser.get_inst_keyword_pkg(pkg)
-		#print "returned keyword =", cpv, keyword, keyword[0]
-		key = keyword[0]
-		if key in ["~", "-"] and keyword not in system_keywords:
-			atom = Atom("="+cpv)
-			if atom.cp not in keyword_users:
-				keyword_users[atom.cp] = []
-			if atom.cp not in cp_counts:
-				cp_counts[atom.cp] = 0
-			if key in ["~"]:
-				atom.keyword = keyword
-				atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(atom.cpv, ["SLOT"])[0]
-				keyword_users[atom.cp].append(atom)
-				cp_counts[atom.cp] += 1
-			elif key in ["-"]:
-				#print "adding cpv to missing:", cpv
-				atom.keyword = "**"
-				atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(atom.cpv, ["SLOT"])[0]
-				keyword_users[atom.cp].append(atom)
-				cp_counts[atom.cp] += 1
-	return keyword_users, cp_counts
+    cpvs=None,
+    system_keywords=None,
+    use_portage=False,
+    #  override-able for testing
+    keywords=portage.settings["ACCEPT_KEYWORDS"],
+    analyser=None,
+):
+    """Analyze the installed pkgs 'keywords' for difference from ACCEPT_KEYWORDS
+
+    @param cpvs: optional list of [cat/pkg-ver,...] to analyze or
+                    defaults to entire installed pkg db
+    @param system_keywords: list of the system keywords
+    @param keywords: user defined list of keywords to check and report on
+                    or reports on all relevant keywords found to have been used.
+    @param _get_kwds: overridable function for testing
+    @param _get_used: overridable function for testing
+    @rtype dict. {keyword:{"stable":[cat/pkg-ver,...],
+                                               "testing":[cat/pkg-ver,...]}
+    """
+    if cpvs is None:
+        cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+    keyword_users = {}
+    cp_counts = {}
+    for cpv in cpvs:
+        if cpv.startswith("virtual"):
+            continue
+        if use_portage:
+            keyword = analyser.get_inst_keyword_cpv(cpv)
+        else:
+            pkg = Package(cpv)
+            keyword = analyser.get_inst_keyword_pkg(pkg)
+        # print "returned keyword =", cpv, keyword, keyword[0]
+        key = keyword[0]
+        if key in ["~", "-"] and keyword not in system_keywords:
+            atom = Atom("=" + cpv)
+            if atom.cp not in keyword_users:
+                keyword_users[atom.cp] = []
+            if atom.cp not in cp_counts:
+                cp_counts[atom.cp] = 0
+            if key in ["~"]:
+                atom.keyword = keyword
+                atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(
+                    atom.cpv, ["SLOT"]
+                )[0]
+                keyword_users[atom.cp].append(atom)
+                cp_counts[atom.cp] += 1
+            elif key in ["-"]:
+                # print "adding cpv to missing:", cpv
+                atom.keyword = "**"
+                atom.slot = portage.db[portage.root]["vartree"].dbapi.aux_get(
+                    atom.cpv, ["SLOT"]
+                )[0]
+                keyword_users[atom.cp].append(atom)
+                cp_counts[atom.cp] += 1
+    return keyword_users, cp_counts
 
 
 class Rebuild(ModuleBase):
-	"""Installed db analysis tool to query the installed databse
-	and produce/output stats for USE flags or keywords/mask.
-	The 'rebuild' action output is in the form suitable for file type output
-	to create a new package.use, package.keywords, package.unmask
-	type files in the event of needing to rebuild the
-	/etc/portage/* user configs
-	"""
-	def __init__(self):
-		ModuleBase.__init__(self)
-		self.command_name = "enalyze"
-		self.module_name = "rebuild"
-		self.options = {
-			"use": False,
-			"keywords": False,
-			"unmask": False,
-			"verbose": False,
-			"quiet": False,
-			"exact": False,
-			"pretend": False,
-			"prefix": False,
-			"portage": True,
-			"slot": False
-			#"unset": False
-		}
-		self.module_opts = {
-			"-p": ("pretend", "boolean", True),
-			"--pretend": ("pretend", "boolean", True),
-			"-e": ("exact", "boolean", True),
-			"--exact": ("exact", "boolean", True),
-			"-s": ("slot", "boolean", True),
-			"--slot": ("slot", "boolean", True),
-			"-v": ("verbose", "boolean", True),
-			"--verbose": ("verbose", "boolean", True),
-		}
-		self.formatted_options = [
-			("    -h, --help",  "Outputs this useage message"),
-			("    -p, --pretend", "Does not actually create the files."),
-			("    ", "It directs the outputs to the screen"),
-			("    -e, --exact", "will atomize the package with a"),
-			("  ", "leading '=' and include the version"),
-			("    -s, --slot", "will atomize the package with a"),
-			("  ", "leading '=' and include the slot")
-		]
-		self.formatted_args = [
-			("    use",
-			"causes the action to analyze the installed packages USE flags"),
-			("    keywords",
-			"causes the action to analyze the installed packages keywords"),
-			("    unmask",
-			"causes the action to analyze the installed packages " + \
-			"current mask status")
-		]
-		self.short_opts = "hepsv"
-		self.long_opts = ("help", "exact", "pretend", "slot", "verbose")
-		self.need_queries = True
-		self.arg_spec = "TargetSpec"
-		self.arg_options = ['use', 'keywords', 'unmask']
-		self.arg_option = False
-		self.warning = (
-			"     CAUTION",
-			"This is beta software and some features/options are incomplete,",
-			"some features may change in future releases includig its name.",
-			"The file generated is saved in your home directory",
-			"Feedback will be appreciated, http://bugs.gentoo.org")
-
-
-
-	def run(self, input_args, quiet=False):
-		"""runs the module
-
-		@param input_args: input arguments to be parsed
-		"""
-		self.options['quiet'] = quiet
-		query = self.main_setup(input_args)
-		query = self.validate_query(query)
-		if query in ["use"]:
-			self.rebuild_use()
-		elif query in ["keywords"]:
-			self.rebuild_keywords()
-		elif query in ["unmask"]:
-			self.rebuild_unmask()
-
-
-	def rebuild_use(self):
-		if not self.options["quiet"]:
-			print()
-			print("  -- Scanning installed packages for USE flag settings that")
-			print("     do not match the default settings")
-		system_use = portage.settings["USE"].split()
-		output = RebuildPrinter(
-			"use", self.options["pretend"], self.options["exact"],
-				self.options['slot'])
-		pkgs, cp_counts = cpv_all_diff_use(system_flags=system_use)
-		pkg_count = len(pkgs)
-		if self.options["verbose"]:
-			print()
-			print((pp.emph("  -- Found ") +  pp.number(str(pkg_count)) +
-				pp.emph(" packages that need entries")))
-			#print pp.emph("     package.use to maintain their current setting")
-		pkg_keys = []
-		if pkgs:
-			pkg_keys = sorted(pkgs)
-			#print len(pkgs)
-			if self.options["pretend"] and not self.options["quiet"]:
-				print()
-				print(pp.globaloption(
-					"  -- These are the installed packages & use flags " +
-					"that were detected"))
-				print(pp.globaloption("     to need use flag settings other " +
-					"than the defaults."))
-				print()
-			elif not self.options["quiet"]:
-				print("  -- preparing pkgs for file entries")
-			for pkg in pkg_keys:
-				output(pkg, pkgs[pkg], cp_counts[pkg])
-			if self.options['verbose']:
-				message = (pp.emph("     ") +
-					pp.number(str(pkg_count)) +
-					pp.emph(" different packages"))
-				print()
-				print(pp.globaloption("  -- Totals"))
-				print(message)
-				#print
-				#unique = list(unique_flags)
-				#unique.sort()
-				#print unique
-			if not self.options["pretend"]:
-				filepath = os.path.expanduser('~/package.use.test')
-				self.save_file(filepath, output.lines)
-
-	def rebuild_keywords(self):
-		#print("Module action not yet available")
-		#print()
-		"""This will scan the installed packages db and analyze the
-		keywords used for installation and produce a report on them.
-		"""
-		system_keywords = portage.settings["ACCEPT_KEYWORDS"].split()
-		output = RebuildPrinter(
-			"keywords", self.options["pretend"], self.options["exact"],
-			self.options['slot'])
-		arch = portage.settings["ARCH"]
-		if self.options["prefix"]:
-			# build a new keyword for testing
-			system_keywords = "~" + arch + "-linux"
-		if self.options["verbose"] or self.options["prefix"]:
-			print("Current system ARCH =", arch)
-			print("Current system ACCEPT_KEYWORDS =", system_keywords)
-		self.analyser = KeywordAnalyser( arch, system_keywords, portage.db[portage.root]["vartree"].dbapi)
-		#self.analyser.set_order(portage.settings["USE"].split())
-		# only for testing
-		test_use = portage.settings["USE"].split()
-		if self.options['prefix'] and 'prefix' not in test_use:
-			print("REBUILD_KEYWORDS() 'prefix' flag not found in system",
-				"USE flags!!!  appending for testing")
-			print()
-			test_use.append('prefix')
-		self.analyser.set_order(test_use)
-		# /end testing
-
-		cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
-		#print "Total number of installed ebuilds =", len(cpvs)
-		pkgs, cp_counts = cpv_all_diff_keywords(
-			cpvs=cpvs,
-			system_keywords=system_keywords,
-			use_portage=self.options['portage'],
-			analyser = self.analyser
-			)
-		#print([pkgs[p][0].cpv for p in pkgs])
-		pkg_keys = []
-		if pkgs:
-			pkg_keys = sorted(pkgs)
-			#print(len(pkgs))
-			if self.options["pretend"] and not self.options["quiet"]:
-				print()
-				print(pp.globaloption(
-					"  -- These are the installed packages & keywords " +
-					"that were detected"))
-				print(pp.globaloption("     to need keyword settings other " +
-					"than the defaults."))
-				print()
-			elif not self.options["quiet"]:
-				print("  -- preparing pkgs for file entries")
-			for pkg in pkg_keys:
-				output(pkg, pkgs[pkg], cp_counts[pkg])
-		if not self.options['quiet']:
-			if self.analyser.mismatched:
-				print("_________________________________________________")
-				print(("The following packages were found to have a \n" +
-					"different recorded ARCH than the current system ARCH"))
-				for cpv in self.analyser.mismatched:
-					print("\t", pp.cpv(cpv))
-			print("===================================================")
-			print("Total number of entries in report =",
-				pp.output.red(str(len(pkg_keys))))
-			if self.options["verbose"]:
-				print("Total number of installed ebuilds =",
-					pp.output.red(str(len(cpvs))))
-			print()
-			if not self.options["pretend"]:
-				filepath = os.path.expanduser('~/package.keywords.test')
-				self.save_file(filepath, output.lines)
-
-
-	def rebuild_unmask(self):
-		self.not_implemented("unmask")
-
-
-	def save_file(self, filepath, data):
-		"""Writes the data to the file determined by filepath
-
-		@param filepath: string. eg. '/path/to/filename'
-		@param data: list of lines to write to filepath
-		"""
-		if  not self.options["quiet"]:
-			print('   - Saving file: %s' %filepath)
-		with open(_unicode_encode(filepath, encoding=_encodings['fs']), mode="w",
-				encoding=_encodings['content']) as output:
-			output.write('\n'.join(data))
-		print("   - Done")
+    """Installed db analysis tool to query the installed database
+    and produce/output stats for USE flags or keywords/mask.
+    The 'rebuild' action output is in the form suitable for file type output
+    to create a new package.use, package.keywords, package.unmask
+    type files in the event of needing to rebuild the
+    /etc/portage/* user configs
+    """
+
+    def __init__(self):
+        ModuleBase.__init__(self)
+        self.command_name = "enalyze"
+        self.module_name = "rebuild"
+        self.options = {
+            "use": False,
+            "keywords": False,
+            "unmask": False,
+            "verbose": False,
+            "quiet": False,
+            "exact": False,
+            "pretend": False,
+            "prefix": False,
+            "portage": True,
+            "slot": False
+            # "unset": False
+        }
+        self.module_opts = {
+            "-p": ("pretend", "boolean", True),
+            "--pretend": ("pretend", "boolean", True),
+            "-e": ("exact", "boolean", True),
+            "--exact": ("exact", "boolean", True),
+            "-s": ("slot", "boolean", True),
+            "--slot": ("slot", "boolean", True),
+            "-v": ("verbose", "boolean", True),
+            "--verbose": ("verbose", "boolean", True),
+        }
+        self.formatted_options = [
+            ("    -h, --help", "Outputs this useage message"),
+            ("    -p, --pretend", "Does not actually create the files."),
+            ("    ", "It directs the outputs to the screen"),
+            ("    -e, --exact", "will atomize the package with a"),
+            ("  ", "leading '=' and include the version"),
+            ("    -s, --slot", "will atomize the package with a"),
+            ("  ", "leading '=' and include the slot"),
+        ]
+        self.formatted_args = [
+            (
+                "    use",
+                "causes the action to analyze the installed packages USE flags",
+            ),
+            (
+                "    keywords",
+                "causes the action to analyze the installed packages keywords",
+            ),
+            (
+                "    unmask",
+                "causes the action to analyze the installed packages "
+                + "current mask status",
+            ),
+        ]
+        self.short_opts = "hepsv"
+        self.long_opts = ("help", "exact", "pretend", "slot", "verbose")
+        self.need_queries = True
+        self.arg_spec = "TargetSpec"
+        self.arg_options = ["use", "keywords", "unmask"]
+        self.arg_option = False
+        self.warning = (
+            "     CAUTION",
+            "This is beta software and some features/options are incomplete,",
+            "some features may change in future releases includig its name.",
+            "The file generated is saved in your home directory",
+            "Feedback will be appreciated, http://bugs.gentoo.org",
+        )
+
+    def run(self, input_args, quiet=False):
+        """runs the module
+
+        @param input_args: input arguments to be parsed
+        """
+        self.options["quiet"] = quiet
+        query = self.main_setup(input_args)
+        query = self.validate_query(query)
+        if query in ["use"]:
+            self.rebuild_use()
+        elif query in ["keywords"]:
+            self.rebuild_keywords()
+        elif query in ["unmask"]:
+            self.rebuild_unmask()
+
+    def rebuild_use(self):
+        if not self.options["quiet"]:
+            print()
+            print("  -- Scanning installed packages for USE flag settings that")
+            print("     do not match the default settings")
+        system_use = portage.settings["USE"].split()
+        output = RebuildPrinter(
+            "use", self.options["pretend"], self.options["exact"], self.options["slot"]
+        )
+        pkgs, cp_counts = cpv_all_diff_use(system_flags=system_use)
+        pkg_count = len(pkgs)
+        if self.options["verbose"]:
+            print()
+            print(
+                (
+                    pp.emph("  -- Found ")
+                    + pp.number(str(pkg_count))
+                    + pp.emph(" packages that need entries")
+                )
+            )
+            # print pp.emph("     package.use to maintain their current setting")
+        pkg_keys = []
+        if pkgs:
+            pkg_keys = sorted(pkgs)
+            # print len(pkgs)
+            if self.options["pretend"] and not self.options["quiet"]:
+                print()
+                print(
+                    pp.globaloption(
+                        "  -- These are the installed packages & use flags "
+                        + "that were detected"
+                    )
+                )
+                print(
+                    pp.globaloption(
+                        "     to need use flag settings other " + "than the defaults."
+                    )
+                )
+                print()
+            elif not self.options["quiet"]:
+                print("  -- preparing pkgs for file entries")
+            for pkg in pkg_keys:
+                output(pkg, pkgs[pkg], cp_counts[pkg])
+            if self.options["verbose"]:
+                message = (
+                    pp.emph("     ")
+                    + pp.number(str(pkg_count))
+                    + pp.emph(" different packages")
+                )
+                print()
+                print(pp.globaloption("  -- Totals"))
+                print(message)
+                # print
+                # unique = list(unique_flags)
+                # unique.sort()
+                # print unique
+            if not self.options["pretend"]:
+                filepath = os.path.expanduser("~/package.use.test")
+                self.save_file(filepath, output.lines)
+
+    def rebuild_keywords(self):
+        # print("Module action not yet available")
+        # print()
+        """This will scan the installed packages db and analyze the
+        keywords used for installation and produce a report on them.
+        """
+        system_keywords = portage.settings["ACCEPT_KEYWORDS"].split()
+        output = RebuildPrinter(
+            "keywords",
+            self.options["pretend"],
+            self.options["exact"],
+            self.options["slot"],
+        )
+        arch = portage.settings["ARCH"]
+        if self.options["prefix"]:
+            # build a new keyword for testing
+            system_keywords = "~" + arch + "-linux"
+        if self.options["verbose"] or self.options["prefix"]:
+            print("Current system ARCH =", arch)
+            print("Current system ACCEPT_KEYWORDS =", system_keywords)
+        self.analyser = KeywordAnalyser(
+            arch, system_keywords, portage.db[portage.root]["vartree"].dbapi
+        )
+        # self.analyser.set_order(portage.settings["USE"].split())
+        # only for testing
+        test_use = portage.settings["USE"].split()
+        if self.options["prefix"] and "prefix" not in test_use:
+            print(
+                "REBUILD_KEYWORDS() 'prefix' flag not found in system",
+                "USE flags!!!  appending for testing",
+            )
+            print()
+            test_use.append("prefix")
+        self.analyser.set_order(test_use)
+        # /end testing
+
+        cpvs = portage.db[portage.root]["vartree"].dbapi.cpv_all()
+        # print "Total number of installed ebuilds =", len(cpvs)
+        pkgs, cp_counts = cpv_all_diff_keywords(
+            cpvs=cpvs,
+            system_keywords=system_keywords,
+            use_portage=self.options["portage"],
+            analyser=self.analyser,
+        )
+        # print([pkgs[p][0].cpv for p in pkgs])
+        pkg_keys = []
+        if pkgs:
+            pkg_keys = sorted(pkgs)
+            # print(len(pkgs))
+            if self.options["pretend"] and not self.options["quiet"]:
+                print()
+                print(
+                    pp.globaloption(
+                        "  -- These are the installed packages & keywords "
+                        + "that were detected"
+                    )
+                )
+                print(
+                    pp.globaloption(
+                        "     to need keyword settings other " + "than the defaults."
+                    )
+                )
+                print()
+            elif not self.options["quiet"]:
+                print("  -- preparing pkgs for file entries")
+            for pkg in pkg_keys:
+                output(pkg, pkgs[pkg], cp_counts[pkg])
+        if not self.options["quiet"]:
+            if self.analyser.mismatched:
+                print("_________________________________________________")
+                print(
+                    (
+                        "The following packages were found to have a \n"
+                        + "different recorded ARCH than the current system ARCH"
+                    )
+                )
+                for cpv in self.analyser.mismatched:
+                    print("\t", pp.cpv(cpv))
+            print("===================================================")
+            print(
+                "Total number of entries in report =", pp.output.red(str(len(pkg_keys)))
+            )
+            if self.options["verbose"]:
+                print(
+                    "Total number of installed ebuilds =", pp.output.red(str(len(cpvs)))
+                )
+            print()
+            if not self.options["pretend"]:
+                filepath = os.path.expanduser("~/package.keywords.test")
+                self.save_file(filepath, output.lines)
+
+    def rebuild_unmask(self):
+        self.not_implemented("unmask")
+
+    def save_file(self, filepath, data):
+        """Writes the data to the file determined by filepath
+
+        @param filepath: string. eg. '/path/to/filename'
+        @param data: list of lines to write to filepath
+        """
+        if not self.options["quiet"]:
+            print("   - Saving file: %s" % filepath)
+        with open(
+            _unicode_encode(filepath, encoding=_encodings["fs"]),
+            mode="w",
+            encoding=_encodings["content"],
+        ) as output:
+            output.write("\n".join(data))
+        print("   - Done")
 
 
 def main(input_args):
-	"""Common starting method by the analyze master
-	unless all modules are converted to this class method.
+    """Common starting method by the analyze master
+    unless all modules are converted to this class method.
 
-	@param input_args: input args as supplied by equery master module.
-	"""
-	query_module = Rebuild()
-	query_module.run(input_args, gentoolkit.CONFIG['quiet'])
+    @param input_args: input args as supplied by equery master module.
+    """
+    query_module = Rebuild()
+    query_module.run(input_args, gentoolkit.CONFIG["quiet"])
 
-# vim: set ts=4 sw=4 tw=79:
 
+# vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/eprefix.py b/pym/gentoolkit/eprefix.py
index 5acaa79..868a780 100644
--- a/pym/gentoolkit/eprefix.py
+++ b/pym/gentoolkit/eprefix.py
@@ -14,9 +14,9 @@ then in code add it to the filepath eg.:
 """
 # Load EPREFIX from Portage, fall back to the empty string if it fails
 try:
-	from portage.const import EPREFIX
+    from portage.const import EPREFIX
 except ImportError:
-	EPREFIX = ''
+    EPREFIX = ""
 
 if __name__ == "__main__":
-	print("EPREFIX set to:", EPREFIX)
+    print("EPREFIX set to:", EPREFIX)

diff --git a/pym/gentoolkit/equery/__init__.py b/pym/gentoolkit/equery/__init__.py
index e5b3deb..cecb5be 100644
--- a/pym/gentoolkit/equery/__init__.py
+++ b/pym/gentoolkit/equery/__init__.py
@@ -4,12 +4,8 @@
 
 """Gentoo package query tool"""
 
-__all__ = (
-	'format_options',
-	'format_package_names',
-	'mod_usage'
-)
-__docformat__ = 'epytext'
+__all__ = ("format_options", "format_package_names", "mod_usage")
+__docformat__ = "epytext"
 # version is dynamically set by distutils sdist
 __version__ = "git"
 
@@ -32,8 +28,8 @@ from gentoolkit.textwrap_ import TextWrapper
 
 __productname__ = "equery"
 __authors__ = (
-	'Karl Trygve Kalleberg - Original author',
-	'Douglas Anderson - 0.3.0 author'
+    "Karl Trygve Kalleberg - Original author",
+    "Douglas Anderson - 0.3.0 author",
 )
 
 # =======
@@ -41,318 +37,330 @@ __authors__ = (
 # =======
 
 NAME_MAP = {
-	'b': 'belongs',
-	'k': 'check',
-	'd': 'depends',
-	'g': 'depgraph',
-	'f': 'files',
-	'h': 'hasuse',
-	'l': 'list_',
-	'y': 'keywords',
-	'a': 'has',
-	'm': 'meta',
-	's': 'size',
-	'u': 'uses',
-	'w': 'which'
+    "b": "belongs",
+    "k": "check",
+    "d": "depends",
+    "g": "depgraph",
+    "f": "files",
+    "h": "hasuse",
+    "l": "list_",
+    "y": "keywords",
+    "a": "has",
+    "m": "meta",
+    "s": "size",
+    "u": "uses",
+    "w": "which",
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@param with_description (bool): Option to print module's __doc__ or not
-	"""
-
-	if with_description:
-		print(__doc__)
-	print(main_usage())
-	print()
-	print(pp.globaloption("global options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -q, --quiet", "minimal output"),
-		(" -C, --no-color", "turn off colors"),
-		(" -N, --no-pipe", "turn off pipe detection"),
-		(" -V, --version", "display version info")
-	)))
-	print()
-	print(pp.command("modules") + " (" + pp.command("short name") + ")")
-	print(format_options((
-		(" (b)elongs", "list what package FILES belong to"),
-		(" chec(k)", "verify checksums and timestamps for PKG"),
-		(" (d)epends", "list all packages directly depending on ATOM"),
-		(" dep(g)raph", "display a tree of all dependencies for PKG"),
-		(" (f)iles", "list all files installed by PKG"),
-		(" h(a)s", "list all packages for matching ENVIRONMENT data stored in /var/db/pkg"),
-		(" (h)asuse", "list all packages that have USE flag"),
-		(" ke(y)words", "display keywords for specified PKG"),
-		(" (l)ist", "list package matching PKG"),
-		(" (m)eta", "display metadata about PKG"),
-		(" (s)ize", "display total size of all files owned by PKG"),
-		(" (u)ses", "display USE flags for PKG"),
-		(" (w)hich", "print full path to ebuild for PKG")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @param with_description (bool): Option to print module's __doc__ or not
+    """
+
+    if with_description:
+        print(__doc__)
+    print(main_usage())
+    print()
+    print(pp.globaloption("global options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -q, --quiet", "minimal output"),
+                (" -C, --no-color", "turn off colors"),
+                (" -N, --no-pipe", "turn off pipe detection"),
+                (" -V, --version", "display version info"),
+            )
+        )
+    )
+    print()
+    print(pp.command("modules") + " (" + pp.command("short name") + ")")
+    print(
+        format_options(
+            (
+                (" (b)elongs", "list what package FILES belong to"),
+                (" chec(k)", "verify checksums and timestamps for PKG"),
+                (" (d)epends", "list all packages directly depending on ATOM"),
+                (" dep(g)raph", "display a tree of all dependencies for PKG"),
+                (" (f)iles", "list all files installed by PKG"),
+                (
+                    " h(a)s",
+                    "list all packages for matching ENVIRONMENT data stored in /var/db/pkg",
+                ),
+                (" (h)asuse", "list all packages that have USE flag"),
+                (" ke(y)words", "display keywords for specified PKG"),
+                (" (l)ist", "list package matching PKG"),
+                (" (m)eta", "display metadata about PKG"),
+                (" (s)ize", "display total size of all files owned by PKG"),
+                (" (u)ses", "display USE flags for PKG"),
+                (" (w)hich", "print full path to ebuild for PKG"),
+            )
+        )
+    )
 
 
 def expand_module_name(module_name):
-	"""Returns one of the values of NAME_MAP or raises KeyError"""
+    """Returns one of the values of NAME_MAP or raises KeyError"""
 
-	if module_name == 'list':
-		# list is a Python builtin type, so we must rename our module
-		return 'list_'
-	elif module_name in NAME_MAP.values():
-		return module_name
-	else:
-		return NAME_MAP[module_name]
+    if module_name == "list":
+        # list is a Python builtin type, so we must rename our module
+        return "list_"
+    elif module_name in NAME_MAP.values():
+        return module_name
+    else:
+        return NAME_MAP[module_name]
 
 
 def format_options(options):
-	"""Format module options.
-
-	@type options: list
-	@param options: [('option 1', 'description 1'), ('option 2', 'des... )]
-	@rtype: str
-	@return: formatted options string
-	"""
-
-	result = []
-	twrap = TextWrapper(width=CONFIG['termWidth'])
-	opts = (x[0] for x in options)
-	descs = (x[1] for x in options)
-	for opt, desc in zip(opts, descs):
-		twrap.initial_indent = pp.emph(opt.ljust(25))
-		twrap.subsequent_indent = " " * 25
-		result.append(twrap.fill(desc))
-
-	return '\n'.join(result)
-
-
-def format_filetype(path, fdesc, show_type=False, show_md5=False,
-		show_timestamp=False):
-	"""Format a path for printing.
-
-	@type path: str
-	@param path: the path
-	@type fdesc: list
-	@param fdesc: [file_type, timestamp, MD5 sum/symlink target]
-		file_type is one of dev, dir, obj, sym, fif.
-		If file_type is dir, there is no timestamp or MD5 sum.
-		If file_type is sym, fdesc[2] is the target of the symlink.
-	@type show_type: bool
-	@param show_type: if True, prepend the file's type to the formatted string
-	@type show_md5: bool
-	@param show_md5: if True, append MD5 sum to the formatted string
-	@type show_timestamp: bool
-	@param show_timestamp: if True, append time-of-creation after pathname
-	@rtype: str
-	@return: formatted pathname with optional added information
-	"""
-
-	ftype = fpath = stamp = md5sum = ""
-
-	if fdesc[0] == "obj":
-		ftype = "file"
-		fpath = path
-		stamp = format_timestamp(fdesc[1])
-		md5sum = fdesc[2]
-	elif fdesc[0] == "dir":
-		ftype = "dir"
-		fpath = pp.path(path)
-	elif fdesc[0] == "sym":
-		ftype = "sym"
-		stamp = format_timestamp(fdesc[1])
-		tgt = fdesc[2].split()[0]
-		if CONFIG["piping"]:
-			fpath = path
-		else:
-			fpath = pp.path_symlink(path + " -> " + tgt)
-	elif fdesc[0] == "dev":
-		ftype = "dev"
-		fpath = path
-	elif fdesc[0] == "fif":
-		ftype = "fifo"
-		fpath = path
-	else:
-		sys.stderr.write(
-			pp.error("%s has unknown type: %s" % (path, fdesc[0]))
-		)
-
-	result = ""
-	if show_type:
-		result += "%4s " % ftype
-	result += fpath
-	if show_timestamp:
-		result += "  " + stamp
-	if show_md5:
-		result += "  " + md5sum
-
-	return result
+    """Format module options.
+
+    @type options: list
+    @param options: [('option 1', 'description 1'), ('option 2', 'des... )]
+    @rtype: str
+    @return: formatted options string
+    """
+
+    result = []
+    twrap = TextWrapper(width=CONFIG["termWidth"])
+    opts = (x[0] for x in options)
+    descs = (x[1] for x in options)
+    for opt, desc in zip(opts, descs):
+        twrap.initial_indent = pp.emph(opt.ljust(25))
+        twrap.subsequent_indent = " " * 25
+        result.append(twrap.fill(desc))
+
+    return "\n".join(result)
+
+
+def format_filetype(path, fdesc, show_type=False, show_md5=False, show_timestamp=False):
+    """Format a path for printing.
+
+    @type path: str
+    @param path: the path
+    @type fdesc: list
+    @param fdesc: [file_type, timestamp, MD5 sum/symlink target]
+            file_type is one of dev, dir, obj, sym, fif.
+            If file_type is dir, there is no timestamp or MD5 sum.
+            If file_type is sym, fdesc[2] is the target of the symlink.
+    @type show_type: bool
+    @param show_type: if True, prepend the file's type to the formatted string
+    @type show_md5: bool
+    @param show_md5: if True, append MD5 sum to the formatted string
+    @type show_timestamp: bool
+    @param show_timestamp: if True, append time-of-creation after pathname
+    @rtype: str
+    @return: formatted pathname with optional added information
+    """
+
+    ftype = fpath = stamp = md5sum = ""
+
+    if fdesc[0] == "obj":
+        ftype = "file"
+        fpath = path
+        stamp = format_timestamp(fdesc[1])
+        md5sum = fdesc[2]
+    elif fdesc[0] == "dir":
+        ftype = "dir"
+        fpath = pp.path(path)
+    elif fdesc[0] == "sym":
+        ftype = "sym"
+        stamp = format_timestamp(fdesc[1])
+        tgt = fdesc[2].split()[0]
+        if CONFIG["piping"]:
+            fpath = path
+        else:
+            fpath = pp.path_symlink(path + " -> " + tgt)
+    elif fdesc[0] == "dev":
+        ftype = "dev"
+        fpath = path
+    elif fdesc[0] == "fif":
+        ftype = "fifo"
+        fpath = path
+    else:
+        sys.stderr.write(pp.error("%s has unknown type: %s" % (path, fdesc[0])))
+
+    result = ""
+    if show_type:
+        result += "%4s " % ftype
+    result += fpath
+    if show_timestamp:
+        result += "  " + stamp
+    if show_md5:
+        result += "  " + md5sum
+
+    return result
 
 
 def format_timestamp(timestamp):
-	"""Format a timestamp into, e.g., '2009-01-31 21:19:44' format"""
+    """Format a timestamp into, e.g., '2009-01-31 21:19:44' format"""
 
-	return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
+    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
 
 
 def initialize_configuration():
-	"""Setup the standard equery config"""
+    """Setup the standard equery config"""
 
-	# Get terminal size
-	term_width = pp.output.get_term_size()[1]
-	if term_width < 1:
-		# get_term_size() failed. Set a sane default width:
-		term_width = 80
+    # Get terminal size
+    term_width = pp.output.get_term_size()[1]
+    if term_width < 1:
+        # get_term_size() failed. Set a sane default width:
+        term_width = 80
 
-	# Terminal size, minus a 1-char margin for text wrapping
-	CONFIG['termWidth'] = term_width - 1
+    # Terminal size, minus a 1-char margin for text wrapping
+    CONFIG["termWidth"] = term_width - 1
 
-	# Guess color output
-	if (CONFIG['color'] == -1 and os.getenv("NOCOLOR") in ("yes", "true") or
-		CONFIG['color'] == 0):
-		pp.output.nocolor()
+    # Guess color output
+    if (
+        CONFIG["color"] == -1
+        and os.getenv("NOCOLOR") in ("yes", "true")
+        or CONFIG["color"] == 0
+    ):
+        pp.output.nocolor()
 
-	if CONFIG['piping']:
-		CONFIG['verbose'] = False
-		# set extra wide, should disable wrapping unless
-		# there is some extra long text
-		CONFIG['termWidth'] = 600
+    if CONFIG["piping"]:
+        CONFIG["verbose"] = False
+        # set extra wide, should disable wrapping unless
+        # there is some extra long text
+        CONFIG["termWidth"] = 600
 
-	CONFIG['debug'] = bool(os.getenv('DEBUG', False))
+    CONFIG["debug"] = bool(os.getenv("DEBUG", False))
 
 
 def main_usage():
-	"""Return the main usage message for equery"""
+    """Return the main usage message for equery"""
 
-	return "%(usage)s %(product)s [%(g_opts)s] %(mod_name)s [%(mod_opts)s]" % {
-		'usage': pp.emph("Usage:"),
-		'product': pp.productname(__productname__),
-		'g_opts': pp.globaloption("global-options"),
-		'mod_name': pp.command("module-name"),
-		'mod_opts': pp.localoption("module-options")
-	}
+    return "%(usage)s %(product)s [%(g_opts)s] %(mod_name)s [%(mod_opts)s]" % {
+        "usage": pp.emph("Usage:"),
+        "product": pp.productname(__productname__),
+        "g_opts": pp.globaloption("global-options"),
+        "mod_name": pp.command("module-name"),
+        "mod_opts": pp.localoption("module-options"),
+    }
 
 
 def mod_usage(mod_name="module", arg="pkgspec", optional=False):
-	"""Provide a consistent usage message to the calling module.
+    """Provide a consistent usage message to the calling module.
 
-	@type arg: string
-	@param arg: what kind of argument the module takes (pkgspec, filename, etc)
-	@type optional: bool
-	@param optional: is the argument optional?
-	"""
+    @type arg: string
+    @param arg: what kind of argument the module takes (pkgspec, filename, etc)
+    @type optional: bool
+    @param optional: is the argument optional?
+    """
 
-	return "%(usage)s: %(mod_name)s [%(opts)s] %(arg)s" % {
-		'usage': pp.emph("Usage"),
-		'mod_name': pp.command(mod_name),
-		'opts': pp.localoption("options"),
-		'arg': ("[%s]" % pp.emph(arg)) if optional else pp.emph(arg)
-	}
+    return "%(usage)s: %(mod_name)s [%(opts)s] %(arg)s" % {
+        "usage": pp.emph("Usage"),
+        "mod_name": pp.command(mod_name),
+        "opts": pp.localoption("options"),
+        "arg": ("[%s]" % pp.emph(arg)) if optional else pp.emph(arg),
+    }
 
 
 def parse_global_options(global_opts, args):
-	"""Parse global input args and return True if we should display help for
-	the called module, else False (or display help and exit from here).
-	"""
-
-	need_help = False
-	do_help = False
-	opts = (opt[0] for opt in global_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			if args:
-				need_help = True
-			else:
-				do_help = True
-		elif opt in ('-q','--quiet'):
-			CONFIG['quiet'] = True
-		elif opt in ('-C', '--no-color', '--nocolor'):
-			CONFIG['color'] = 0
-			pp.output.nocolor()
-		elif opt in ('-N', '--no-pipe'):
-			CONFIG['piping'] = False
-		elif opt in ('-V', '--version'):
-			print_version()
-			sys.exit(0)
-		elif opt in ('--debug'):
-			CONFIG['debug'] = True
-	if do_help:
-		print_help()
-		sys.exit(0)
-	return need_help
+    """Parse global input args and return True if we should display help for
+    the called module, else False (or display help and exit from here).
+    """
+
+    need_help = False
+    do_help = False
+    opts = (opt[0] for opt in global_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            if args:
+                need_help = True
+            else:
+                do_help = True
+        elif opt in ("-q", "--quiet"):
+            CONFIG["quiet"] = True
+        elif opt in ("-C", "--no-color", "--nocolor"):
+            CONFIG["color"] = 0
+            pp.output.nocolor()
+        elif opt in ("-N", "--no-pipe"):
+            CONFIG["piping"] = False
+        elif opt in ("-V", "--version"):
+            print_version()
+            sys.exit(0)
+        elif opt in ("--debug"):
+            CONFIG["debug"] = True
+    if do_help:
+        print_help()
+        sys.exit(0)
+    return need_help
 
 
 def print_version():
-	"""Print the version of this tool to the console."""
+    """Print the version of this tool to the console."""
 
-	print("%(product)s (%(version)s) - %(docstring)s" % {
-		"product": pp.productname(__productname__),
-		"version": __version__,
-		"docstring": __doc__
-	})
+    print(
+        "%(product)s (%(version)s) - %(docstring)s"
+        % {
+            "product": pp.productname(__productname__),
+            "version": __version__,
+            "docstring": __doc__,
+        }
+    )
 
 
 def split_arguments(args):
-	"""Separate module name from module arguments"""
+    """Separate module name from module arguments"""
 
-	return args.pop(0), args
+    return args.pop(0), args
 
 
 def main(argv):
-	"""Parse input and run the program."""
-
-	short_opts = "hqCNV"
-	long_opts = (
-		'help', 'quiet', 'nocolor', 'no-color', 'no-pipe', 'version', 'debug'
-	)
-
-	initialize_configuration()
-
-	try:
-		global_opts, args = getopt(argv[1:], short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Global %s" % err))
-		print_help(with_description=False)
-		sys.exit(2)
-
-	# Parse global options
-	need_help = parse_global_options(global_opts, args)
-
-	# verbose is shorthand for the very common 'not quiet or piping'
-	if CONFIG['quiet'] or CONFIG['piping']:
-		CONFIG['verbose'] = False
-	else:
-		CONFIG['verbose'] = True
-
-	try:
-		module_name, module_args = split_arguments(args)
-	except IndexError:
-		print_help()
-		sys.exit(2)
-
-	if need_help:
-		module_args.append('--help')
-
-	try:
-		expanded_module_name = expand_module_name(module_name)
-	except KeyError:
-		sys.stderr.write(pp.error("Unknown module '%s'" % module_name))
-		print_help(with_description=False)
-		sys.exit(2)
-
-	try:
-		loaded_module = __import__(
-			expanded_module_name, globals(), locals(), [], 1
-		)
-		loaded_module.main(module_args)
-	except portage.exception.AmbiguousPackageName as err:
-		raise errors.GentoolkitAmbiguousPackage(err.args[0])
-	except IOError as err:
-		if err.errno != errno.EPIPE:
-			raise
-
-if __name__ == '__main__':
-	main(sys.argv)
+    """Parse input and run the program."""
+
+    short_opts = "hqCNV"
+    long_opts = ("help", "quiet", "nocolor", "no-color", "no-pipe", "version", "debug")
+
+    initialize_configuration()
+
+    try:
+        global_opts, args = getopt(argv[1:], short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Global %s" % err))
+        print_help(with_description=False)
+        sys.exit(2)
+
+    # Parse global options
+    need_help = parse_global_options(global_opts, args)
+
+    # verbose is shorthand for the very common 'not quiet or piping'
+    if CONFIG["quiet"] or CONFIG["piping"]:
+        CONFIG["verbose"] = False
+    else:
+        CONFIG["verbose"] = True
+
+    try:
+        module_name, module_args = split_arguments(args)
+    except IndexError:
+        print_help()
+        sys.exit(2)
+
+    if need_help:
+        module_args.append("--help")
+
+    try:
+        expanded_module_name = expand_module_name(module_name)
+    except KeyError:
+        sys.stderr.write(pp.error("Unknown module '%s'" % module_name))
+        print_help(with_description=False)
+        sys.exit(2)
+
+    try:
+        loaded_module = __import__(expanded_module_name, globals(), locals(), [], 1)
+        loaded_module.main(module_args)
+    except portage.exception.AmbiguousPackageName as err:
+        raise errors.GentoolkitAmbiguousPackage(err.args[0])
+    except IOError as err:
+        if err.errno != errno.EPIPE:
+            raise
+
+
+if __name__ == "__main__":
+    main(sys.argv)

diff --git a/pym/gentoolkit/equery/belongs.py b/pym/gentoolkit/equery/belongs.py
index 1c9ab13..d62ff9a 100644
--- a/pym/gentoolkit/equery/belongs.py
+++ b/pym/gentoolkit/equery/belongs.py
@@ -8,7 +8,7 @@ Note: Normally, only one package will own a file. If multiple packages own
       the same file, it usually constitutes a problem, and should be reported.
 """
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -18,137 +18,137 @@ import sys
 from getopt import gnu_getopt, GetoptError
 
 import gentoolkit.pprinter as pp
-from gentoolkit.equery import (format_filetype, format_options, mod_usage,
-	CONFIG)
+from gentoolkit.equery import format_filetype, format_options, mod_usage, CONFIG
 from gentoolkit.helpers import FileOwner
 
 # =======
 # Globals
 # =======
 
-QUERY_OPTS = {
-	"full_regex": False,
-	"early_out": False,
-	"name_only": False
-}
+QUERY_OPTS = {"full_regex": False, "early_out": False, "name_only": False}
 
 # =======
 # Classes
 # =======
 
+
 class BelongsPrinter:
-	"""Outputs a formatted list of packages that claim to own a files."""
-
-	def __init__(self, verbose=True, name_only=False):
-		if verbose:
-			self.print_fn = self.print_verbose
-		else:
-			self.print_fn = self.print_quiet
-
-		self.name_only = name_only
-
-	def __call__(self, pkg, cfile):
-		self.print_fn(pkg, cfile)
-
-	def print_quiet(self, pkg, cfile):
-		"Format for minimal output."
-		if self.name_only:
-			name = pkg.cp
-		else:
-			name = str(pkg.cpv)
-		pp.uprint(name)
-
-	def print_verbose(self, pkg, cfile):
-		"Format for full output."
-		file_str = pp.path(format_filetype(cfile, pkg.parsed_contents()[cfile]))
-		if self.name_only:
-			name = pkg.cp
-		else:
-			name = str(pkg.cpv)
-		pp.uprint(pp.cpv(name), "(" + file_str + ")")
+    """Outputs a formatted list of packages that claim to own a file."""
+
+    def __init__(self, verbose=True, name_only=False):
+        if verbose:
+            self.print_fn = self.print_verbose
+        else:
+            self.print_fn = self.print_quiet
+
+        self.name_only = name_only
+
+    def __call__(self, pkg, cfile):
+        self.print_fn(pkg, cfile)
+
+    def print_quiet(self, pkg, cfile):
+        "Format for minimal output."
+        if self.name_only:
+            name = pkg.cp
+        else:
+            name = str(pkg.cpv)
+        pp.uprint(name)
+
+    def print_verbose(self, pkg, cfile):
+        "Format for full output."
+        file_str = pp.path(format_filetype(cfile, pkg.parsed_contents()[cfile]))
+        if self.name_only:
+            name = pkg.cp
+        else:
+            name = str(pkg.cpv)
+        pp.uprint(pp.cpv(name), "(" + file_str + ")")
+
 
 # =========
 # Functions
 # =========
 
+
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h','--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-e', '--early-out', '--earlyout'):
-			if opt == '--earlyout':
-				sys.stderr.write(pp.warn("Use of --earlyout is deprecated."))
-				sys.stderr.write(pp.warn("Please use --early-out."))
-				print()
-			QUERY_OPTS['early_out'] = True
-		elif opt in ('-f', '--full-regex'):
-			QUERY_OPTS['full_regex'] = True
-		elif opt in ('-n', '--name-only'):
-			QUERY_OPTS['name_only'] = True
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-e", "--early-out", "--earlyout"):
+            if opt == "--earlyout":
+                sys.stderr.write(pp.warn("Use of --earlyout is deprecated."))
+                sys.stderr.write(pp.warn("Please use --early-out."))
+                print()
+            QUERY_OPTS["early_out"] = True
+        elif opt in ("-f", "--full-regex"):
+            QUERY_OPTS["full_regex"] = True
+        elif opt in ("-n", "--name-only"):
+            QUERY_OPTS["name_only"] = True
 
 
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="belongs", arg="filename"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -f, --full-regex", "supplied query is a regex" ),
-		(" -e, --early-out", "stop when first match is found"),
-		(" -n, --name-only", "don't print the version")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="belongs", arg="filename"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -f, --full-regex", "supplied query is a regex"),
+                (" -e, --early-out", "stop when first match is found"),
+                (" -n, --name-only", "don't print the version"),
+            )
+        )
+    )
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "h:fen"
-	long_opts = ('help', 'full-regex', 'early-out', 'earlyout',
-		'name-only')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	if CONFIG['verbose']:
-		pp.uprint(" * Searching for %s ... " % (
-			pp.regexpquery(",".join(queries)))
-		)
-
-	printer_fn = BelongsPrinter(
-		verbose=CONFIG['verbose'], name_only=QUERY_OPTS['name_only']
-	)
-
-	find_owner = FileOwner(
-		is_regex=QUERY_OPTS['full_regex'],
-		early_out=QUERY_OPTS['early_out'],
-		printer_fn=printer_fn
-	)
-
-	if not find_owner(queries):
-		sys.exit(1)
+    """Parse input and run the program"""
+
+    short_opts = "h:fen"
+    long_opts = ("help", "full-regex", "early-out", "earlyout", "name-only")
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    if CONFIG["verbose"]:
+        pp.uprint(" * Searching for %s ... " % (pp.regexpquery(",".join(queries))))
+
+    printer_fn = BelongsPrinter(
+        verbose=CONFIG["verbose"], name_only=QUERY_OPTS["name_only"]
+    )
+
+    find_owner = FileOwner(
+        is_regex=QUERY_OPTS["full_regex"],
+        early_out=QUERY_OPTS["early_out"],
+        printer_fn=printer_fn,
+    )
+
+    if not find_owner(queries):
+        sys.exit(1)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/check.py b/pym/gentoolkit/equery/check.py
index 47eba98..7a7c3db 100644
--- a/pym/gentoolkit/equery/check.py
+++ b/pym/gentoolkit/equery/check.py
@@ -4,7 +4,7 @@
 
 """Checks timestamps and MD5 sums for files owned by a given installed package"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -27,262 +27,265 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"check_MD5sum": True,
-	"check_timestamp" : True,
-	"is_regex": False,
-	"only_failures": False,
-	"show_progress": False,
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "check_MD5sum": True,
+    "check_timestamp": True,
+    "is_regex": False,
+    "only_failures": False,
+    "show_progress": False,
 }
 
 # =======
 # Classes
 # =======
 
+
 class VerifyContents:
-	"""Verify installed packages' CONTENTS files.
-
-	The CONTENTS file contains timestamps and MD5 sums for each file owned
-	by a package.
-	"""
-	def __init__(self, printer_fn=None):
-		"""Create a VerifyObjects instance.
-
-		@type printer_fn: callable
-		@param printer_fn: if defined, will be applied to each result as found
-		"""
-		self.check_sums = True
-		self.check_timestamps = True
-		self.printer_fn = printer_fn
-
-		self.is_regex = False
-
-	def __call__(
-		self,
-		pkgs,
-		is_regex=False,
-		check_sums=True,
-		check_timestamps=True
-	):
-		self.is_regex = is_regex
-		self.check_sums = check_sums
-		self.check_timestamps = check_timestamps
-
-		result = {}
-		for pkg in pkgs:
-			# _run_checks returns tuple(n_passed, n_checked, err)
-			check_results = self._run_checks(pkg.parsed_contents())
-			result[pkg.cpv] = check_results
-			if self.printer_fn is not None:
-				self.printer_fn(pkg.cpv, check_results)
-
-		return result
-
-	def _run_checks(self, files):
-		"""Run some basic sanity checks on a package's contents.
-
-		If the file type (ftype) is not a directory or symlink, optionally
-		verify MD5 sums or mtimes via L{self._verify_obj}.
-
-		@see: gentoolkit.packages.get_contents()
-		@type files: dict
-		@param files: in form {'PATH': ['TYPE', 'TIMESTAMP', 'MD5SUM']}
-		@rtype: tuple
-		@return:
-			n_passed (int): number of files that passed all checks
-			n_checked (int): number of files checked
-			errs (list): check errors' descriptions
-		"""
-		n_checked = 0
-		n_passed = 0
-		errs = []
-		for cfile in files:
-			n_checked += 1
-			ftype = files[cfile][0]
-			real_cfile = os.environ.get('ROOT', '') + cfile
-			if not os.path.lexists(real_cfile):
-				errs.append("%s does not exist" % cfile)
-				continue
-			elif ftype == "dir":
-				if not os.path.isdir(real_cfile):
-					err = "%(cfile)s exists, but is not a directory"
-					errs.append(err % locals())
-					continue
-			elif ftype == "obj":
-				obj_errs = self._verify_obj(files, cfile, real_cfile, errs)
-				if len(obj_errs) > len(errs):
-					errs = obj_errs[:]
-					continue
-			elif ftype == "sym":
-				target = files[cfile][2].strip()
-				if not os.path.islink(real_cfile):
-					err = "%(cfile)s exists, but is not a symlink"
-					errs.append(err % locals())
-					continue
-				tgt = os.readlink(real_cfile)
-				if tgt != target:
-					err = "%(cfile)s does not point to %(target)s"
-					errs.append(err % locals())
-					continue
-			else:
-				err = "%(cfile)s has unknown type %(ftype)s"
-				errs.append(err % locals())
-				continue
-			n_passed += 1
-
-		return n_passed, n_checked, errs
-
-	def _verify_obj(self, files, cfile, real_cfile, errs):
-		"""Verify the MD5 sum and/or mtime and return any errors."""
-
-		obj_errs = errs[:]
-		if self.check_sums:
-			md5sum = files[cfile][2]
-			try:
-				cur_checksum = checksum.perform_md5(real_cfile, calc_prelink=1)
-			except IOError:
-				err = "Insufficient permissions to read %(cfile)s"
-				obj_errs.append(err % locals())
-				return obj_errs
-			if cur_checksum != md5sum:
-				err = "%(cfile)s has incorrect MD5sum"
-				obj_errs.append(err % locals())
-				return obj_errs
-		if self.check_timestamps:
-			mtime = int(files[cfile][1])
-			st_mtime = int(os.lstat(real_cfile).st_mtime)
-			if st_mtime != mtime:
-				err = (
-					"%(cfile)s has wrong mtime (is %(st_mtime)d, should be "
-					"%(mtime)d)"
-				)
-				obj_errs.append(err % locals())
-				return obj_errs
-
-		return obj_errs
+    """Verify installed packages' CONTENTS files.
+
+    The CONTENTS file contains timestamps and MD5 sums for each file owned
+    by a package.
+    """
+
+    def __init__(self, printer_fn=None):
+        """Create a VerifyObjects instance.
+
+        @type printer_fn: callable
+        @param printer_fn: if defined, will be applied to each result as found
+        """
+        self.check_sums = True
+        self.check_timestamps = True
+        self.printer_fn = printer_fn
+
+        self.is_regex = False
+
+    def __call__(self, pkgs, is_regex=False, check_sums=True, check_timestamps=True):
+        self.is_regex = is_regex
+        self.check_sums = check_sums
+        self.check_timestamps = check_timestamps
+
+        result = {}
+        for pkg in pkgs:
+            # _run_checks returns tuple(n_passed, n_checked, err)
+            check_results = self._run_checks(pkg.parsed_contents())
+            result[pkg.cpv] = check_results
+            if self.printer_fn is not None:
+                self.printer_fn(pkg.cpv, check_results)
+
+        return result
+
+    def _run_checks(self, files):
+        """Run some basic sanity checks on a package's contents.
+
+        If the file type (ftype) is not a directory or symlink, optionally
+        verify MD5 sums or mtimes via L{self._verify_obj}.
+
+        @see: gentoolkit.packages.get_contents()
+        @type files: dict
+        @param files: in form {'PATH': ['TYPE', 'TIMESTAMP', 'MD5SUM']}
+        @rtype: tuple
+        @return:
+                n_passed (int): number of files that passed all checks
+                n_checked (int): number of files checked
+                errs (list): check errors' descriptions
+        """
+        n_checked = 0
+        n_passed = 0
+        errs = []
+        for cfile in files:
+            n_checked += 1
+            ftype = files[cfile][0]
+            real_cfile = os.environ.get("ROOT", "") + cfile
+            if not os.path.lexists(real_cfile):
+                errs.append("%s does not exist" % cfile)
+                continue
+            elif ftype == "dir":
+                if not os.path.isdir(real_cfile):
+                    err = "%(cfile)s exists, but is not a directory"
+                    errs.append(err % locals())
+                    continue
+            elif ftype == "obj":
+                obj_errs = self._verify_obj(files, cfile, real_cfile, errs)
+                if len(obj_errs) > len(errs):
+                    errs = obj_errs[:]
+                    continue
+            elif ftype == "sym":
+                target = files[cfile][2].strip()
+                if not os.path.islink(real_cfile):
+                    err = "%(cfile)s exists, but is not a symlink"
+                    errs.append(err % locals())
+                    continue
+                tgt = os.readlink(real_cfile)
+                if tgt != target:
+                    err = "%(cfile)s does not point to %(target)s"
+                    errs.append(err % locals())
+                    continue
+            else:
+                err = "%(cfile)s has unknown type %(ftype)s"
+                errs.append(err % locals())
+                continue
+            n_passed += 1
+
+        return n_passed, n_checked, errs
+
+    def _verify_obj(self, files, cfile, real_cfile, errs):
+        """Verify the MD5 sum and/or mtime and return any errors."""
+
+        obj_errs = errs[:]
+        if self.check_sums:
+            md5sum = files[cfile][2]
+            try:
+                cur_checksum = checksum.perform_md5(real_cfile, calc_prelink=1)
+            except IOError:
+                err = "Insufficient permissions to read %(cfile)s"
+                obj_errs.append(err % locals())
+                return obj_errs
+            if cur_checksum != md5sum:
+                err = "%(cfile)s has incorrect MD5sum"
+                obj_errs.append(err % locals())
+                return obj_errs
+        if self.check_timestamps:
+            mtime = int(files[cfile][1])
+            st_mtime = int(os.lstat(real_cfile).st_mtime)
+            if st_mtime != mtime:
+                err = (
+                    "%(cfile)s has wrong mtime (is %(st_mtime)d, should be "
+                    "%(mtime)d)"
+                )
+                obj_errs.append(err % locals())
+                return obj_errs
+
+        return obj_errs
+
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-
-	# Deprecation warning added by djanderson, 12/2008
-	depwarning = (
-		"Default action for this module has changed in Gentoolkit 0.3.",
-		"Use globbing to simulate the old behavior (see man equery).",
-		"Use '*' to check all installed packages.",
-		"Use 'foo-bar/*' to filter by category."
-	)
-	for line in depwarning:
-		sys.stderr.write(pp.warn(line))
-	print()
-
-	print(mod_usage(mod_name="check"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -f, --full-regex", "query is a regular expression"),
-		(" -o, --only-failures", "only display packages that do not pass"),
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+
+    # Deprecation warning added by djanderson, 12/2008
+    depwarning = (
+        "Default action for this module has changed in Gentoolkit 0.3.",
+        "Use globbing to simulate the old behavior (see man equery).",
+        "Use '*' to check all installed packages.",
+        "Use 'foo-bar/*' to filter by category.",
+    )
+    for line in depwarning:
+        sys.stderr.write(pp.warn(line))
+    print()
+
+    print(mod_usage(mod_name="check"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -f, --full-regex", "query is a regular expression"),
+                (" -o, --only-failures", "only display packages that do not pass"),
+            )
+        )
+    )
 
 
 def checks_printer(cpv, data, verbose=True, only_failures=False):
-	"""Output formatted results of pkg file(s) checks"""
-	seen = []
-
-	n_passed, n_checked, errs = data
-	n_failed = n_checked - n_passed
-	if only_failures and not n_failed:
-		return
-	else:
-		if verbose:
-			if not cpv in seen:
-				pp.uprint("* Checking %s ..." % (pp.emph(str(cpv))))
-				seen.append(cpv)
-		else:
-			pp.uprint("%s:" % cpv, end=' ')
-
-	if verbose:
-		for err in errs:
-			sys.stderr.write(pp.error(err))
-
-	if verbose:
-		n_passed = pp.number(str(n_passed))
-		n_checked = pp.number(str(n_checked))
-		info = "   %(n_passed)s out of %(n_checked)s files passed"
-		print(info % locals())
-		print()
-	else:
-		print("failed(%s)" % n_failed)
+    """Output formatted results of pkg file(s) checks"""
+    seen = []
+
+    n_passed, n_checked, errs = data
+    n_failed = n_checked - n_passed
+    if only_failures and not n_failed:
+        return
+    else:
+        if verbose:
+            if not cpv in seen:
+                pp.uprint("* Checking %s ..." % (pp.emph(str(cpv))))
+                seen.append(cpv)
+        else:
+            pp.uprint("%s:" % cpv, end=" ")
+
+    if verbose:
+        for err in errs:
+            sys.stderr.write(pp.error(err))
+
+    if verbose:
+        n_passed = pp.number(str(n_passed))
+        n_checked = pp.number(str(n_checked))
+        info = "   %(n_passed)s out of %(n_checked)s files passed"
+        print(info % locals())
+        print()
+    else:
+        print("failed(%s)" % n_failed)
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
+    """Parse module options and update QUERY_OPTS"""
 
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-f', '--full-regex'):
-			QUERY_OPTS['is_regex'] = True
-		elif opt in ('-o', '--only-failures'):
-			QUERY_OPTS['only_failures'] = True
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-f", "--full-regex"):
+            QUERY_OPTS["is_regex"] = True
+        elif opt in ("-o", "--only-failures"):
+            QUERY_OPTS["only_failures"] = True
 
 
 def main(input_args):
-	"""Parse input and run the program"""
+    """Parse input and run the program"""
+
+    short_opts = "hof"
+    long_opts = ("help", "only-failures", "full-regex")
 
-	short_opts = "hof"
-	long_opts = ('help', 'only-failures', 'full-regex')
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
 
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
+    parse_module_options(module_opts)
 
-	parse_module_options(module_opts)
+    if not queries:
+        print_help()
+        sys.exit(2)
 
-	if not queries:
-		print_help()
-		sys.exit(2)
+    first_run = True
+    for query in (Query(x, QUERY_OPTS["is_regex"]) for x in queries):
+        if not first_run:
+            print()
 
-	first_run = True
-	for query in (Query(x, QUERY_OPTS['is_regex']) for x in queries):
-		if not first_run:
-			print()
+        matches = query.smart_find(**QUERY_OPTS)
 
-		matches = query.smart_find(**QUERY_OPTS)
+        if not matches:
+            raise errors.GentoolkitNoMatches(query, in_installed=True)
 
-		if not matches:
-			raise errors.GentoolkitNoMatches(query, in_installed=True)
+        matches.sort()
 
-		matches.sort()
+        printer = partial(
+            checks_printer,
+            verbose=CONFIG["verbose"],
+            only_failures=QUERY_OPTS["only_failures"],
+        )
+        check = VerifyContents(printer_fn=printer)
+        check(matches)
 
-		printer = partial(
-			checks_printer,
-			verbose=CONFIG['verbose'],
-			only_failures=QUERY_OPTS['only_failures']
-		)
-		check = VerifyContents(printer_fn=printer)
-		check(matches)
+        first_run = False
 
-		first_run = False
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/depends.py b/pym/gentoolkit/equery/depends.py
index 74c6933..581e2b6 100644
--- a/pym/gentoolkit/equery/depends.py
+++ b/pym/gentoolkit/equery/depends.py
@@ -4,7 +4,7 @@
 
 """List all packages that depend on a atom given query"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -24,181 +24,190 @@ from gentoolkit.cpv import CPV
 # =======
 
 QUERY_OPTS = {
-	"include_masked": False,
-	"only_direct": True,
-	"max_depth": -1,
+    "include_masked": False,
+    "only_direct": True,
+    "max_depth": -1,
 }
 
 # =======
 # Classes
 # =======
 
+
 class DependPrinter:
-	"""Output L{gentoolkit.dependencies.Dependencies} objects."""
-	def __init__(self, verbose=True):
-		self.verbose = verbose
-
-		if verbose:
-			self.print_fn = self.print_verbose
-		else:
-			self.print_fn = self.print_quiet
-
-	def __call__(self, dep, dep_is_displayed=False):
-		self.format_depend(dep, dep_is_displayed)
-
-	@staticmethod
-	def print_verbose(indent, cpv, use_conditional, depatom):
-		"""Verbosely prints a set of dep strings."""
-
-		sep = ' ? ' if (depatom and use_conditional) else ''
-		pp.uprint(indent + pp.cpv(cpv), "(" + use_conditional +
-			sep + depatom + ")")
-
-	@staticmethod
-	def print_quiet(indent, cpv, use_conditional, depatom):
-		"""Quietly prints a subset set of dep strings."""
-
-		pp.uprint(indent + cpv)
-
-	def format_depend(self, dep, dep_is_displayed):
-		"""Format a dependency for printing.
-
-		@type dep: L{gentoolkit.dependencies.Dependencies}
-		@param dep: the dependency to display
-		"""
-
-		# Don't print blank lines
-		if dep_is_displayed and not self.verbose:
-			return
-
-		depth = getattr(dep, 'depth', 0)
-		indent = " " * depth
-		mdep = dep.matching_dep
-		use_conditional = ""
-		if mdep.use_conditional:
-			use_conditional = " & ".join(
-				pp.useflag(u) for u in mdep.use_conditional.split()
-			)
-		if mdep.operator == '=*':
-			formatted_dep = '=%s*' % str(mdep.cpv)
-		else:
-			formatted_dep = mdep.operator + str(mdep.cpv)
-		if mdep.slot:
-			formatted_dep += pp.emph(':') + pp.slot(mdep.slot)
-			if mdep.sub_slot:
-				formatted_dep += pp.slot('/') + pp.slot(mdep.sub_slot)
-		if mdep.use:
-			useflags = pp.useflag(','.join(mdep.use.tokens))
-			formatted_dep += (pp.emph('[') + useflags + pp.emph(']'))
-
-		if dep_is_displayed:
-			indent = indent + " " * len(str(dep.cpv))
-			self.print_fn(indent, '', use_conditional, formatted_dep)
-		else:
-			self.print_fn(indent, str(dep.cpv), use_conditional, formatted_dep)
+    """Output L{gentoolkit.dependencies.Dependencies} objects."""
+
+    def __init__(self, verbose=True):
+        self.verbose = verbose
+
+        if verbose:
+            self.print_fn = self.print_verbose
+        else:
+            self.print_fn = self.print_quiet
+
+    def __call__(self, dep, dep_is_displayed=False):
+        self.format_depend(dep, dep_is_displayed)
+
+    @staticmethod
+    def print_verbose(indent, cpv, use_conditional, depatom):
+        """Verbosely prints a set of dep strings."""
+
+        sep = " ? " if (depatom and use_conditional) else ""
+        pp.uprint(indent + pp.cpv(cpv), "(" + use_conditional + sep + depatom + ")")
+
+    @staticmethod
+    def print_quiet(indent, cpv, use_conditional, depatom):
+        """Quietly prints a subset set of dep strings."""
+
+        pp.uprint(indent + cpv)
+
+    def format_depend(self, dep, dep_is_displayed):
+        """Format a dependency for printing.
+
+        @type dep: L{gentoolkit.dependencies.Dependencies}
+        @param dep: the dependency to display
+        """
+
+        # Don't print blank lines
+        if dep_is_displayed and not self.verbose:
+            return
+
+        depth = getattr(dep, "depth", 0)
+        indent = " " * depth
+        mdep = dep.matching_dep
+        use_conditional = ""
+        if mdep.use_conditional:
+            use_conditional = " & ".join(
+                pp.useflag(u) for u in mdep.use_conditional.split()
+            )
+        if mdep.operator == "=*":
+            formatted_dep = "=%s*" % str(mdep.cpv)
+        else:
+            formatted_dep = mdep.operator + str(mdep.cpv)
+        if mdep.slot:
+            formatted_dep += pp.emph(":") + pp.slot(mdep.slot)
+            if mdep.sub_slot:
+                formatted_dep += pp.slot("/") + pp.slot(mdep.sub_slot)
+        if mdep.use:
+            useflags = pp.useflag(",".join(mdep.use.tokens))
+            formatted_dep += pp.emph("[") + useflags + pp.emph("]")
+
+        if dep_is_displayed:
+            indent = indent + " " * len(str(dep.cpv))
+            self.print_fn(indent, "", use_conditional, formatted_dep)
+        else:
+            self.print_fn(indent, str(dep.cpv), use_conditional, formatted_dep)
+
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="depends"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -a, --all-packages",
-			"include dependencies that are not installed (slow)"),
-		(" -D, --indirect",
-			"search both direct and indirect dependencies"),
-		("     --depth=N", "limit indirect dependency tree to specified depth")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="depends"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (
+                    " -a, --all-packages",
+                    "include dependencies that are not installed (slow)",
+                ),
+                (" -D, --indirect", "search both direct and indirect dependencies"),
+                ("     --depth=N", "limit indirect dependency tree to specified depth"),
+            )
+        )
+    )
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-a', '--all-packages'):
-			QUERY_OPTS['include_masked'] = True
-		elif opt in ('-D', '--indirect'):
-			QUERY_OPTS['only_direct'] = False
-		elif opt in ('--depth'):
-			if posarg.isdigit():
-				depth = int(posarg)
-			else:
-				err = "Module option --depth requires integer (got '%s')"
-				sys.stdout.write(pp.error(err % posarg))
-				print()
-				print_help(with_description=False)
-				sys.exit(2)
-			QUERY_OPTS["max_depth"] = depth
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-a", "--all-packages"):
+            QUERY_OPTS["include_masked"] = True
+        elif opt in ("-D", "--indirect"):
+            QUERY_OPTS["only_direct"] = False
+        elif opt in ("--depth"):
+            if posarg.isdigit():
+                depth = int(posarg)
+            else:
+                err = "Module option --depth requires integer (got '%s')"
+                sys.stdout.write(pp.error(err % posarg))
+                print()
+                print_help(with_description=False)
+                sys.exit(2)
+            QUERY_OPTS["max_depth"] = depth
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-	short_opts = "hadD" # -d, --direct was old option for default action
-	long_opts = ('help', 'all-packages', 'direct', 'indirect', 'depth=')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	#
-	# Output
-	#
-
-	dep_print = DependPrinter(verbose=CONFIG['verbose'])
-
-	first_run = True
-	got_match = False
-	for query in queries:
-		if not first_run:
-			print()
-
-		pkg = Dependencies(query)
-		if QUERY_OPTS['include_masked']:
-			pkggetter = get_cpvs
-		else:
-			pkggetter = get_installed_cpvs
-
-		if CONFIG['verbose']:
-			print(" * These packages depend on %s:" % pp.emph(pkg.cpv))
-		if pkg.graph_reverse_depends(
-			pkgset=sorted(pkggetter(), key=CPV),
-			max_depth=QUERY_OPTS["max_depth"],
-			only_direct=QUERY_OPTS["only_direct"],
-			printer_fn=dep_print
-		):
-			got_match = True
-
-		first_run = False
-
-	if not got_match:
-		sys.exit(1)
+    """Parse input and run the program"""
+    short_opts = "hadD"  # -d, --direct was old option for default action
+    long_opts = ("help", "all-packages", "direct", "indirect", "depth=")
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    #
+    # Output
+    #
+
+    dep_print = DependPrinter(verbose=CONFIG["verbose"])
+
+    first_run = True
+    got_match = False
+    for query in queries:
+        if not first_run:
+            print()
+
+        pkg = Dependencies(query)
+        if QUERY_OPTS["include_masked"]:
+            pkggetter = get_cpvs
+        else:
+            pkggetter = get_installed_cpvs
+
+        if CONFIG["verbose"]:
+            print(" * These packages depend on %s:" % pp.emph(pkg.cpv))
+        if pkg.graph_reverse_depends(
+            pkgset=sorted(pkggetter(), key=CPV),
+            max_depth=QUERY_OPTS["max_depth"],
+            only_direct=QUERY_OPTS["only_direct"],
+            printer_fn=dep_print,
+        ):
+            got_match = True
+
+        first_run = False
+
+    if not got_match:
+        sys.exit(1)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/depgraph.py b/pym/gentoolkit/equery/depgraph.py
index 4ee37ee..9003a47 100644
--- a/pym/gentoolkit/equery/depgraph.py
+++ b/pym/gentoolkit/equery/depgraph.py
@@ -4,7 +4,7 @@
 
 """Display a direct dependency graph for a given package"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -27,222 +27,229 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"depth": 1,
-	"no_atom": False,
-	"no_indent": False,
-	"no_useflags": False,
-	"no_mask": False,
-	"in_installed": True,
-	"in_porttree": True,
-	"in_overlay": True,
-	"include_masked": True,
-	"show_progress": (not CONFIG['quiet'])
+    "depth": 1,
+    "no_atom": False,
+    "no_indent": False,
+    "no_useflags": False,
+    "no_mask": False,
+    "in_installed": True,
+    "in_porttree": True,
+    "in_overlay": True,
+    "include_masked": True,
+    "show_progress": (not CONFIG["quiet"]),
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print("Default depth is set to 1 (direct only). Use --depth=0 for no max.")
-	print()
-	print(mod_usage(mod_name="depgraph"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -A, --no-atom", "do not show dependency atom"),
-		(" -M, --no-mask", "do not show masking status"),
-		(" -U, --no-useflags", "do not show USE flags"),
-		(" -l, --linear", "do not format the graph by indenting dependencies"),
-		("     --depth=N", "limit dependency graph to specified depth")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print("Default depth is set to 1 (direct only). Use --depth=0 for no max.")
+    print()
+    print(mod_usage(mod_name="depgraph"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -A, --no-atom", "do not show dependency atom"),
+                (" -M, --no-mask", "do not show masking status"),
+                (" -U, --no-useflags", "do not show USE flags"),
+                (" -l, --linear", "do not format the graph by indenting dependencies"),
+                ("     --depth=N", "limit dependency graph to specified depth"),
+            )
+        )
+    )
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		if opt in ('-A', '--no-atom'):
-			QUERY_OPTS["no_atom"] = True
-		if opt in ('-U', '--no-useflags'):
-			QUERY_OPTS["no_useflags"] = True
-		if opt in ('-M', '--no-mask'):
-			QUERY_OPTS["no_mask"] = True
-		if opt in ('-l', '--linear'):
-			QUERY_OPTS["no_indent"] = True
-		if opt in ('--depth'):
-			if posarg.isdigit():
-				depth = int(posarg)
-			else:
-				err = "Module option --depth requires integer (got '%s')"
-				sys.stderr.write(pp.error(err % posarg))
-				print()
-				print_help(with_description=False)
-				sys.exit(2)
-			QUERY_OPTS["depth"] = depth
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        if opt in ("-A", "--no-atom"):
+            QUERY_OPTS["no_atom"] = True
+        if opt in ("-U", "--no-useflags"):
+            QUERY_OPTS["no_useflags"] = True
+        if opt in ("-M", "--no-mask"):
+            QUERY_OPTS["no_mask"] = True
+        if opt in ("-l", "--linear"):
+            QUERY_OPTS["no_indent"] = True
+        if opt in ("--depth"):
+            if posarg.isdigit():
+                depth = int(posarg)
+            else:
+                err = "Module option --depth requires integer (got '%s')"
+                sys.stderr.write(pp.error(err % posarg))
+                print()
+                print_help(with_description=False)
+                sys.exit(2)
+            QUERY_OPTS["depth"] = depth
 
 
 def depgraph_printer(
-	depth,
-	pkg,
-	dep,
-	no_use=False,
-	no_atom=False,
-	no_indent=False,
-	initial_pkg=False,
-	no_mask=False
+    depth,
+    pkg,
+    dep,
+    no_use=False,
+    no_atom=False,
+    no_indent=False,
+    initial_pkg=False,
+    no_mask=False,
 ):
-	"""Display L{gentoolkit.dependencies.Dependencies.graph_depends} results.
-
-	@type depth: int
-	@param depth: depth of indirection, used to calculate indent
-	@type pkg: L{gentoolkit.package.Package}
-	@param pkg: "best match" package matched by B{dep}
-	@type dep: L{gentoolkit.atom.Atom}
-	@param dep: dependency that matched B{pkg}
-	@type no_use: bool
-	@param no_use: don't output USE flags
-	@type no_atom: bool
-	@param no_atom: don't output dep atom
-	@type no_indent: bool
-	@param no_indent: don't output indent based on B{depth}
-	@type initial_pkg: bool
-	@param initial_pkg: somewhat of a hack used to print the root package of
-		the graph with absolutely no indent
-	"""
-	indent = '' if no_indent or initial_pkg else ' ' + (' ' * depth)
-	decorator = '[%3d] ' % depth if no_indent else '`-- '
-	use = ''
-	atom = ''
-	mask = ''
-	try:
-		if not no_atom:
-			if dep.operator == '=*':
-				atom += ' (=%s*)' % dep.cpv
-			else:
-				atom += ' (%s%s)' % (dep.operator, dep.cpv)
-		if not no_use and dep is not None and dep.use:
-			use = ' [%s]' % ' '.join(
-				pp.useflag(x, enabled=True) for x in dep.use.tokens
-			)
-	except AttributeError:
-		# 'NoneType' object has no attribute 'atom'
-		pass
-	if pkg and not no_mask:
-		mask = pkg.mask_status()
-		if not mask:
-			mask = [determine_keyword(portage.settings["ARCH"],
-				portage.settings["ACCEPT_KEYWORDS"],
-				pkg.environment('KEYWORDS'))]
-		mask = pp.masking(mask)
-	try:
-		pp.uprint(' '.join(
-			(indent, decorator, pp.cpv(str(pkg.cpv)), atom, mask, use)
-			))
-	except AttributeError:
-		# 'NoneType' object has no attribute 'cpv'
-		pp.uprint(''.join((indent, decorator, "(no match for %r)" % dep.atom)))
+    """Display L{gentoolkit.dependencies.Dependencies.graph_depends} results.
+
+    @type depth: int
+    @param depth: depth of indirection, used to calculate indent
+    @type pkg: L{gentoolkit.package.Package}
+    @param pkg: "best match" package matched by B{dep}
+    @type dep: L{gentoolkit.atom.Atom}
+    @param dep: dependency that matched B{pkg}
+    @type no_use: bool
+    @param no_use: don't output USE flags
+    @type no_atom: bool
+    @param no_atom: don't output dep atom
+    @type no_indent: bool
+    @param no_indent: don't output indent based on B{depth}
+    @type initial_pkg: bool
+    @param initial_pkg: somewhat of a hack used to print the root package of
+            the graph with absolutely no indent
+    """
+    indent = "" if no_indent or initial_pkg else " " + (" " * depth)
+    decorator = "[%3d] " % depth if no_indent else "`-- "
+    use = ""
+    atom = ""
+    mask = ""
+    try:
+        if not no_atom:
+            if dep.operator == "=*":
+                atom += " (=%s*)" % dep.cpv
+            else:
+                atom += " (%s%s)" % (dep.operator, dep.cpv)
+        if not no_use and dep is not None and dep.use:
+            use = " [%s]" % " ".join(
+                pp.useflag(x, enabled=True) for x in dep.use.tokens
+            )
+    except AttributeError:
+        # 'NoneType' object has no attribute 'atom'
+        pass
+    if pkg and not no_mask:
+        mask = pkg.mask_status()
+        if not mask:
+            mask = [
+                determine_keyword(
+                    portage.settings["ARCH"],
+                    portage.settings["ACCEPT_KEYWORDS"],
+                    pkg.environment("KEYWORDS"),
+                )
+            ]
+        mask = pp.masking(mask)
+    try:
+        pp.uprint(" ".join((indent, decorator, pp.cpv(str(pkg.cpv)), atom, mask, use)))
+    except AttributeError:
+        # 'NoneType' object has no attribute 'cpv'
+        pp.uprint("".join((indent, decorator, "(no match for %r)" % dep.atom)))
 
 
 def make_depgraph(pkg, printer_fn):
-	"""Create and display depgraph for each package."""
+    """Create and display depgraph for each package."""
 
-	print()
-	if CONFIG['verbose']:
-		pp.uprint(" * " + pp.subsection("dependency graph for ") +
-			pp.cpv(str(pkg.cpv)))
-	else:
-		pp.uprint("%s:" % pkg.cpv)
+    print()
+    if CONFIG["verbose"]:
+        pp.uprint(" * " + pp.subsection("dependency graph for ") + pp.cpv(str(pkg.cpv)))
+    else:
+        pp.uprint("%s:" % pkg.cpv)
 
-	# Print out the first package
-	printer_fn(0, pkg, None, initial_pkg=True)
+    # Print out the first package
+    printer_fn(0, pkg, None, initial_pkg=True)
 
-	deps = pkg.deps.graph_depends(
-		max_depth=QUERY_OPTS['depth'],
-		printer_fn=printer_fn,
-		# Use this to set this pkg as the graph's root; better way?
-		result=[(0, pkg)]
-	)
+    deps = pkg.deps.graph_depends(
+        max_depth=QUERY_OPTS["depth"],
+        printer_fn=printer_fn,
+        # Use this to set this pkg as the graph's root; better way?
+        result=[(0, pkg)],
+    )
 
-	if CONFIG['verbose']:
-		pkgname = pp.cpv(str(pkg.cpv))
-		n_packages = pp.number(str(len(deps)))
-		max_seen = pp.number(str(max(x[0] for x in deps)))
-		info = "[ %s stats: packages (%s), max depth (%s) ]"
-		pp.uprint(info % (pkgname, n_packages, max_seen))
+    if CONFIG["verbose"]:
+        pkgname = pp.cpv(str(pkg.cpv))
+        n_packages = pp.number(str(len(deps)))
+        max_seen = pp.number(str(max(x[0] for x in deps)))
+        info = "[ %s stats: packages (%s), max depth (%s) ]"
+        pp.uprint(info % (pkgname, n_packages, max_seen))
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hAMUl"
-	long_opts = ('help', 'no-atom', 'no-useflags', 'no-mask', 'depth=')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	#
-	# Output
-	#
-
-	first_run = True
-	for query in (Query(x) for x in queries):
-		if not first_run:
-			print()
-
-		matches = query.smart_find(**QUERY_OPTS)
-
-		if not matches:
-			raise errors.GentoolkitNoMatches(query)
-
-		matches.sort()
-
-		if CONFIG['verbose']:
-			printer = partial(
-				depgraph_printer,
-				no_atom=QUERY_OPTS['no_atom'],
-				no_indent=QUERY_OPTS['no_indent'],
-				no_use=QUERY_OPTS['no_useflags'],
-				no_mask=QUERY_OPTS['no_mask']
-			)
-		else:
-			printer = partial(
-				depgraph_printer,
-				no_atom=True,
-				no_indent=True,
-				no_use=True,
-				no_mask=True
-			)
-
-		for pkg in matches:
-			make_depgraph(pkg, printer)
-
-		first_run = False
+    """Parse input and run the program"""
+
+    short_opts = "hAMUl"
+    long_opts = ("help", "no-atom", "no-useflags", "no-mask", "depth=")
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    #
+    # Output
+    #
+
+    first_run = True
+    for query in (Query(x) for x in queries):
+        if not first_run:
+            print()
+
+        matches = query.smart_find(**QUERY_OPTS)
+
+        if not matches:
+            raise errors.GentoolkitNoMatches(query)
+
+        matches.sort()
+
+        if CONFIG["verbose"]:
+            printer = partial(
+                depgraph_printer,
+                no_atom=QUERY_OPTS["no_atom"],
+                no_indent=QUERY_OPTS["no_indent"],
+                no_use=QUERY_OPTS["no_useflags"],
+                no_mask=QUERY_OPTS["no_mask"],
+            )
+        else:
+            printer = partial(
+                depgraph_printer,
+                no_atom=True,
+                no_indent=True,
+                no_use=True,
+                no_mask=True,
+            )
+
+        for pkg in matches:
+            make_depgraph(pkg, printer)
+
+        first_run = False
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/files.py b/pym/gentoolkit/equery/files.py
index cfcd306..ae9e386 100644
--- a/pym/gentoolkit/equery/files.py
+++ b/pym/gentoolkit/equery/files.py
@@ -4,7 +4,7 @@
 
 """List files owned by a given package."""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -17,8 +17,7 @@ from getopt import gnu_getopt, GetoptError
 import portage
 
 import gentoolkit.pprinter as pp
-from gentoolkit.equery import (format_filetype, format_options, mod_usage,
-	CONFIG)
+from gentoolkit.equery import format_filetype, format_options, mod_usage, CONFIG
 from gentoolkit.query import Query
 
 # =======
@@ -26,303 +25,314 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"include_masked": True,
-	"output_tree": False,
-	"show_progress": (not CONFIG['quiet']),
-	"show_type": False,
-	"show_timestamp": False,
-	"show_MD5": False,
-	"type_filter": None
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "include_masked": True,
+    "output_tree": False,
+    "show_progress": (not CONFIG["quiet"]),
+    "show_type": False,
+    "show_timestamp": False,
+    "show_MD5": False,
+    "type_filter": None,
 }
 
 FILTER_RULES = (
-	'dir', 'obj', 'sym', 'dev', 'path', 'conf', 'cmd', 'doc', 'man', 'info',
-	'fifo'
+    "dir",
+    "obj",
+    "sym",
+    "dev",
+    "path",
+    "conf",
+    "cmd",
+    "doc",
+    "man",
+    "info",
+    "fifo",
 )
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="files"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -m, --md5sum", "include MD5 sum in output"),
-		(" -s, --timestamp", "include timestamp in output"),
-		(" -t, --type", "include file type in output"),
-		("     --tree", "display results in a tree (turns off other options)"),
-		(" -f, --filter=RULES", "filter output by file type"),
-		("              RULES",
-			"a comma-separated list (no spaces); choose from:")
-	)))
-	print(" " * 24, ', '.join(pp.emph(x) for x in FILTER_RULES))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="files"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -m, --md5sum", "include MD5 sum in output"),
+                (" -s, --timestamp", "include timestamp in output"),
+                (" -t, --type", "include file type in output"),
+                ("     --tree", "display results in a tree (turns off other options)"),
+                (" -f, --filter=RULES", "filter output by file type"),
+                (
+                    "              RULES",
+                    "a comma-separated list (no spaces); choose from:",
+                ),
+            )
+        )
+    )
+    print(" " * 24, ", ".join(pp.emph(x) for x in FILTER_RULES))
 
 
 def display_files(contents):
-	"""Display the content of an installed package.
-
-	@see: gentoolkit.package.Package.parsed_contents
-	@type contents: dict
-	@param contents: {'path': ['filetype', ...], ...}
-	"""
-
-	filenames = list(contents.keys())
-	filenames.sort()
-	last = []
-
-	for name in filenames:
-		if QUERY_OPTS["output_tree"]:
-			dirdepth = name.count('/')
-			indent = " "
-			if dirdepth == 2:
-				indent = "   "
-			elif dirdepth > 2:
-				indent = "   " * (dirdepth - 1)
-
-			basename = name.rsplit("/", dirdepth - 1)
-			if contents[name][0] == "dir":
-				if len(last) == 0:
-					last = basename
-					pp.uprint(pp.path(indent + basename[0]))
-					continue
-				for i, directory in enumerate(basename):
-					try:
-						if directory in last[i]:
-							continue
-					except IndexError:
-						pass
-					last = basename
-					if len(last) == 1:
-						pp.uprint(pp.path(indent + last[0]))
-						continue
-					pp.uprint(pp.path(indent + "> /" + last[-1]))
-			elif contents[name][0] == "sym":
-				pp.uprint(pp.path(indent + "+"), end=' ')
-				pp.uprint(pp.path_symlink(basename[-1] + " -> " +
-					contents[name][2]))
-			else:
-				pp.uprint(pp.path(indent + "+ ") + basename[-1])
-		else:
-			pp.uprint(format_filetype(
-				name,
-				contents[name],
-				show_type=QUERY_OPTS["show_type"],
-				show_md5=QUERY_OPTS["show_MD5"],
-				show_timestamp=QUERY_OPTS["show_timestamp"]
-			))
+    """Display the content of an installed package.
+
+    @see: gentoolkit.package.Package.parsed_contents
+    @type contents: dict
+    @param contents: {'path': ['filetype', ...], ...}
+    """
+
+    filenames = list(contents.keys())
+    filenames.sort()
+    last = []
+
+    for name in filenames:
+        if QUERY_OPTS["output_tree"]:
+            dirdepth = name.count("/")
+            indent = " "
+            if dirdepth == 2:
+                indent = "   "
+            elif dirdepth > 2:
+                indent = "   " * (dirdepth - 1)
+
+            basename = name.rsplit("/", dirdepth - 1)
+            if contents[name][0] == "dir":
+                if len(last) == 0:
+                    last = basename
+                    pp.uprint(pp.path(indent + basename[0]))
+                    continue
+                for i, directory in enumerate(basename):
+                    try:
+                        if directory in last[i]:
+                            continue
+                    except IndexError:
+                        pass
+                    last = basename
+                    if len(last) == 1:
+                        pp.uprint(pp.path(indent + last[0]))
+                        continue
+                    pp.uprint(pp.path(indent + "> /" + last[-1]))
+            elif contents[name][0] == "sym":
+                pp.uprint(pp.path(indent + "+"), end=" ")
+                pp.uprint(pp.path_symlink(basename[-1] + " -> " + contents[name][2]))
+            else:
+                pp.uprint(pp.path(indent + "+ ") + basename[-1])
+        else:
+            pp.uprint(
+                format_filetype(
+                    name,
+                    contents[name],
+                    show_type=QUERY_OPTS["show_type"],
+                    show_md5=QUERY_OPTS["show_MD5"],
+                    show_timestamp=QUERY_OPTS["show_timestamp"],
+                )
+            )
 
 
 def filter_by_doc(contents, content_filter):
-	"""Return a copy of content filtered by documentation."""
+    """Return a copy of content filtered by documentation."""
 
-	filtered_content = {}
-	for doctype in ('doc' ,'man' ,'info'):
-		# List only files from /usr/share/{doc,man,info}
-		if doctype in content_filter:
-			docpath = os.path.join(os.sep, 'usr', 'share', doctype)
-			for path in contents:
-				if contents[path][0] == 'obj' and path.startswith(docpath):
-					filtered_content[path] = contents[path]
+    filtered_content = {}
+    for doctype in ("doc", "man", "info"):
+        # List only files from /usr/share/{doc,man,info}
+        if doctype in content_filter:
+            docpath = os.path.join(os.sep, "usr", "share", doctype)
+            for path in contents:
+                if contents[path][0] == "obj" and path.startswith(docpath):
+                    filtered_content[path] = contents[path]
 
-	return filtered_content
+    return filtered_content
 
 
 def filter_by_command(contents):
-	"""Return a copy of content filtered by executable commands."""
+    """Return a copy of content filtered by executable commands."""
 
-	filtered_content = {}
-	userpath = os.environ["PATH"].split(os.pathsep)
-	userpath = [os.path.normpath(x) for x in userpath]
-	for path in contents:
-		if (contents[path][0] in ['obj', 'sym'] and
-			os.path.dirname(path) in userpath):
-			filtered_content[path] = contents[path]
+    filtered_content = {}
+    userpath = os.environ["PATH"].split(os.pathsep)
+    userpath = [os.path.normpath(x) for x in userpath]
+    for path in contents:
+        if contents[path][0] in ["obj", "sym"] and os.path.dirname(path) in userpath:
+            filtered_content[path] = contents[path]
 
-	return filtered_content
+    return filtered_content
 
 
 def filter_by_path(contents):
-	"""Return a copy of content filtered by file paths."""
-
-	filtered_content = {}
-	paths = list(reversed(sorted(contents.keys())))
-	while paths:
-		basepath = paths.pop()
-		if contents[basepath][0] == 'dir':
-			check_subdirs = False
-			for path in paths:
-				if (contents[path][0] != "dir" and
-					os.path.dirname(path) == basepath):
-					filtered_content[basepath] = contents[basepath]
-					check_subdirs = True
-					break
-			if check_subdirs:
-				while (paths and paths[-1].startswith(basepath)):
-					paths.pop()
-
-	return filtered_content
+    """Return a copy of content filtered by file paths."""
+
+    filtered_content = {}
+    paths = list(reversed(sorted(contents.keys())))
+    while paths:
+        basepath = paths.pop()
+        if contents[basepath][0] == "dir":
+            check_subdirs = False
+            for path in paths:
+                if contents[path][0] != "dir" and os.path.dirname(path) == basepath:
+                    filtered_content[basepath] = contents[basepath]
+                    check_subdirs = True
+                    break
+            if check_subdirs:
+                while paths and paths[-1].startswith(basepath):
+                    paths.pop()
+
+    return filtered_content
 
 
 def filter_by_conf(contents):
-	"""Return a copy of content filtered by configuration files."""
+    """Return a copy of content filtered by configuration files."""
 
-	filtered_content = {}
-	conf_path = portage.settings["CONFIG_PROTECT"].split()
-	conf_path = tuple(os.path.normpath(x) for x in conf_path)
-	conf_mask_path = portage.settings["CONFIG_PROTECT_MASK"].split()
-	conf_mask_path = tuple(os.path.normpath(x) for x in conf_mask_path)
-	for path in contents:
-		if contents[path][0] == 'obj' and path.startswith(conf_path):
-			if not path.startswith(conf_mask_path):
-				filtered_content[path] = contents[path]
+    filtered_content = {}
+    conf_path = portage.settings["CONFIG_PROTECT"].split()
+    conf_path = tuple(os.path.normpath(x) for x in conf_path)
+    conf_mask_path = portage.settings["CONFIG_PROTECT_MASK"].split()
+    conf_mask_path = tuple(os.path.normpath(x) for x in conf_mask_path)
+    for path in contents:
+        if contents[path][0] == "obj" and path.startswith(conf_path):
+            if not path.startswith(conf_mask_path):
+                filtered_content[path] = contents[path]
 
-	return filtered_content
+    return filtered_content
 
 
 def filter_by_fifo(contents):
-	"""Return a copy of content filtered by fifo entries."""
+    """Return a copy of content filtered by fifo entries."""
 
-	filtered_content = {}
-	for path in contents:
-		if contents[path][0] in ['fif']:
-			filtered_content[path] = contents[path]
+    filtered_content = {}
+    for path in contents:
+        if contents[path][0] in ["fif"]:
+            filtered_content[path] = contents[path]
 
-	return filtered_content
+    return filtered_content
 
 
 def filter_contents(contents):
-	"""Filter files by type if specified by the user.
-
-	@see: gentoolkit.package.Package.parsed_contents
-	@type contents: dict
-	@param contents: {'path': ['filetype', ...], ...}
-	@rtype: dict
-	@return: contents with unrequested filetypes stripped
-	"""
-
-	if QUERY_OPTS['type_filter']:
-		content_filter = QUERY_OPTS['type_filter']
-	else:
-		return contents
-
-	filtered_content = {}
-	if frozenset(('dir', 'obj', 'sym', 'dev')).intersection(content_filter):
-		# Filter elements by type (as recorded in CONTENTS)
-		for path in contents:
-			if contents[path][0] in content_filter:
-				filtered_content[path] = contents[path]
-	if "cmd" in content_filter:
-		filtered_content.update(filter_by_command(contents))
-	if "path" in content_filter:
-		filtered_content.update(filter_by_path(contents))
-	if "conf" in content_filter:
-		filtered_content.update(filter_by_conf(contents))
-	if frozenset(('doc' ,'man' ,'info')).intersection(content_filter):
-		filtered_content.update(filter_by_doc(contents, content_filter))
-	if "fifo" in content_filter:
-		filtered_content.update(filter_by_fifo(contents))
-
-	return filtered_content
+    """Filter files by type if specified by the user.
+
+    @see: gentoolkit.package.Package.parsed_contents
+    @type contents: dict
+    @param contents: {'path': ['filetype', ...], ...}
+    @rtype: dict
+    @return: contents with unrequested filetypes stripped
+    """
+
+    if QUERY_OPTS["type_filter"]:
+        content_filter = QUERY_OPTS["type_filter"]
+    else:
+        return contents
+
+    filtered_content = {}
+    if frozenset(("dir", "obj", "sym", "dev")).intersection(content_filter):
+        # Filter elements by type (as recorded in CONTENTS)
+        for path in contents:
+            if contents[path][0] in content_filter:
+                filtered_content[path] = contents[path]
+    if "cmd" in content_filter:
+        filtered_content.update(filter_by_command(contents))
+    if "path" in content_filter:
+        filtered_content.update(filter_by_path(contents))
+    if "conf" in content_filter:
+        filtered_content.update(filter_by_conf(contents))
+    if frozenset(("doc", "man", "info")).intersection(content_filter):
+        filtered_content.update(filter_by_doc(contents, content_filter))
+    if "fifo" in content_filter:
+        filtered_content.update(filter_by_fifo(contents))
+
+    return filtered_content
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	content_filter = []
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-m', '--md5sum'):
-			QUERY_OPTS["show_MD5"] = True
-		elif opt in ('-s', '--timestamp'):
-			QUERY_OPTS["show_timestamp"] = True
-		elif opt in ('-t', '--type'):
-			QUERY_OPTS["show_type"] = True
-		elif opt in ('--tree'):
-			QUERY_OPTS["output_tree"] = True
-		elif opt in ('-f', '--filter'):
-			f_split = posarg.split(',')
-			content_filter.extend(x.lstrip('=') for x in f_split)
-			for rule in content_filter:
-				if not rule in FILTER_RULES:
-					sys.stderr.write(
-						pp.error("Invalid filter rule '%s'" % rule)
-					)
-					print()
-					print_help(with_description=False)
-					sys.exit(2)
-			QUERY_OPTS["type_filter"] = content_filter
+    """Parse module options and update QUERY_OPTS"""
+
+    content_filter = []
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-m", "--md5sum"):
+            QUERY_OPTS["show_MD5"] = True
+        elif opt in ("-s", "--timestamp"):
+            QUERY_OPTS["show_timestamp"] = True
+        elif opt in ("-t", "--type"):
+            QUERY_OPTS["show_type"] = True
+        elif opt in ("--tree"):
+            QUERY_OPTS["output_tree"] = True
+        elif opt in ("-f", "--filter"):
+            f_split = posarg.split(",")
+            content_filter.extend(x.lstrip("=") for x in f_split)
+            for rule in content_filter:
+                if not rule in FILTER_RULES:
+                    sys.stderr.write(pp.error("Invalid filter rule '%s'" % rule))
+                    print()
+                    print_help(with_description=False)
+                    sys.exit(2)
+            QUERY_OPTS["type_filter"] = content_filter
 
 
 def main(input_args):
-	"""Parse input and run the program"""
+    """Parse input and run the program"""
+
+    # -e, --exact-name is legacy option. djanderson '09
+    short_opts = "hemstf:"
+    long_opts = ("help", "exact-name", "md5sum", "timestamp", "type", "tree", "filter=")
 
-	# -e, --exact-name is legacy option. djanderson '09
-	short_opts = "hemstf:"
-	long_opts = ('help', 'exact-name', 'md5sum', 'timestamp', 'type', 'tree',
-		'filter=')
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
 
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
+    parse_module_options(module_opts)
 
-	parse_module_options(module_opts)
+    if not queries:
+        print_help()
+        sys.exit(2)
 
-	if not queries:
-		print_help()
-		sys.exit(2)
+    # Turn off filtering for tree output
+    if QUERY_OPTS["output_tree"]:
+        QUERY_OPTS["type_filter"] = None
 
-	# Turn off filtering for tree output
-	if QUERY_OPTS["output_tree"]:
-		QUERY_OPTS["type_filter"] = None
+    #
+    # Output files
+    #
 
-	#
-	# Output files
-	#
+    first_run = True
+    for query in queries:
+        if not first_run:
+            print()
 
-	first_run = True
-	for query in queries:
-		if not first_run:
-			print()
+        matches = Query(query).smart_find(**QUERY_OPTS)
 
-		matches = Query(query).smart_find(**QUERY_OPTS)
+        if not matches:
+            sys.stderr.write(pp.error("No matching packages found for %s" % query))
 
-		if not matches:
-			sys.stderr.write(
-				pp.error("No matching packages found for %s" % query)
-			)
+        matches.sort()
 
-		matches.sort()
+        for pkg in matches:
+            if CONFIG["verbose"]:
+                pp.uprint(" * Contents of %s:" % pp.cpv(str(pkg.cpv)))
 
-		for pkg in matches:
-			if CONFIG['verbose']:
-				pp.uprint(" * Contents of %s:" % pp.cpv(str(pkg.cpv)))
+            contents = pkg.parsed_contents()
+            display_files(filter_contents(contents))
 
-			contents = pkg.parsed_contents()
-			display_files(filter_contents(contents))
+        first_run = False
 
-		first_run = False
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/has.py b/pym/gentoolkit/equery/has.py
index c1c8bba..bc98097 100644
--- a/pym/gentoolkit/equery/has.py
+++ b/pym/gentoolkit/equery/has.py
@@ -4,7 +4,7 @@
 
 """List all installed packages that match for a given ENVIRONMENT variable"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -24,184 +24,194 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"include_masked": True,
-	"show_progress": False,
-	"package_format": None,
-	"package_filter": None,
-	"env_var": None
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "include_masked": True,
+    "show_progress": False,
+    "package_format": None,
+    "package_filter": None,
+    "env_var": None,
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="has", arg="env_var [expr]"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -I, --exclude-installed",
-			"exclude installed packages from search path"),
-		(" -o, --overlay-tree", "include overlays in search path"),
-		(" -p, --portage-tree", "include entire portage tree in search path"),
-		(" -F, --format=TMPL", "specify a custom output format"),
-		("              TMPL",
-			"a format template using (see man page):")
-	)))
-#	print(" " * 24, ', '.join(pp.emph(x) for x in FORMAT_TMPL_VARS))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="has", arg="env_var [expr]"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (
+                    " -I, --exclude-installed",
+                    "exclude installed packages from search path",
+                ),
+                (" -o, --overlay-tree", "include overlays in search path"),
+                (" -p, --portage-tree", "include entire portage tree in search path"),
+                (" -F, --format=TMPL", "specify a custom output format"),
+                ("              TMPL", "a format template using (see man page):"),
+            )
+        )
+    )
+
+
+# 	print(" " * 24, ', '.join(pp.emph(x) for x in FORMAT_TMPL_VARS))
 
 
 def query_in_env(query, env_var, pkg):
-	"""Check if the query is in the pkg's environment."""
+    """Check if the query is in the pkg's environment."""
 
-	try:
-		if env_var in ("USE", "IUSE", "CFLAGS", "CXXFLAGS", "LDFLAGS"):
-			results = set(
-				[x.lstrip("+-") for x in pkg.environment(env_var).split()]
-			)
-		else:
-			results = set(pkg.environment(env_var).split())
-	except errors.GentoolkitFatalError:
-		# aux_get KeyError or other unexpected result
-		return False
+    try:
+        if env_var in ("USE", "IUSE", "CFLAGS", "CXXFLAGS", "LDFLAGS"):
+            results = set([x.lstrip("+-") for x in pkg.environment(env_var).split()])
+        else:
+            results = set(pkg.environment(env_var).split())
+    except errors.GentoolkitFatalError:
+        # aux_get KeyError or other unexpected result
+        return False
 
-	if query in results:
-		return True
+    if query in results:
+        return True
 
-	return False
+    return False
 
 
 def display_pkg(query, env_var, pkg):
-	"""Display information for a given package."""
-
-	if CONFIG['verbose']:
-		pkgstr = PackageFormatter(
-			pkg,
-			do_format=True,
-			custom_format=QUERY_OPTS["package_format"]
-		)
-	else:
-		pkgstr = PackageFormatter(
-			pkg,
-			do_format=False,
-			custom_format=QUERY_OPTS["package_format"]
-		)
-
-	if (QUERY_OPTS["in_installed"] and
-		not QUERY_OPTS["in_porttree"] and
-		not QUERY_OPTS["in_overlay"]):
-		if not 'I' in  pkgstr.location:
-			return False
-	if (QUERY_OPTS["in_porttree"] and
-		not QUERY_OPTS["in_overlay"]):
-		if not 'P' in  pkgstr.location:
-			return False
-	if (QUERY_OPTS["in_overlay"] and
-		not QUERY_OPTS["in_porttree"]):
-		if not 'O' in  pkgstr.location:
-			return False
-	pp.uprint(pkgstr)
-
-	return True
+    """Display information for a given package."""
+
+    if CONFIG["verbose"]:
+        pkgstr = PackageFormatter(
+            pkg, do_format=True, custom_format=QUERY_OPTS["package_format"]
+        )
+    else:
+        pkgstr = PackageFormatter(
+            pkg, do_format=False, custom_format=QUERY_OPTS["package_format"]
+        )
+
+    if (
+        QUERY_OPTS["in_installed"]
+        and not QUERY_OPTS["in_porttree"]
+        and not QUERY_OPTS["in_overlay"]
+    ):
+        if not "I" in pkgstr.location:
+            return False
+    if QUERY_OPTS["in_porttree"] and not QUERY_OPTS["in_overlay"]:
+        if not "P" in pkgstr.location:
+            return False
+    if QUERY_OPTS["in_overlay"] and not QUERY_OPTS["in_porttree"]:
+        if not "O" in pkgstr.location:
+            return False
+    pp.uprint(pkgstr)
+
+    return True
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	# Parse module options
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-I', '--exclue-installed'):
-			QUERY_OPTS['in_installed'] = False
-		elif opt in ('-p', '--portage-tree'):
-			QUERY_OPTS['in_porttree'] = True
-		elif opt in ('-o', '--overlay-tree'):
-			QUERY_OPTS['in_overlay'] = True
-		elif opt in ('-F', '--format'):
-			QUERY_OPTS["package_format"] = posarg
-		elif opt in ('--package'):
-			QUERY_OPTS["package_filter"] = posarg
+    """Parse module options and update QUERY_OPTS"""
+
+    # Parse module options
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-I", "--exclue-installed"):
+            QUERY_OPTS["in_installed"] = False
+        elif opt in ("-p", "--portage-tree"):
+            QUERY_OPTS["in_porttree"] = True
+        elif opt in ("-o", "--overlay-tree"):
+            QUERY_OPTS["in_overlay"] = True
+        elif opt in ("-F", "--format"):
+            QUERY_OPTS["package_format"] = posarg
+        elif opt in ("--package"):
+            QUERY_OPTS["package_filter"] = posarg
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hiIpoF:" # -i was option for default action
-	# --installed is no longer needed, kept for compatibility (djanderson '09)
-	long_opts = ('help', 'installed', 'exclude-installed', 'portage-tree',
-		'overlay-tree', 'format=', 'package=')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	query_scope = QUERY_OPTS['package_filter'] or '*'
-	matches = Query(query_scope).smart_find(**QUERY_OPTS)
-	matches.sort()
-
-	# split out the first query since it is suppose to be the env_var
-	QUERY_OPTS['env_var'] = queries.pop(0)
-	env_var = QUERY_OPTS['env_var']
-
-	#
-	# Output
-	#
-
-	if not queries:
-		if not QUERY_OPTS['package_filter']:
-			err = "Used ENV_VAR without match_expression or --package"
-			raise errors.GentoolkitFatalError(err, is_serious=False)
-		else:
-			if len(matches) > 1:
-				raise errors.AmbiguousPackageName(matches)
-			for match in matches:
-				env = QUERY_OPTS['env_var']
-				print(match.environment(env))
-
-	first_run = True
-	got_match = False
-	for query in queries:
-		if not first_run:
-			print()
-
-		if CONFIG['verbose']:
-			status = " * Searching for {0} {1} ... "
-			pp.uprint(status.format(env_var, pp.emph(query)))
-
-		for pkg in matches:
-			if query_in_env(query, env_var, pkg):
-				display_pkg(query, env_var, pkg)
-				got_match = True
-		first_run = False
-
-	if not got_match:
-		sys.exit(1)
+    """Parse input and run the program"""
+
+    short_opts = "hiIpoF:"  # -i was option for default action
+    # --installed is no longer needed, kept for compatibility (djanderson '09)
+    long_opts = (
+        "help",
+        "installed",
+        "exclude-installed",
+        "portage-tree",
+        "overlay-tree",
+        "format=",
+        "package=",
+    )
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    query_scope = QUERY_OPTS["package_filter"] or "*"
+    matches = Query(query_scope).smart_find(**QUERY_OPTS)
+    matches.sort()
+
+    # split out the first query since it is suppose to be the env_var
+    QUERY_OPTS["env_var"] = queries.pop(0)
+    env_var = QUERY_OPTS["env_var"]
+
+    #
+    # Output
+    #
+
+    if not queries:
+        if not QUERY_OPTS["package_filter"]:
+            err = "Used ENV_VAR without match_expression or --package"
+            raise errors.GentoolkitFatalError(err, is_serious=False)
+        else:
+            if len(matches) > 1:
+                raise errors.AmbiguousPackageName(matches)
+            for match in matches:
+                env = QUERY_OPTS["env_var"]
+                print(match.environment(env))
+
+    first_run = True
+    got_match = False
+    for query in queries:
+        if not first_run:
+            print()
+
+        if CONFIG["verbose"]:
+            status = " * Searching for {0} {1} ... "
+            pp.uprint(status.format(env_var, pp.emph(query)))
+
+        for pkg in matches:
+            if query_in_env(query, env_var, pkg):
+                display_pkg(query, env_var, pkg)
+                got_match = True
+        first_run = False
+
+    if not got_match:
+        sys.exit(1)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/hasuse.py b/pym/gentoolkit/equery/hasuse.py
index 07207bc..13fbb24 100644
--- a/pym/gentoolkit/equery/hasuse.py
+++ b/pym/gentoolkit/equery/hasuse.py
@@ -4,7 +4,7 @@
 
 """List all installed packages that have a given USE flag"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -24,153 +24,161 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"include_masked": True,
-	"show_progress": False,
-	"package_format": None
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "include_masked": True,
+    "show_progress": False,
+    "package_format": None,
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="hasuse", arg="USE-flag"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -I, --exclude-installed",
-			"exclude installed packages from search path"),
-		(" -o, --overlay-tree", "include overlays in search path"),
-		(" -p, --portage-tree", "include entire portage tree in search path"),
-		(" -F, --format=TMPL", "specify a custom output format"),
-		("              TMPL",
-			"a format template using (see man page):")
-	)))
-	print(" " * 24, ', '.join(pp.emph(x) for x in FORMAT_TMPL_VARS))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="hasuse", arg="USE-flag"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (
+                    " -I, --exclude-installed",
+                    "exclude installed packages from search path",
+                ),
+                (" -o, --overlay-tree", "include overlays in search path"),
+                (" -p, --portage-tree", "include entire portage tree in search path"),
+                (" -F, --format=TMPL", "specify a custom output format"),
+                ("              TMPL", "a format template using (see man page):"),
+            )
+        )
+    )
+    print(" " * 24, ", ".join(pp.emph(x) for x in FORMAT_TMPL_VARS))
 
 
 def display_useflags(query, pkg):
-	"""Display USE flag information for a given package."""
-
-	try:
-		useflags = [x.lstrip("+-") for x in pkg.environment("IUSE").split()]
-	except errors.GentoolkitFatalError:
-		# aux_get KeyError or other unexpected result
-		return False
-
-	if query not in useflags:
-		return False
-
-	if CONFIG['verbose']:
-		pkgstr = PackageFormatter(
-			pkg,
-			do_format=True,
-			custom_format=QUERY_OPTS["package_format"]
-		)
-	else:
-		pkgstr = PackageFormatter(
-			pkg,
-			do_format=False,
-			custom_format=QUERY_OPTS["package_format"]
-		)
-
-	if (QUERY_OPTS["in_installed"] and
-		not QUERY_OPTS["in_porttree"] and
-		not QUERY_OPTS["in_overlay"]):
-		if not 'I' in  pkgstr.location:
-			return False
-	if (QUERY_OPTS["in_porttree"] and
-		not QUERY_OPTS["in_overlay"]):
-		if not 'P' in  pkgstr.location:
-			return False
-	if (QUERY_OPTS["in_overlay"] and
-		not QUERY_OPTS["in_porttree"]):
-		if not 'O' in  pkgstr.location:
-			return False
-	pp.uprint(pkgstr)
-
-	return True
-
+    """Display USE flag information for a given package."""
+
+    try:
+        useflags = [x.lstrip("+-") for x in pkg.environment("IUSE").split()]
+    except errors.GentoolkitFatalError:
+        # aux_get KeyError or other unexpected result
+        return False
+
+    if query not in useflags:
+        return False
+
+    if CONFIG["verbose"]:
+        pkgstr = PackageFormatter(
+            pkg, do_format=True, custom_format=QUERY_OPTS["package_format"]
+        )
+    else:
+        pkgstr = PackageFormatter(
+            pkg, do_format=False, custom_format=QUERY_OPTS["package_format"]
+        )
+
+    if (
+        QUERY_OPTS["in_installed"]
+        and not QUERY_OPTS["in_porttree"]
+        and not QUERY_OPTS["in_overlay"]
+    ):
+        if not "I" in pkgstr.location:
+            return False
+    if QUERY_OPTS["in_porttree"] and not QUERY_OPTS["in_overlay"]:
+        if not "P" in pkgstr.location:
+            return False
+    if QUERY_OPTS["in_overlay"] and not QUERY_OPTS["in_porttree"]:
+        if not "O" in pkgstr.location:
+            return False
+    pp.uprint(pkgstr)
+
+    return True
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	# Parse module options
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-I', '--exclue-installed'):
-			QUERY_OPTS['in_installed'] = False
-		elif opt in ('-p', '--portage-tree'):
-			QUERY_OPTS['in_porttree'] = True
-		elif opt in ('-o', '--overlay-tree'):
-			QUERY_OPTS['in_overlay'] = True
-		elif opt in ('-F', '--format'):
-			QUERY_OPTS["package_format"] = posarg
+    """Parse module options and update QUERY_OPTS"""
+
+    # Parse module options
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-I", "--exclude-installed"):
+            QUERY_OPTS["in_installed"] = False
+        elif opt in ("-p", "--portage-tree"):
+            QUERY_OPTS["in_porttree"] = True
+        elif opt in ("-o", "--overlay-tree"):
+            QUERY_OPTS["in_overlay"] = True
+        elif opt in ("-F", "--format"):
+            QUERY_OPTS["package_format"] = posarg
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hiIpoF:" # -i was option for default action
-	# --installed is no longer needed, kept for compatibility (djanderson '09)
-	long_opts = ('help', 'installed', 'exclude-installed', 'portage-tree',
-		'overlay-tree', 'format=')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	matches = Query("*").smart_find(**QUERY_OPTS)
-	matches.sort()
-
-	#
-	# Output
-	#
-
-	first_run = True
-	got_match = False
-	for query in queries:
-		if not first_run:
-			print()
-
-		if CONFIG['verbose']:
-			pp.uprint(" * Searching for USE flag %s ... " % pp.emph(query))
-
-		for pkg in matches:
-			if display_useflags(query, pkg):
-				got_match = True
-
-		first_run = False
+    """Parse input and run the program"""
+
+    short_opts = "hiIpoF:"  # -i was option for default action
+    # --installed is no longer needed, kept for compatibility (djanderson '09)
+    long_opts = (
+        "help",
+        "installed",
+        "exclude-installed",
+        "portage-tree",
+        "overlay-tree",
+        "format=",
+    )
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    matches = Query("*").smart_find(**QUERY_OPTS)
+    matches.sort()
+
+    #
+    # Output
+    #
+
+    first_run = True
+    got_match = False
+    for query in queries:
+        if not first_run:
+            print()
+
+        if CONFIG["verbose"]:
+            pp.uprint(" * Searching for USE flag %s ... " % pp.emph(query))
+
+        for pkg in matches:
+            if display_useflags(query, pkg):
+                got_match = True
+
+        first_run = False
+
+    if not got_match:
+        sys.exit(1)
 
-	if not got_match:
-		sys.exit(1)
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/keywords.py b/pym/gentoolkit/equery/keywords.py
index be79fe3..dbe5133 100644
--- a/pym/gentoolkit/equery/keywords.py
+++ b/pym/gentoolkit/equery/keywords.py
@@ -1,8 +1,9 @@
-#	vim:fileencoding=utf-8
+# 	vim:fileencoding=utf-8
 # Copyright 2010 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
 from gentoolkit.eshowkw import main as emain
 
+
 def main(input_args):
-	emain(input_args, True)
\ No newline at end of file
+    emain(input_args, True)

diff --git a/pym/gentoolkit/equery/list_.py b/pym/gentoolkit/equery/list_.py
index f66dfed..1e03d91 100644
--- a/pym/gentoolkit/equery/list_.py
+++ b/pym/gentoolkit/equery/list_.py
@@ -4,7 +4,7 @@
 
 """List installed packages matching the query pattern"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -25,231 +25,249 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"duplicates": False,
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"include_mask_reason": False,
-	"is_regex": False,
-	"show_progress": (not CONFIG['quiet']),
-	"package_format": None,
-	"binpkgs-missing": False
+    "duplicates": False,
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "include_mask_reason": False,
+    "is_regex": False,
+    "show_progress": (not CONFIG["quiet"]),
+    "package_format": None,
+    "binpkgs-missing": False,
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-
-	# Deprecation warning added by djanderson, 12/2008
-	depwarning = (
-		"Default action for this module has changed in Gentoolkit 0.3.",
-		"Use globbing to simulate the old behavior (see man equery).",
-		"Use '*' to check all installed packages.",
-		"Use 'foo-bar/*' to filter by category."
-	)
-	for line in depwarning:
-		sys.stderr.write(pp.warn(line))
-	print()
-
-	print(mod_usage(mod_name="list"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -d, --duplicates", "list only installed duplicate packages"),
-		(" -b, --binpkgs-missing", "list only installed packages without a corresponding binary package"),
-		(" -f, --full-regex", "query is a regular expression"),
-		(" -m, --mask-reason", "include reason for package mask"),
-		(" -I, --exclude-installed",
-			"exclude installed packages from output"),
-		(" -o, --overlay-tree", "list packages in overlays"),
-		(" -p, --portage-tree", "list packages in the main portage tree"),
-		(" -F, --format=TMPL", "specify a custom output format"),
-        ("              TMPL",
-			"a format template using (see man page):")
-	)))
-	print(" " * 24, ', '.join(pp.emph(x) for x in FORMAT_TMPL_VARS))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+
+    # Deprecation warning added by djanderson, 12/2008
+    depwarning = (
+        "Default action for this module has changed in Gentoolkit 0.3.",
+        "Use globbing to simulate the old behavior (see man equery).",
+        "Use '*' to check all installed packages.",
+        "Use 'foo-bar/*' to filter by category.",
+    )
+    for line in depwarning:
+        sys.stderr.write(pp.warn(line))
+    print()
+
+    print(mod_usage(mod_name="list"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -d, --duplicates", "list only installed duplicate packages"),
+                (
+                    " -b, --binpkgs-missing",
+                    "list only installed packages without a corresponding binary package",
+                ),
+                (" -f, --full-regex", "query is a regular expression"),
+                (" -m, --mask-reason", "include reason for package mask"),
+                (" -I, --exclude-installed", "exclude installed packages from output"),
+                (" -o, --overlay-tree", "list packages in overlays"),
+                (" -p, --portage-tree", "list packages in the main portage tree"),
+                (" -F, --format=TMPL", "specify a custom output format"),
+                ("              TMPL", "a format template using (see man page):"),
+            )
+        )
+    )
+    print(" " * 24, ", ".join(pp.emph(x) for x in FORMAT_TMPL_VARS))
 
 
 def get_duplicates(matches):
-	"""Return only packages that have more than one version installed."""
+    """Return only packages that have more than one version installed."""
 
-	dups = {}
-	result = []
-	for pkg in matches:
-		if pkg.cp in dups:
-			dups[pkg.cp].append(pkg)
-		else:
-			dups[pkg.cp] = [pkg]
+    dups = {}
+    result = []
+    for pkg in matches:
+        if pkg.cp in dups:
+            dups[pkg.cp].append(pkg)
+        else:
+            dups[pkg.cp] = [pkg]
 
-	for cpv in dups.values():
-		if len(cpv) > 1:
-			result.extend(cpv)
+    for cpv in dups.values():
+        if len(cpv) > 1:
+            result.extend(cpv)
 
-	return result
+    return result
 
 
 def get_binpkgs_missing(matches):
-	"""Return only packages that do not have a corresponding binary package."""
+    """Return only packages that do not have a corresponding binary package."""
 
-	result = []
-	binary_packages = set(get_bintree_cpvs())
-	matched_packages = set(x.cpv for x in matches)
-	missing_binary_packages = set(matched_packages.difference(binary_packages))
+    result = []
+    binary_packages = set(get_bintree_cpvs())
+    matched_packages = set(x.cpv for x in matches)
+    missing_binary_packages = set(matched_packages.difference(binary_packages))
 
-	for pkg in matches:
-		if pkg.cpv in missing_binary_packages:
-			result.append(pkg)
-	return result
+    for pkg in matches:
+        if pkg.cpv in missing_binary_packages:
+            result.append(pkg)
+    return result
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	posargs = (x[1] for x in module_opts)
-	for opt, posarg in zip(opts, posargs):
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-I', '--exclude-installed'):
-			QUERY_OPTS['in_installed'] = False
-		elif opt in ('-p', '--portage-tree'):
-			QUERY_OPTS['in_porttree'] = True
-		elif opt in ('-o', '--overlay-tree'):
-			QUERY_OPTS['in_overlay'] = True
-		elif opt in ('-f', '--full-regex'):
-			QUERY_OPTS['is_regex'] = True
-		elif opt in ('-m', '--mask-reason'):
-			QUERY_OPTS['include_mask_reason'] = True
-		elif opt in ('-e', '--exact-name'):
-			sys.stderr.write(pp.warn("-e, --exact-name is now default."))
-			sys.stderr.write(
-				pp.warn("Use globbing to simulate the old behavior.")
-			)
-			print()
-		elif opt in ('-d', '--duplicates'):
-			QUERY_OPTS['duplicates'] = True
-		elif opt in ('-b', '--binpkgs-missing'):
-			QUERY_OPTS['binpkgs-missing'] = True
-		elif opt in ('-F', '--format'):
-			QUERY_OPTS["package_format"] = posarg
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    posargs = (x[1] for x in module_opts)
+    for opt, posarg in zip(opts, posargs):
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-I", "--exclude-installed"):
+            QUERY_OPTS["in_installed"] = False
+        elif opt in ("-p", "--portage-tree"):
+            QUERY_OPTS["in_porttree"] = True
+        elif opt in ("-o", "--overlay-tree"):
+            QUERY_OPTS["in_overlay"] = True
+        elif opt in ("-f", "--full-regex"):
+            QUERY_OPTS["is_regex"] = True
+        elif opt in ("-m", "--mask-reason"):
+            QUERY_OPTS["include_mask_reason"] = True
+        elif opt in ("-e", "--exact-name"):
+            sys.stderr.write(pp.warn("-e, --exact-name is now default."))
+            sys.stderr.write(pp.warn("Use globbing to simulate the old behavior."))
+            print()
+        elif opt in ("-d", "--duplicates"):
+            QUERY_OPTS["duplicates"] = True
+        elif opt in ("-b", "--binpkgs-missing"):
+            QUERY_OPTS["binpkgs-missing"] = True
+        elif opt in ("-F", "--format"):
+            QUERY_OPTS["package_format"] = posarg
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hdbefiImopF:" # -i, -e were options for default actions
-
-	# 04/09: djanderson
-	# --all is no longer needed. Kept for compatibility.
-	# --installed is no longer needed. Kept for compatibility.
-	# --exact-name is no longer needed. Kept for compatibility.
-	long_opts = ('help', 'all', 'installed', 'exclude-installed',
-		'mask-reason', 'portage-tree', 'overlay-tree', 'format=', 'full-regex',
-		'exact-name', 'duplicates', 'binpkgs-missing')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	# Only search installed packages when listing duplicate or missing binary packages
-	if QUERY_OPTS["duplicates"] or QUERY_OPTS["binpkgs-missing"]:
-		QUERY_OPTS["in_installed"] = True
-		QUERY_OPTS["in_porttree"] = False
-		QUERY_OPTS["in_overlay"] = False
-		QUERY_OPTS["include_mask_reason"] = False
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	first_run = True
-	for query in (Query(x, QUERY_OPTS['is_regex']) for x in queries):
-		if not first_run:
-			print()
-
-		# if we are in quiet mode, do not raise GentoolkitNoMatches exception
-		# instead we raise GentoolkitNonZeroExit to exit with an exit value of 3
-		try:
-			matches = query.smart_find(**QUERY_OPTS)
-		except errors.GentoolkitNoMatches:
-			if CONFIG['verbose']:
-				raise
-			else:
-				raise errors.GentoolkitNonZeroExit(3)
-
-		# Find duplicate packages
-		if QUERY_OPTS["duplicates"]:
-			matches = get_duplicates(matches)
-
-		# Find missing binary packages
-		if QUERY_OPTS["binpkgs-missing"]:
-			matches = get_binpkgs_missing(matches)
-
-		matches.sort()
-
-		#
-		# Output
-		#
-
-		for pkg in matches:
-			pkgstr = PackageFormatter(
-				pkg,
-				do_format=CONFIG['verbose'],
-				custom_format=QUERY_OPTS["package_format"]
-			)
-
-			if (QUERY_OPTS["in_porttree"] and
-				not QUERY_OPTS["in_overlay"]):
-				if not 'P' in pkgstr.location:
-					continue
-			if (QUERY_OPTS["in_overlay"] and
-				not QUERY_OPTS["in_porttree"]):
-				if not 'O' in pkgstr.location:
-					continue
-			pp.uprint(pkgstr)
-
-			if QUERY_OPTS["include_mask_reason"]:
-				ms_int, ms_orig = pkgstr.format_mask_status()
-				if ms_int < 3:
-					# ms_int is a number representation of mask level.
-					# Only 2 and above are "hard masked" and have reasons.
-					continue
-				mask_reason = pkg.mask_reason()
-				if not mask_reason:
-					# Package not on system or not masked
-					continue
-				elif not any(mask_reason):
-					print(" * No mask reason given")
-				else:
-					status = ', '.join(ms_orig)
-					explanation = mask_reason[0]
-					mask_location = mask_reason[1]
-					pp.uprint(" * Masked by %r" % status)
-					pp.uprint(" * %s:" % mask_location)
-					pp.uprint('\n'.join(
-						[' * %s' % line.lstrip(' #')
-							for line in explanation.splitlines()]
-						))
-
-		first_run = False
+    """Parse input and run the program"""
+
+    short_opts = "hdbefiImopF:"  # -i, -e were options for default actions
+
+    # 04/09: djanderson
+    # --all is no longer needed. Kept for compatibility.
+    # --installed is no longer needed. Kept for compatibility.
+    # --exact-name is no longer needed. Kept for compatibility.
+    long_opts = (
+        "help",
+        "all",
+        "installed",
+        "exclude-installed",
+        "mask-reason",
+        "portage-tree",
+        "overlay-tree",
+        "format=",
+        "full-regex",
+        "exact-name",
+        "duplicates",
+        "binpkgs-missing",
+    )
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    # Only search installed packages when listing duplicate or missing binary packages
+    if QUERY_OPTS["duplicates"] or QUERY_OPTS["binpkgs-missing"]:
+        QUERY_OPTS["in_installed"] = True
+        QUERY_OPTS["in_porttree"] = False
+        QUERY_OPTS["in_overlay"] = False
+        QUERY_OPTS["include_mask_reason"] = False
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    first_run = True
+    for query in (Query(x, QUERY_OPTS["is_regex"]) for x in queries):
+        if not first_run:
+            print()
+
+        # if we are in quiet mode, do not raise GentoolkitNoMatches exception
+        # instead we raise GentoolkitNonZeroExit to exit with an exit value of 3
+        try:
+            matches = query.smart_find(**QUERY_OPTS)
+        except errors.GentoolkitNoMatches:
+            if CONFIG["verbose"]:
+                raise
+            else:
+                raise errors.GentoolkitNonZeroExit(3)
+
+        # Find duplicate packages
+        if QUERY_OPTS["duplicates"]:
+            matches = get_duplicates(matches)
+
+        # Find missing binary packages
+        if QUERY_OPTS["binpkgs-missing"]:
+            matches = get_binpkgs_missing(matches)
+
+        matches.sort()
+
+        #
+        # Output
+        #
+
+        for pkg in matches:
+            pkgstr = PackageFormatter(
+                pkg,
+                do_format=CONFIG["verbose"],
+                custom_format=QUERY_OPTS["package_format"],
+            )
+
+            if QUERY_OPTS["in_porttree"] and not QUERY_OPTS["in_overlay"]:
+                if not "P" in pkgstr.location:
+                    continue
+            if QUERY_OPTS["in_overlay"] and not QUERY_OPTS["in_porttree"]:
+                if not "O" in pkgstr.location:
+                    continue
+            pp.uprint(pkgstr)
+
+            if QUERY_OPTS["include_mask_reason"]:
+                ms_int, ms_orig = pkgstr.format_mask_status()
+                if ms_int < 3:
+                    # ms_int is a number representation of mask level.
+                    # Only 2 and above are "hard masked" and have reasons.
+                    continue
+                mask_reason = pkg.mask_reason()
+                if not mask_reason:
+                    # Package not on system or not masked
+                    continue
+                elif not any(mask_reason):
+                    print(" * No mask reason given")
+                else:
+                    status = ", ".join(ms_orig)
+                    explanation = mask_reason[0]
+                    mask_location = mask_reason[1]
+                    pp.uprint(" * Masked by %r" % status)
+                    pp.uprint(" * %s:" % mask_location)
+                    pp.uprint(
+                        "\n".join(
+                            [
+                                " * %s" % line.lstrip(" #")
+                                for line in explanation.splitlines()
+                            ]
+                        )
+                    )
+
+        first_run = False
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/meta.py b/pym/gentoolkit/equery/meta.py
index 2e632d5..7747893 100644
--- a/pym/gentoolkit/equery/meta.py
+++ b/pym/gentoolkit/equery/meta.py
@@ -4,7 +4,7 @@
 
 """Display metadata about a given package."""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -28,533 +28,558 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	'current': False,
-	'description': False,
-	'herd': False,
-	'keywords': False,
-	'license': False,
-	'maintainer': False,
-	'stablereq': False,
-	'useflags': False,
-	'upstream': False,
-	'xml': False
+    "current": False,
+    "description": False,
+    "herd": False,
+    "keywords": False,
+    "license": False,
+    "maintainer": False,
+    "stablereq": False,
+    "useflags": False,
+    "upstream": False,
+    "xml": False,
 }
 
 STABLEREQ_arches = {
-	'alpha': 'alpha@gentoo.org',
-	'amd64': 'amd64@gentoo.org',
-	'arm': 'arm@gentoo.org',
-	'hppa': 'hppa@gentoo.org',
-	'ia64': 'ia64@gentoo.org',
-	'm68k': 'm68k@gentoo.org',
-	'ppc64': 'ppc64@gentoo.org',
-	'ppc': 'ppc@gentoo.org',
-	's390': 's390@gentoo.org',
-	'sh': 'sh@gentoo.org',
-	'sparc': 'sparc@gentoo.org',
-	'x86': 'x86@gentoo.org',
+    "alpha": "alpha@gentoo.org",
+    "amd64": "amd64@gentoo.org",
+    "arm": "arm@gentoo.org",
+    "hppa": "hppa@gentoo.org",
+    "ia64": "ia64@gentoo.org",
+    "m68k": "m68k@gentoo.org",
+    "ppc64": "ppc64@gentoo.org",
+    "ppc": "ppc@gentoo.org",
+    "s390": "s390@gentoo.org",
+    "sh": "sh@gentoo.org",
+    "sparc": "sparc@gentoo.org",
+    "x86": "x86@gentoo.org",
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True, with_usage=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	if with_usage:
-		print(mod_usage(mod_name="meta"))
-		print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -d, --description", "show an extended package description"),
-		(" -H, --herd", "show the herd(s) for the package"),
-		(" -k, --keywords", "show keywords for all matching package versions"),
-		(" -l, --license", "show licenses for the best maching version"),
-		(" -m, --maintainer", "show the maintainer(s) for the package"),
-		(" -r, --reverse", "show the output in reverse order if applicable"),
-		(" -S, --stablreq", "show STABLEREQ arches (cc's) for all matching package versions"),
-		(" -u, --useflags", "show per-package USE flag descriptions"),
-		(" -U, --upstream", "show package's upstream information"),
-		(" -x, --xml", "show the plain metadata.xml file")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    if with_usage:
+        print(mod_usage(mod_name="meta"))
+        print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -d, --description", "show an extended package description"),
+                (" -H, --herd", "show the herd(s) for the package"),
+                (" -k, --keywords", "show keywords for all matching package versions"),
+                (" -l, --license", "show licenses for the best matching version"),
+                (" -m, --maintainer", "show the maintainer(s) for the package"),
+                (" -r, --reverse", "show the output in reverse order if applicable"),
+                (
+                    " -S, --stablreq",
+                    "show STABLEREQ arches (cc's) for all matching package versions",
+                ),
+                (" -u, --useflags", "show per-package USE flag descriptions"),
+                (" -U, --upstream", "show package's upstream information"),
+                (" -x, --xml", "show the plain metadata.xml file"),
+            )
+        )
+    )
+
 
 def stablereq(matches):
-	"""Produce the list of cc's for a STABLREQ bug
-	@type matches: array
-	@param matches: set of L{gentoolkit.package.Package} instances whose
-		'key' are all the same.
-	@rtype: dict
-	@return: a dict with L{gentoolkit.package.Package} instance keys and
-		'array of cc's to be added to a STABLEREQ bug.
-	"""
-	result = {}
-	for pkg in matches:
-		keywords_str = pkg.environment(('KEYWORDS'), prefer_vdb=False)
-		# get any unstable keywords
-		keywords = set([x.lstrip('~') for x in keywords_str.split() if'~' in x])
-		stable_arches = set(list(STABLEREQ_arches))
-		cc_keywords = stable_arches.intersection(keywords)
-		# add cc's
-		result[pkg] = [STABLEREQ_arches[x] for x in cc_keywords]
-	return result
+    """Produce the list of cc's for a STABLEREQ bug
+    @type matches: array
+    @param matches: set of L{gentoolkit.package.Package} instances whose
+            'key' are all the same.
+    @rtype: dict
+    @return: a dict with L{gentoolkit.package.Package} instance keys and
+            array of cc's to be added to a STABLEREQ bug.
+    """
+    result = {}
+    for pkg in matches:
+        keywords_str = pkg.environment(("KEYWORDS"), prefer_vdb=False)
+        # get any unstable keywords
+        keywords = set([x.lstrip("~") for x in keywords_str.split() if "~" in x])
+        stable_arches = set(list(STABLEREQ_arches))
+        cc_keywords = stable_arches.intersection(keywords)
+        # add cc's
+        result[pkg] = [STABLEREQ_arches[x] for x in cc_keywords]
+    return result
+
 
 def filter_keywords(matches):
-	"""Filters non-unique keywords per slot.
-
-	Does not filter arch mask keywords (-). Besides simple non-unique keywords,
-	also remove unstable keywords (~) if a higher version in the same slot is
-	stable. This view makes version bumps easier for package maintainers.
-
-	@type matches: array
-	@param matches: set of L{gentoolkit.package.Package} instances whose
-		'key' are all the same.
-	@rtype: dict
-	@return: a dict with L{gentoolkit.package.Package} instance keys and
-		'array of keywords not found in a higher version of pkg within the
-		same slot' values.
-	"""
-	def del_archmask(keywords):
-		"""Don't add arch_masked to filter set."""
-		return [x for x in keywords if not x.startswith('-')]
-
-	def add_unstable(keywords):
-		"""Add unstable keyword for all stable keywords to filter set."""
-		result = list(keywords)
-		result.extend(
-			['~%s' % x for x in keywords if not x.startswith(('-', '~'))]
-		)
-		return result
-
-	result = {}
-	slot_map = {}
-	# Start from the newest
-	rev_matches = reversed(matches)
-	for pkg in rev_matches:
-		keywords_str, slot = pkg.environment(('KEYWORDS', 'SLOT'),
-			prefer_vdb=False)
-		keywords = keywords_str.split()
-		result[pkg] = [x for x in keywords if x not in slot_map.get(slot, [])]
-		try:
-			slot_map[slot].update(del_archmask(add_unstable(keywords)))
-		except KeyError:
-			slot_map[slot] = set(del_archmask(add_unstable(keywords)))
-
-	return result
+    """Filters non-unique keywords per slot.
+
+    Does not filter arch mask keywords (-). Besides simple non-unique keywords,
+    also remove unstable keywords (~) if a higher version in the same slot is
+    stable. This view makes version bumps easier for package maintainers.
+
+    @type matches: array
+    @param matches: set of L{gentoolkit.package.Package} instances whose
+            'key' are all the same.
+    @rtype: dict
+    @return: a dict with L{gentoolkit.package.Package} instance keys and
+            'array of keywords not found in a higher version of pkg within the
+            same slot' values.
+    """
+
+    def del_archmask(keywords):
+        """Don't add arch_masked to filter set."""
+        return [x for x in keywords if not x.startswith("-")]
+
+    def add_unstable(keywords):
+        """Add unstable keyword for all stable keywords to filter set."""
+        result = list(keywords)
+        result.extend(["~%s" % x for x in keywords if not x.startswith(("-", "~"))])
+        return result
+
+    result = {}
+    slot_map = {}
+    # Start from the newest
+    rev_matches = reversed(matches)
+    for pkg in rev_matches:
+        keywords_str, slot = pkg.environment(("KEYWORDS", "SLOT"), prefer_vdb=False)
+        keywords = keywords_str.split()
+        result[pkg] = [x for x in keywords if x not in slot_map.get(slot, [])]
+        try:
+            slot_map[slot].update(del_archmask(add_unstable(keywords)))
+        except KeyError:
+            slot_map[slot] = set(del_archmask(add_unstable(keywords)))
+
+    return result
 
 
 def format_herds(herds):
-	"""Format herd information for display."""
+    """Format herd information for display."""
 
-	result = []
-	for herd in herds:
-		herdstr = ''
-		email = "(%s)" % herd[1] if herd[1] else ''
-		herdstr = herd[0]
-		if CONFIG['verbose']:
-			herdstr += " %s" % (email,)
-		result.append(herdstr)
+    result = []
+    for herd in herds:
+        herdstr = ""
+        email = "(%s)" % herd[1] if herd[1] else ""
+        herdstr = herd[0]
+        if CONFIG["verbose"]:
+            herdstr += " %s" % (email,)
+        result.append(herdstr)
 
-	return result
+    return result
 
 
 def format_maintainers(maints):
-	"""Format maintainer information for display."""
+    """Format maintainer information for display."""
 
-	result = []
-	for maint in maints:
-		maintstr = maint.email or ''
-		if CONFIG['verbose']:
-			maintstr += " (%s)" % (maint.name,) if maint.name else ''
-			maintstr += " - %s" % (maint.restrict,) if maint.restrict else ''
-			maintstr += "\n%s" % (maint.description,) if maint.description else ''
-		result.append(maintstr)
+    result = []
+    for maint in maints:
+        maintstr = maint.email or ""
+        if CONFIG["verbose"]:
+            maintstr += " (%s)" % (maint.name,) if maint.name else ""
+            maintstr += " - %s" % (maint.restrict,) if maint.restrict else ""
+            maintstr += "\n%s" % (maint.description,) if maint.description else ""
+        result.append(maintstr)
 
-	return result
+    return result
 
 
 def format_upstream(upstream):
-	"""Format upstream information for display."""
-
-	def _format_upstream_docs(docs):
-		result = []
-		for doc in docs:
-			doc_location = doc[0]
-			doc_lang = doc[1]
-			docstr = doc_location
-			if doc_lang is not None:
-				docstr += " (%s)" % (doc_lang,)
-			result.append(docstr)
-		return result
-
-	def _format_upstream_ids(ids):
-		result = []
-		for id_ in ids:
-			site = id_[0]
-			proj_id = id_[1]
-			idstr = "%s ID: %s" % (site, proj_id)
-			result.append(idstr)
-		return result
-
-	result = []
-	for up in upstream:
-		upmaints = format_maintainers(up.maintainers)
-		for upmaint in upmaints:
-			result.append(format_line(upmaint, "Maintainer:  ", " " * 13))
-
-		for upchange in up.changelogs:
-			result.append(format_line(upchange, "ChangeLog:   ", " " * 13))
-
-		updocs = _format_upstream_docs(up.docs)
-		for updoc in updocs:
-			result.append(format_line(updoc, "Docs:       ", " " * 13))
-
-		for upbug in up.bugtrackers:
-			result.append(format_line(upbug, "Bugs-to:     ", " " * 13))
-
-		upids = _format_upstream_ids(up.remoteids)
-		for upid in upids:
-			result.append(format_line(upid, "Remote-ID:   ", " " * 13))
-
-	return result
+    """Format upstream information for display."""
+
+    def _format_upstream_docs(docs):
+        result = []
+        for doc in docs:
+            doc_location = doc[0]
+            doc_lang = doc[1]
+            docstr = doc_location
+            if doc_lang is not None:
+                docstr += " (%s)" % (doc_lang,)
+            result.append(docstr)
+        return result
+
+    def _format_upstream_ids(ids):
+        result = []
+        for id_ in ids:
+            site = id_[0]
+            proj_id = id_[1]
+            idstr = "%s ID: %s" % (site, proj_id)
+            result.append(idstr)
+        return result
+
+    result = []
+    for up in upstream:
+        upmaints = format_maintainers(up.maintainers)
+        for upmaint in upmaints:
+            result.append(format_line(upmaint, "Maintainer:  ", " " * 13))
+
+        for upchange in up.changelogs:
+            result.append(format_line(upchange, "ChangeLog:   ", " " * 13))
+
+        updocs = _format_upstream_docs(up.docs)
+        for updoc in updocs:
+            result.append(format_line(updoc, "Docs:       ", " " * 13))
+
+        for upbug in up.bugtrackers:
+            result.append(format_line(upbug, "Bugs-to:     ", " " * 13))
+
+        upids = _format_upstream_ids(up.remoteids)
+        for upid in upids:
+            result.append(format_line(upid, "Remote-ID:   ", " " * 13))
+
+    return result
 
 
 def format_useflags(useflags):
-	"""Format USE flag information for display."""
+    """Format USE flag information for display."""
 
-	result = []
-	for flag in useflags:
-		result.append(pp.useflag(flag.name))
-		result.append(flag.description)
-		result.append("")
+    result = []
+    for flag in useflags:
+        result.append(pp.useflag(flag.name))
+        result.append(flag.description)
+        result.append("")
 
-	return result
+    return result
 
 
 def format_keywords(keywords):
-	"""Sort and colorize keywords for display."""
+    """Sort and colorize keywords for display."""
 
-	result = []
+    result = []
 
-	for kw in sorted(keywords, key=Keyword):
-		if kw.startswith('-'):
-			# arch masked
-			kw = pp.keyword(kw, stable=False, hard_masked=True)
-		elif kw.startswith('~'):
-			# keyword masked
-			kw = pp.keyword(kw, stable=False, hard_masked=False)
-		else:
-			# stable
-			kw = pp.keyword(kw, stable=True, hard_masked=False)
-		result.append(kw)
+    for kw in sorted(keywords, key=Keyword):
+        if kw.startswith("-"):
+            # arch masked
+            kw = pp.keyword(kw, stable=False, hard_masked=True)
+        elif kw.startswith("~"):
+            # keyword masked
+            kw = pp.keyword(kw, stable=False, hard_masked=False)
+        else:
+            # stable
+            kw = pp.keyword(kw, stable=True, hard_masked=False)
+        result.append(kw)
 
-	return ' '.join(result)
+    return " ".join(result)
 
 
 def format_keywords_line(pkg, fmtd_keywords, slot, verstr_len):
-	"""Format the entire keywords line for display."""
+    """Format the entire keywords line for display."""
 
-	ver = pkg.fullversion
-	result = "%s:%s: %s" % (ver, pp.slot(slot), fmtd_keywords)
-	if CONFIG['verbose'] and fmtd_keywords:
-		result = format_line(fmtd_keywords, "%s:%s: " % (ver, pp.slot(slot)),
-			" " * (verstr_len + 2))
+    ver = pkg.fullversion
+    result = "%s:%s: %s" % (ver, pp.slot(slot), fmtd_keywords)
+    if CONFIG["verbose"] and fmtd_keywords:
+        result = format_line(
+            fmtd_keywords, "%s:%s: " % (ver, pp.slot(slot)), " " * (verstr_len + 2)
+        )
 
-	return result
+    return result
 
 
 def format_stablereq_line(pkg, fmtd_ccs, slot):
-	"""Format the entire stablereq line for display (no indented linewrapping)
-	"""
-	return "%s:%s: %s" % (pkg.fullversion, pp.slot(slot), fmtd_ccs)
+    """Format the entire stablereq line for display (no indented linewrapping)"""
+    return "%s:%s: %s" % (pkg.fullversion, pp.slot(slot), fmtd_ccs)
 
 
 def format_homepage(homepage):
-	"""format the homepage(s) entries for dispaly"""
-	result = []
-	for page in homepage.split():
-		result.append(format_line(page, "Homepage:    ", " " * 13))
-	return result
+    """Format the homepage(s) entries for display"""
+    result = []
+    for page in homepage.split():
+        result.append(format_line(page, "Homepage:    ", " " * 13))
+    return result
 
 
 def call_format_functions(best_match, matches):
-	"""Call information gathering functions and display the results."""
-
-	if CONFIG['verbose']:
-		repo = best_match.repo_name()
-		pp.uprint(" * %s [%s]" % (pp.cpv(best_match.cp), pp.section(repo)))
-
-	got_opts = False
-	if any(QUERY_OPTS.values()):
-		# Specific information requested, less formatting
-		got_opts = True
-
-	if QUERY_OPTS["herd"] or not got_opts:
-		herds = best_match.metadata.herds(include_email=True)
-		if any(not h[0] for h in herds):
-			print(pp.warn("The packages metadata.xml has an empty <herd> tag"),
-				file = sys.stderr)
-			herds = [x for x in herds if x[0]]
-		herds = format_herds(herds)
-		if QUERY_OPTS["herd"]:
-			print_sequence(format_list(herds))
-		else:
-			for herd in herds:
-				pp.uprint(format_line(herd, "Herd:        ", " " * 13))
-
-	if QUERY_OPTS["maintainer"] or not got_opts:
-		maints = format_maintainers(best_match.metadata.maintainers())
-		if QUERY_OPTS["maintainer"]:
-			print_sequence(format_list(maints))
-		else:
-			if not maints:
-				pp.uprint(format_line([], "Maintainer:  ", " " * 13))
-			else:
-				for maint in maints:
-					pp.uprint(format_line(maint, "Maintainer:  ", " " * 13))
-
-	if QUERY_OPTS["upstream"] or not got_opts:
-		upstream = format_upstream(best_match.metadata.upstream())
-		homepage = format_homepage(best_match.environment("HOMEPAGE"))
-		if QUERY_OPTS["upstream"]:
-			upstream = format_list(upstream)
-		else:
-			upstream = format_list(upstream, "Upstream:    ", " " * 13)
-		print_sequence(upstream)
-		print_sequence(homepage)
-
-	if not got_opts:
-		pkg_loc = best_match.package_path()
-		pp.uprint(format_line(pkg_loc, "Location:    ", " " * 13))
-
-	if QUERY_OPTS["keywords"] or not got_opts:
-		# Get {<Package 'dev-libs/glib-2.20.5'>: [u'ia64', u'm68k', ...], ...}
-		keyword_map = filter_keywords(matches)
-
-		for match in matches:
-			slot = match.environment('SLOT')
-			verstr_len = len(match.fullversion) + len(slot)
-			fmtd_keywords = format_keywords(keyword_map[match])
-			keywords_line = format_keywords_line(
-				match, fmtd_keywords, slot, verstr_len
-			)
-			if QUERY_OPTS["keywords"]:
-				pp.uprint(keywords_line)
-			else:
-				indent = " " * (16 + verstr_len)
-				pp.uprint(format_line(keywords_line, "Keywords:    ", indent))
-
-	if QUERY_OPTS["description"]:
-		desc = best_match.metadata.descriptions()
-		print_sequence(format_list(desc))
-
-	if QUERY_OPTS["useflags"]:
-		useflags = format_useflags(best_match.metadata.use())
-		print_sequence(format_list(useflags))
-
-	if QUERY_OPTS["license"] or not got_opts:
-		_license = best_match.environment(["LICENSE"])
-		if QUERY_OPTS["license"]:
-			_license = format_list(_license)
-		else:
-			_license = format_list(_license, "License:     ", " " * 13)
-		print_sequence(_license)
-
-	if QUERY_OPTS["stablereq"]:
-		# Get {<Package 'dev-libs/glib-2.20.5'>: [u'ia64', u'm68k', ...], ...}
-		stablereq_map = stablereq(matches)
-		for match in matches:
-			slot = match.environment('SLOT')
-			verstr_len = len(match.fullversion) + len(slot)
-			fmtd_ccs = ','.join(sorted(stablereq_map[match]))
-			stablereq_line = format_stablereq_line(
-				match, fmtd_ccs, slot
-			)
-			#print("STABLEREQ:", )
-			pp.uprint(stablereq_line)
-
-	if QUERY_OPTS["xml"]:
-		print_file(os.path.join(best_match.package_path(), 'metadata.xml'))
+    """Call information gathering functions and display the results."""
+
+    if CONFIG["verbose"]:
+        repo = best_match.repo_name()
+        pp.uprint(" * %s [%s]" % (pp.cpv(best_match.cp), pp.section(repo)))
+
+    got_opts = False
+    if any(QUERY_OPTS.values()):
+        # Specific information requested, less formatting
+        got_opts = True
+
+    if QUERY_OPTS["herd"] or not got_opts:
+        herds = best_match.metadata.herds(include_email=True)
+        if any(not h[0] for h in herds):
+            print(
+                pp.warn("The packages metadata.xml has an empty <herd> tag"),
+                file=sys.stderr,
+            )
+            herds = [x for x in herds if x[0]]
+        herds = format_herds(herds)
+        if QUERY_OPTS["herd"]:
+            print_sequence(format_list(herds))
+        else:
+            for herd in herds:
+                pp.uprint(format_line(herd, "Herd:        ", " " * 13))
+
+    if QUERY_OPTS["maintainer"] or not got_opts:
+        maints = format_maintainers(best_match.metadata.maintainers())
+        if QUERY_OPTS["maintainer"]:
+            print_sequence(format_list(maints))
+        else:
+            if not maints:
+                pp.uprint(format_line([], "Maintainer:  ", " " * 13))
+            else:
+                for maint in maints:
+                    pp.uprint(format_line(maint, "Maintainer:  ", " " * 13))
+
+    if QUERY_OPTS["upstream"] or not got_opts:
+        upstream = format_upstream(best_match.metadata.upstream())
+        homepage = format_homepage(best_match.environment("HOMEPAGE"))
+        if QUERY_OPTS["upstream"]:
+            upstream = format_list(upstream)
+        else:
+            upstream = format_list(upstream, "Upstream:    ", " " * 13)
+        print_sequence(upstream)
+        print_sequence(homepage)
+
+    if not got_opts:
+        pkg_loc = best_match.package_path()
+        pp.uprint(format_line(pkg_loc, "Location:    ", " " * 13))
+
+    if QUERY_OPTS["keywords"] or not got_opts:
+        # Get {<Package 'dev-libs/glib-2.20.5'>: [u'ia64', u'm68k', ...], ...}
+        keyword_map = filter_keywords(matches)
+
+        for match in matches:
+            slot = match.environment("SLOT")
+            verstr_len = len(match.fullversion) + len(slot)
+            fmtd_keywords = format_keywords(keyword_map[match])
+            keywords_line = format_keywords_line(match, fmtd_keywords, slot, verstr_len)
+            if QUERY_OPTS["keywords"]:
+                pp.uprint(keywords_line)
+            else:
+                indent = " " * (16 + verstr_len)
+                pp.uprint(format_line(keywords_line, "Keywords:    ", indent))
+
+    if QUERY_OPTS["description"]:
+        desc = best_match.metadata.descriptions()
+        print_sequence(format_list(desc))
+
+    if QUERY_OPTS["useflags"]:
+        useflags = format_useflags(best_match.metadata.use())
+        print_sequence(format_list(useflags))
+
+    if QUERY_OPTS["license"] or not got_opts:
+        _license = best_match.environment(["LICENSE"])
+        if QUERY_OPTS["license"]:
+            _license = format_list(_license)
+        else:
+            _license = format_list(_license, "License:     ", " " * 13)
+        print_sequence(_license)
+
+    if QUERY_OPTS["stablereq"]:
+        # Get {<Package 'dev-libs/glib-2.20.5'>: [u'ia64', u'm68k', ...], ...}
+        stablereq_map = stablereq(matches)
+        for match in matches:
+            slot = match.environment("SLOT")
+            verstr_len = len(match.fullversion) + len(slot)
+            fmtd_ccs = ",".join(sorted(stablereq_map[match]))
+            stablereq_line = format_stablereq_line(match, fmtd_ccs, slot)
+            # print("STABLEREQ:", )
+            pp.uprint(stablereq_line)
+
+    if QUERY_OPTS["xml"]:
+        print_file(os.path.join(best_match.package_path(), "metadata.xml"))
 
 
 def format_line(line, first="", subsequent="", force_quiet=False):
-	"""Wrap a string at word boundaries and optionally indent the first line
-	and/or subsequent lines with custom strings.
-
-	Preserve newlines if the longest line is not longer than
-	CONFIG['termWidth']. To force the preservation of newlines and indents,
-	split the string into a list and feed it to format_line via format_list.
-
-	@see: format_list()
-	@type line: string
-	@param line: text to format
-	@type first: string
-	@param first: text to prepend to the first line
-	@type subsequent: string
-	@param subsequent: text to prepend to subsequent lines
-	@type force_quiet: boolean
-	@rtype: string
-	@return: A wrapped line
-	"""
-
-	if line:
-		line = line.expandtabs().strip("\n").splitlines()
-	else:
-		if force_quiet:
-			return
-		else:
-			return first + "None specified"
-
-	if len(first) > len(subsequent):
-		wider_indent = first
-	else:
-		wider_indent = subsequent
-
-	widest_line_len = len(max(line, key=len)) + len(wider_indent)
-
-	if widest_line_len > CONFIG['termWidth']:
-		twrap = TextWrapper(width=CONFIG['termWidth'], expand_tabs=False,
-			initial_indent=first, subsequent_indent=subsequent)
-		line = " ".join(line)
-		line = re.sub(r"\s+", " ", line)
-		line = line.lstrip()
-		result = twrap.fill(line)
-	else:
-		# line will fit inside CONFIG['termWidth'], so preserve whitespace and
-		# newlines
-		line[0] = first + line[0]          # Avoid two newlines if len == 1
-
-		if len(line) > 1:
-			line[0] = line[0] + "\n"
-			for i in range(1, (len(line[1:-1]) + 1)):
-				line[i] = subsequent + line[i] + "\n"
-			line[-1] = subsequent + line[-1]  # Avoid two newlines on last line
-
-		if line[-1].isspace():
-			del line[-1]                # Avoid trailing blank lines
-
-		result = "".join(line)
-
-	return result
+    """Wrap a string at word boundaries and optionally indent the first line
+    and/or subsequent lines with custom strings.
+
+    Preserve newlines if the longest line is not longer than
+    CONFIG['termWidth']. To force the preservation of newlines and indents,
+    split the string into a list and feed it to format_line via format_list.
+
+    @see: format_list()
+    @type line: string
+    @param line: text to format
+    @type first: string
+    @param first: text to prepend to the first line
+    @type subsequent: string
+    @param subsequent: text to prepend to subsequent lines
+    @type force_quiet: boolean
+    @rtype: string
+    @return: A wrapped line
+    """
+
+    if line:
+        line = line.expandtabs().strip("\n").splitlines()
+    else:
+        if force_quiet:
+            return
+        else:
+            return first + "None specified"
+
+    if len(first) > len(subsequent):
+        wider_indent = first
+    else:
+        wider_indent = subsequent
+
+    widest_line_len = len(max(line, key=len)) + len(wider_indent)
+
+    if widest_line_len > CONFIG["termWidth"]:
+        twrap = TextWrapper(
+            width=CONFIG["termWidth"],
+            expand_tabs=False,
+            initial_indent=first,
+            subsequent_indent=subsequent,
+        )
+        line = " ".join(line)
+        line = re.sub(r"\s+", " ", line)
+        line = line.lstrip()
+        result = twrap.fill(line)
+    else:
+        # line will fit inside CONFIG['termWidth'], so preserve whitespace and
+        # newlines
+        line[0] = first + line[0]  # Avoid two newlines if len == 1
+
+        if len(line) > 1:
+            line[0] = line[0] + "\n"
+            for i in range(1, (len(line[1:-1]) + 1)):
+                line[i] = subsequent + line[i] + "\n"
+            line[-1] = subsequent + line[-1]  # Avoid two newlines on last line
+
+        if line[-1].isspace():
+            del line[-1]  # Avoid trailing blank lines
+
+        result = "".join(line)
+
+    return result
 
 
 def format_list(lst, first="", subsequent="", force_quiet=False):
-	"""Feed elements of a list to format_line().
-
-	@see: format_line()
-	@type lst: list
-	@param lst: list to format
-	@type first: string
-	@param first: text to prepend to the first line
-	@type subsequent: string
-	@param subsequent: text to prepend to subsequent lines
-	@rtype: list
-	@return: list with element text wrapped at CONFIG['termWidth']
-	"""
-
-	result = []
-	if lst:
-		# Format the first line
-		line = format_line(lst[0], first, subsequent, force_quiet)
-		result.append(line)
-		# Format subsequent lines
-		for elem in lst[1:]:
-			if elem:
-				result.append(format_line(elem, subsequent, subsequent,
-					force_quiet))
-			else:
-				# We don't want to send a blank line to format_line()
-				result.append("")
-	else:
-		if CONFIG['verbose']:
-			if force_quiet:
-				result = None
-			else:
-				# Send empty list, we'll get back first + `None specified'
-				result.append(format_line(lst, first, subsequent))
-
-	return result
+    """Feed elements of a list to format_line().
+
+    @see: format_line()
+    @type lst: list
+    @param lst: list to format
+    @type first: string
+    @param first: text to prepend to the first line
+    @type subsequent: string
+    @param subsequent: text to prepend to subsequent lines
+    @rtype: list
+    @return: list with element text wrapped at CONFIG['termWidth']
+    """
+
+    result = []
+    if lst:
+        # Format the first line
+        line = format_line(lst[0], first, subsequent, force_quiet)
+        result.append(line)
+        # Format subsequent lines
+        for elem in lst[1:]:
+            if elem:
+                result.append(format_line(elem, subsequent, subsequent, force_quiet))
+            else:
+                # We don't want to send a blank line to format_line()
+                result.append("")
+    else:
+        if CONFIG["verbose"]:
+            if force_quiet:
+                result = None
+            else:
+                # Send empty list, we'll get back first + `None specified'
+                result.append(format_line(lst, first, subsequent))
+
+    return result
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-d', '--description'):
-			QUERY_OPTS["description"] = True
-		elif opt in ('-H', '--herd'):
-			QUERY_OPTS["herd"] = True
-		elif opt in ('-l', '--license'):
-			QUERY_OPTS["license"] = True
-		elif opt in ('-m', '--maintainer'):
-			QUERY_OPTS["maintainer"] = True
-		elif opt in ('-k', '--keywords'):
-			QUERY_OPTS["keywords"] = True
-		elif opt in ('-S', '--stablereq'):
-			QUERY_OPTS["stablereq"] = True
-		elif opt in ('-u', '--useflags'):
-			QUERY_OPTS["useflags"] = True
-		elif opt in ('-U', '--upstream'):
-			QUERY_OPTS["upstream"] = True
-		elif opt in ('-x', '--xml'):
-			QUERY_OPTS["xml"] = True
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-d", "--description"):
+            QUERY_OPTS["description"] = True
+        elif opt in ("-H", "--herd"):
+            QUERY_OPTS["herd"] = True
+        elif opt in ("-l", "--license"):
+            QUERY_OPTS["license"] = True
+        elif opt in ("-m", "--maintainer"):
+            QUERY_OPTS["maintainer"] = True
+        elif opt in ("-k", "--keywords"):
+            QUERY_OPTS["keywords"] = True
+        elif opt in ("-S", "--stablereq"):
+            QUERY_OPTS["stablereq"] = True
+        elif opt in ("-u", "--useflags"):
+            QUERY_OPTS["useflags"] = True
+        elif opt in ("-U", "--upstream"):
+            QUERY_OPTS["upstream"] = True
+        elif opt in ("-x", "--xml"):
+            QUERY_OPTS["xml"] = True
 
 
 def main(input_args):
-	"""Parse input and run the program."""
-
-	short_opts = "hdHklmrSuUx"
-	long_opts = ('help', 'description', 'herd', 'keywords', 'license',
-		'maintainer', 'reverse', 'stablereq', 'useflags', 'upstream', 'xml')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	# Find queries' Portage directory and throw error if invalid
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	first_run = True
-	for query in (Query(x) for x in queries):
-		best_match = query.find_best()
-		matches = query.find(include_masked=True)
-		if best_match is None or not matches:
-			raise errors.GentoolkitNoMatches(query)
-
-		if best_match.metadata is None:
-			print(pp.warn("Package {0} is missing "
-				"metadata.xml".format(best_match.cpv)),
-				file = sys.stderr)
-			continue
-
-		if not first_run:
-			print()
-
-		matches.sort()
-		matches.sort(reverse=any(name in ('-r', '--reverse')
-		    for name, opt in module_opts))
-		call_format_functions(best_match, matches)
-
-		first_run = False
+    """Parse input and run the program."""
+
+    short_opts = "hdHklmrSuUx"
+    long_opts = (
+        "help",
+        "description",
+        "herd",
+        "keywords",
+        "license",
+        "maintainer",
+        "reverse",
+        "stablereq",
+        "useflags",
+        "upstream",
+        "xml",
+    )
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    # Find queries' Portage directory and throw error if invalid
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    first_run = True
+    for query in (Query(x) for x in queries):
+        best_match = query.find_best()
+        matches = query.find(include_masked=True)
+        if best_match is None or not matches:
+            raise errors.GentoolkitNoMatches(query)
+
+        if best_match.metadata is None:
+            print(
+                pp.warn(
+                    "Package {0} is missing " "metadata.xml".format(best_match.cpv)
+                ),
+                file=sys.stderr,
+            )
+            continue
+
+        if not first_run:
+            print()
+
+        matches.sort()
+        matches.sort(
+            reverse=any(name in ("-r", "--reverse") for name, opt in module_opts)
+        )
+        call_format_functions(best_match, matches)
+
+        first_run = False
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/size.py b/pym/gentoolkit/equery/size.py
index cb570ba..068e701 100644
--- a/pym/gentoolkit/equery/size.py
+++ b/pym/gentoolkit/equery/size.py
@@ -4,7 +4,7 @@
 
 """Print total size of files contained in a given package"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -22,171 +22,176 @@ from gentoolkit.query import Query
 # =======
 
 QUERY_OPTS = {
-	"in_installed": True,
-	"in_porttree": False,
-	"in_overlay": False,
-	"include_masked": True,
-	"is_regex": False,
-	"show_progress": False,
-	"size_in_bytes": False
+    "in_installed": True,
+    "in_porttree": False,
+    "in_overlay": False,
+    "include_masked": True,
+    "is_regex": False,
+    "show_progress": False,
+    "size_in_bytes": False,
 }
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-
-	# Deprecation warning added by djanderson, 12/2008
-	depwarning = (
-		"Default action for this module has changed in Gentoolkit 0.3.",
-		"Use globbing to simulate the old behavior (see man equery).",
-		"Use '*' to check all installed packages.",
-		"Use 'foo-bar/*' to filter by category."
-	)
-	for line in depwarning:
-		sys.stderr.write(pp.warn(line))
-	print()
-
-	print(mod_usage(mod_name="size"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -b, --bytes", "report size in bytes"),
-		(" -f, --full-regex", "query is a regular expression")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+
+    # Deprecation warning added by djanderson, 12/2008
+    depwarning = (
+        "Default action for this module has changed in Gentoolkit 0.3.",
+        "Use globbing to simulate the old behavior (see man equery).",
+        "Use '*' to check all installed packages.",
+        "Use 'foo-bar/*' to filter by category.",
+    )
+    for line in depwarning:
+        sys.stderr.write(pp.warn(line))
+    print()
+
+    print(mod_usage(mod_name="size"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -b, --bytes", "report size in bytes"),
+                (" -f, --full-regex", "query is a regular expression"),
+            )
+        )
+    )
 
 
 def display_size(match_set):
-	"""Display the total size of all accessible files owned by packages.
+    """Display the total size of all accessible files owned by packages.
 
-	@type match_set: list
-	@param match_set: package cat/pkg-ver strings
-	"""
+    @type match_set: list
+    @param match_set: package cat/pkg-ver strings
+    """
 
-	for pkg in match_set:
-		size, files, uncounted = pkg.size()
+    for pkg in match_set:
+        size, files, uncounted = pkg.size()
 
-		if CONFIG['verbose']:
-			pp.uprint(" * %s" % pp.cpv(str(pkg.cpv)))
-			print("Total files : %s".rjust(25) % pp.number(str(files)))
+        if CONFIG["verbose"]:
+            pp.uprint(" * %s" % pp.cpv(str(pkg.cpv)))
+            print("Total files : %s".rjust(25) % pp.number(str(files)))
 
-			if uncounted:
-				print(("Inaccessible files : %s".rjust(25) %
-					pp.number(str(uncounted))))
+            if uncounted:
+                print(("Inaccessible files : %s".rjust(25) % pp.number(str(uncounted))))
 
-			if QUERY_OPTS["size_in_bytes"]:
-				size_str = pp.number(str(size))
-			else:
-				size_str = "%s %s" % format_bytes(size)
+            if QUERY_OPTS["size_in_bytes"]:
+                size_str = pp.number(str(size))
+            else:
+                size_str = "%s %s" % format_bytes(size)
 
-			print("Total size  : %s".rjust(25) % size_str)
-		else:
-			info = "%s: total(%d), inaccessible(%d), size(%s)"
-			pp.uprint(info % (str(pkg.cpv), files, uncounted, size))
+            print("Total size  : %s".rjust(25) % size_str)
+        else:
+            info = "%s: total(%d), inaccessible(%d), size(%s)"
+            pp.uprint(info % (str(pkg.cpv), files, uncounted, size))
 
 
 def format_bytes(bytes_, precision=2):
-	"""Format bytes into human-readable format (IEC naming standard).
+    """Format bytes into human-readable format (IEC naming standard).
 
-	@see: http://mail.python.org/pipermail/python-list/2008-August/503423.html
-	@rtype: tuple
-	@return: (str(num), str(label))
-	"""
+    @see: http://mail.python.org/pipermail/python-list/2008-August/503423.html
+    @rtype: tuple
+    @return: (str(num), str(label))
+    """
 
-	labels = (
-		(1<<40, 'TiB'),
-		(1<<30, 'GiB'),
-		(1<<20, 'MiB'),
-		(1<<10, 'KiB'),
-		(1, 'bytes')
-	)
+    labels = (
+        (1 << 40, "TiB"),
+        (1 << 30, "GiB"),
+        (1 << 20, "MiB"),
+        (1 << 10, "KiB"),
+        (1, "bytes"),
+    )
 
-	if bytes_ == 0:
-		return (pp.number('0'), 'bytes')
-	elif bytes_ == 1:
-		return (pp.number('1'), 'byte')
+    if bytes_ == 0:
+        return (pp.number("0"), "bytes")
+    elif bytes_ == 1:
+        return (pp.number("1"), "byte")
 
-	for factor, label in labels:
-		if not bytes_ >= factor:
-			continue
+    for factor, label in labels:
+        if not bytes_ >= factor:
+            continue
 
-		float_split = str(bytes_/float(factor)).split('.')
-		integer = float_split[0]
-		decimal = float_split[1]
-		if int(decimal[0:precision]):
-			float_string = '.'.join([integer, decimal[0:precision]])
-		else:
-			float_string = integer
+        float_split = str(bytes_ / float(factor)).split(".")
+        integer = float_split[0]
+        decimal = float_split[1]
+        if int(decimal[0:precision]):
+            float_string = ".".join([integer, decimal[0:precision]])
+        else:
+            float_string = integer
 
-		return (pp.number(float_string), label)
+        return (pp.number(float_string), label)
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
-
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-b', '--bytes'):
-			QUERY_OPTS["size_in_bytes"] = True
-		elif opt in ('-e', '--exact-name'):
-			sys.stderr.write(pp.warn("-e, --exact-name is now default."))
-			warning = pp.warn("Use globbing to simulate the old behavior.")
-			sys.stderr.write(warning)
-			print()
-		elif opt in ('-f', '--full-regex'):
-			QUERY_OPTS['is_regex'] = True
+    """Parse module options and update QUERY_OPTS"""
+
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-b", "--bytes"):
+            QUERY_OPTS["size_in_bytes"] = True
+        elif opt in ("-e", "--exact-name"):
+            sys.stderr.write(pp.warn("-e, --exact-name is now default."))
+            warning = pp.warn("Use globbing to simulate the old behavior.")
+            sys.stderr.write(warning)
+            print()
+        elif opt in ("-f", "--full-regex"):
+            QUERY_OPTS["is_regex"] = True
 
 
 def main(input_args):
-	"""Parse input and run the program"""
+    """Parse input and run the program"""
+
+    # -e, --exact-name is no longer needed. Kept for compatibility.
+    # 04/09 djanderson
+    short_opts = "hbfe"
+    long_opts = ("help", "bytes", "full-regex", "exact-name")
 
-	# -e, --exact-name is no longer needed. Kept for compatibility.
-	# 04/09 djanderson
-	short_opts = "hbfe"
-	long_opts = ('help', 'bytes', 'full-regex', 'exact-name')
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
 
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
+    parse_module_options(module_opts)
 
-	parse_module_options(module_opts)
+    if not queries:
+        print_help()
+        sys.exit(2)
 
-	if not queries:
-		print_help()
-		sys.exit(2)
+    first_run = True
+    for query in (Query(x, QUERY_OPTS["is_regex"]) for x in queries):
+        if not first_run:
+            print()
 
-	first_run = True
-	for query in (Query(x, QUERY_OPTS['is_regex']) for x in queries):
-		if not first_run:
-			print()
+        matches = query.smart_find(**QUERY_OPTS)
 
-		matches = query.smart_find(**QUERY_OPTS)
+        if not matches:
+            sys.stderr.write(pp.error("No package found matching %s" % query))
 
-		if not matches:
-			sys.stderr.write(pp.error("No package found matching %s" % query))
+        matches.sort()
 
-		matches.sort()
+        display_size(matches)
 
-		display_size(matches)
+        first_run = False
 
-		first_run = False
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/uses.py b/pym/gentoolkit/equery/uses.py
index dfb6f31..7c90b90 100644
--- a/pym/gentoolkit/equery/uses.py
+++ b/pym/gentoolkit/equery/uses.py
@@ -4,7 +4,7 @@
 
 """Display USE flags for a given package"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -31,288 +31,294 @@ from gentoolkit.flag import get_flags, reduce_flags
 # Globals
 # =======
 
-QUERY_OPTS = {"all_versions" : False, "ignore_l10n" : False}
+QUERY_OPTS = {"all_versions": False, "ignore_l10n": False}
 
 # =========
 # Functions
 # =========
 
-def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
 
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name=__name__.split('.')[-1]))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -a, --all", "include all package versions"),
-		(" -i, --ignore-l10n", "don't show l10n USE flags")
-	)))
+def print_help(with_description=True):
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name=__name__.split(".")[-1]))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -a, --all", "include all package versions"),
+                (" -i, --ignore-l10n", "don't show l10n USE flags"),
+            )
+        )
+    )
 
 
 def display_useflags(output):
-	"""Print USE flag descriptions and statuses.
-
-	@type output: list
-	@param output: [(inuse, inused, flag, desc, restrict), ...]
-		inuse (int) = 0 or 1; if 1, flag is set in make.conf
-		inused (int) = 0 or 1; if 1, package is installed with flag enabled
-		flag (str) = the name of the USE flag
-		desc (str) = the flag's description
-		restrict (str) = corresponds to the text of restrict in metadata
-	"""
-
-	maxflag_len = len(max([t[2] for t in output], key=len))
-
-	twrap = TextWrapper()
-	twrap.width = CONFIG['termWidth']
-	twrap.subsequent_indent = " " * (maxflag_len + 8)
-
-	markers = ("-", "+")
-	color = (
-		partial(pp.useflag, enabled=False), partial(pp.useflag, enabled=True)
-	)
-	for in_makeconf, in_installed, flag, desc, restrict in output:
-		if CONFIG['verbose']:
-			flag_name = ""
-			if in_makeconf != in_installed:
-				flag_name += pp.emph(" %s %s" %
-					(markers[in_makeconf], markers[in_installed]))
-			else:
-				flag_name += (" %s %s" %
-					(markers[in_makeconf], markers[in_installed]))
-
-			flag_name += " " + color[in_makeconf](flag.ljust(maxflag_len))
-			flag_name += " : "
-
-			# Strip initial whitespace at the start of the description
-			# Bug 432530
-			if desc:
-				desc = desc.lstrip()
-
-			# print description
-			if restrict:
-				restrict = "(%s %s)" % (pp.emph("Restricted to"),
-					pp.cpv(restrict))
-				twrap.initial_indent = flag_name
-				pp.uprint(twrap.fill(restrict))
-				if desc:
-					twrap.initial_indent = twrap.subsequent_indent
-					pp.uprint(twrap.fill(desc))
-				else:
-					print(" : <unknown>")
-			else:
-				if desc:
-					twrap.initial_indent = flag_name
-					desc = twrap.fill(desc)
-					pp.uprint(desc)
-				else:
-					twrap.initial_indent = flag_name
-					print(twrap.fill("<unknown>"))
-		else:
-			pp.uprint(markers[in_makeconf] + flag)
+    """Print USE flag descriptions and statuses.
+
+    @type output: list
+    @param output: [(inuse, inused, flag, desc, restrict), ...]
+            inuse (int) = 0 or 1; if 1, flag is set in make.conf
+            inused (int) = 0 or 1; if 1, package is installed with flag enabled
+            flag (str) = the name of the USE flag
+            desc (str) = the flag's description
+            restrict (str) = corresponds to the text of restrict in metadata
+    """
+
+    maxflag_len = len(max([t[2] for t in output], key=len))
+
+    twrap = TextWrapper()
+    twrap.width = CONFIG["termWidth"]
+    twrap.subsequent_indent = " " * (maxflag_len + 8)
+
+    markers = ("-", "+")
+    color = (partial(pp.useflag, enabled=False), partial(pp.useflag, enabled=True))
+    for in_makeconf, in_installed, flag, desc, restrict in output:
+        if CONFIG["verbose"]:
+            flag_name = ""
+            if in_makeconf != in_installed:
+                flag_name += pp.emph(
+                    " %s %s" % (markers[in_makeconf], markers[in_installed])
+                )
+            else:
+                flag_name += " %s %s" % (markers[in_makeconf], markers[in_installed])
+
+            flag_name += " " + color[in_makeconf](flag.ljust(maxflag_len))
+            flag_name += " : "
+
+            # Strip initial whitespace at the start of the description
+            # Bug 432530
+            if desc:
+                desc = desc.lstrip()
+
+            # print description
+            if restrict:
+                restrict = "(%s %s)" % (pp.emph("Restricted to"), pp.cpv(restrict))
+                twrap.initial_indent = flag_name
+                pp.uprint(twrap.fill(restrict))
+                if desc:
+                    twrap.initial_indent = twrap.subsequent_indent
+                    pp.uprint(twrap.fill(desc))
+                else:
+                    print(" : <unknown>")
+            else:
+                if desc:
+                    twrap.initial_indent = flag_name
+                    desc = twrap.fill(desc)
+                    pp.uprint(desc)
+                else:
+                    twrap.initial_indent = flag_name
+                    print(twrap.fill("<unknown>"))
+        else:
+            pp.uprint(markers[in_makeconf] + flag)
 
 
 def get_global_useflags():
-	"""Get global and expanded USE flag variables from
-	PORTDIR/profiles/use.desc and PORTDIR/profiles/desc/*.desc respectively.
-
-	@rtype: dict
-	@return: {'flag_name': 'flag description', ...}
-	"""
-
-	global_usedesc = {}
-	# Get global USE flag descriptions
-	try:
-		path = os.path.join(settings["PORTDIR"], 'profiles', 'use.desc')
-		with open(_unicode_encode(path, encoding=_encodings['fs']),
-				encoding=_encodings['content']) as open_file:
-			for line in open_file:
-				if line.startswith('#'):
-					continue
-				# Ex. of fields: ['syslog', 'Enables support for syslog\n']
-				fields = line.split(" - ", 1)
-				if len(fields) == 2:
-					global_usedesc[fields[0]] = fields[1].rstrip()
-	except IOError:
-		sys.stderr.write(
-			pp.warn(
-				"Could not load USE flag descriptions from %s" % pp.path(path)
-			)
-		)
-
-	del path, open_file
-	# Add USE_EXPANDED variables to usedesc hash -- Bug #238005
-	for path in glob(os.path.join(settings["PORTDIR"],
-		'profiles', 'desc', '*.desc')):
-		try:
-			with open(_unicode_encode(path, encoding=_encodings['fs']),
-					encoding=_encodings['content']) as open_file:
-				for line in open_file:
-					if line.startswith('#'):
-						continue
-					fields = [field.strip() for field in line.split(" - ", 1)]
-					if len(fields) == 2:
-						expanded_useflag = "%s_%s" % \
-							(path.split("/")[-1][0:-5], fields[0])
-						global_usedesc[expanded_useflag] = fields[1]
-		except IOError:
-			sys.stderr.write(
-				pp.warn("Could not load USE flag descriptions from %s" % path)
-			)
-
-	return global_usedesc
+    """Get global and expanded USE flag variables from
+    PORTDIR/profiles/use.desc and PORTDIR/profiles/desc/*.desc respectively.
+
+    @rtype: dict
+    @return: {'flag_name': 'flag description', ...}
+    """
+
+    global_usedesc = {}
+    # Get global USE flag descriptions
+    try:
+        path = os.path.join(settings["PORTDIR"], "profiles", "use.desc")
+        with open(
+            _unicode_encode(path, encoding=_encodings["fs"]),
+            encoding=_encodings["content"],
+        ) as open_file:
+            for line in open_file:
+                if line.startswith("#"):
+                    continue
+                # Ex. of fields: ['syslog', 'Enables support for syslog\n']
+                fields = line.split(" - ", 1)
+                if len(fields) == 2:
+                    global_usedesc[fields[0]] = fields[1].rstrip()
+    except IOError:
+        sys.stderr.write(
+            pp.warn("Could not load USE flag descriptions from %s" % pp.path(path))
+        )
+
+    del path, open_file
+    # Add USE_EXPANDED variables to usedesc hash -- Bug #238005
+    for path in glob(os.path.join(settings["PORTDIR"], "profiles", "desc", "*.desc")):
+        try:
+            with open(
+                _unicode_encode(path, encoding=_encodings["fs"]),
+                encoding=_encodings["content"],
+            ) as open_file:
+                for line in open_file:
+                    if line.startswith("#"):
+                        continue
+                    fields = [field.strip() for field in line.split(" - ", 1)]
+                    if len(fields) == 2:
+                        expanded_useflag = "%s_%s" % (
+                            path.split("/")[-1][0:-5],
+                            fields[0],
+                        )
+                        global_usedesc[expanded_useflag] = fields[1]
+        except IOError:
+            sys.stderr.write(
+                pp.warn("Could not load USE flag descriptions from %s" % path)
+            )
+
+    return global_usedesc
 
 
 def get_output_descriptions(pkg, global_usedesc):
-	"""Prepare descriptions and usage information for each USE flag."""
-
-	if pkg.metadata is None:
-		local_usedesc = []
-	else:
-		local_usedesc = pkg.metadata.use()
-
-	iuse, final_use = get_flags(pkg.cpv, final_setting=True)
-	usevar = reduce_flags(iuse)
-	usevar.sort()
-
-	if QUERY_OPTS['ignore_l10n']:
-		for a in usevar[:]:
-			#TODO: Remove linguas after transition to l10n is complete
-			if a.startswith("l10n_") or a.startswith("linguas_"):
-				usevar.remove(a)
-
-
-	if pkg.is_installed():
-		used_flags = pkg.use().split()
-	else:
-		used_flags = settings["USE"].split()
-
-	# store (inuse, inused, flag, desc, restrict)
-	output = []
-	for flag in usevar:
-		inuse = False
-		inused = False
-
-		local_use = None
-		for use in local_usedesc:
-			if use.name == flag:
-				local_use = use
-				break
-
-		try:
-			desc = local_use.description
-		except AttributeError:
-			try:
-				desc = global_usedesc[flag]
-			except KeyError:
-				desc = ""
-
-		try:
-			restrict = local_use.restrict
-			restrict = restrict if restrict is not None else ""
-		except AttributeError:
-			restrict = ""
-
-		if flag in final_use:
-			inuse = True
-		if flag in used_flags:
-			inused = True
-
-		output.append((inuse, inused, flag, desc, restrict))
-
-	return output
+    """Prepare descriptions and usage information for each USE flag."""
+
+    if pkg.metadata is None:
+        local_usedesc = []
+    else:
+        local_usedesc = pkg.metadata.use()
+
+    iuse, final_use = get_flags(pkg.cpv, final_setting=True)
+    usevar = reduce_flags(iuse)
+    usevar.sort()
+
+    if QUERY_OPTS["ignore_l10n"]:
+        for a in usevar[:]:
+            # TODO: Remove linguas after transition to l10n is complete
+            if a.startswith("l10n_") or a.startswith("linguas_"):
+                usevar.remove(a)
+
+    if pkg.is_installed():
+        used_flags = pkg.use().split()
+    else:
+        used_flags = settings["USE"].split()
+
+    # store (inuse, inused, flag, desc, restrict)
+    output = []
+    for flag in usevar:
+        inuse = False
+        inused = False
+
+        local_use = None
+        for use in local_usedesc:
+            if use.name == flag:
+                local_use = use
+                break
+
+        try:
+            desc = local_use.description
+        except AttributeError:
+            try:
+                desc = global_usedesc[flag]
+            except KeyError:
+                desc = ""
+
+        try:
+            restrict = local_use.restrict
+            restrict = restrict if restrict is not None else ""
+        except AttributeError:
+            restrict = ""
+
+        if flag in final_use:
+            inuse = True
+        if flag in used_flags:
+            inused = True
+
+        output.append((inuse, inused, flag, desc, restrict))
+
+    return output
 
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
+    """Parse module options and update QUERY_OPTS"""
 
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-a', '--all'):
-			QUERY_OPTS['all_versions'] = True
-		elif opt in ('-i', '--ignore-l10n'):
-			QUERY_OPTS['ignore_l10n'] = True
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-a", "--all"):
+            QUERY_OPTS["all_versions"] = True
+        elif opt in ("-i", "--ignore-l10n"):
+            QUERY_OPTS["ignore_l10n"] = True
 
 
 def print_legend():
-	"""Print a legend to explain the output format."""
+    """Print a legend to explain the output format."""
 
-	print("[ Legend : %s - final flag setting for installation]" % pp.emph("U"))
-	print("[        : %s - package is installed with flag     ]" % pp.emph("I"))
-	print("[ Colors : %s, %s                             ]" % (
-		pp.useflag("set", enabled=True), pp.useflag("unset", enabled=False)))
+    print("[ Legend : %s - final flag setting for installation]" % pp.emph("U"))
+    print("[        : %s - package is installed with flag     ]" % pp.emph("I"))
+    print(
+        "[ Colors : %s, %s                             ]"
+        % (pp.useflag("set", enabled=True), pp.useflag("unset", enabled=False))
+    )
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hai"
-	long_opts = ('help', 'all', 'ignore-l10n')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	#
-	# Output
-	#
-
-	first_run = True
-	legend_printed = False
-	for query in (Query(x) for x in queries):
-		if not first_run:
-			print()
-
-		if QUERY_OPTS["all_versions"]:
-			matches = query.find(include_masked=True)
-		else:
-			matches = [query.find_best()]
-
-		if not any(matches):
-			raise errors.GentoolkitNoMatches(query)
-
-		matches.sort()
-
-		global_usedesc = get_global_useflags()
-		for pkg in matches:
-
-			output = get_output_descriptions(pkg, global_usedesc)
-			if output:
-				if CONFIG['verbose']:
-					if not legend_printed:
-						print_legend()
-						legend_printed = True
-					print((" * Found these USE flags for %s:" %
-						pp.cpv(str(pkg.cpv))))
-					print(pp.emph(" U I"))
-				display_useflags(output)
-			else:
-				if CONFIG['verbose']:
-					sys.stderr.write(
-						pp.warn("No USE flags found for %s" % pp.cpv(pkg.cpv))
-					)
-
-		first_run = False
+    """Parse input and run the program"""
+
+    short_opts = "hai"
+    long_opts = ("help", "all", "ignore-l10n")
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    #
+    # Output
+    #
+
+    first_run = True
+    legend_printed = False
+    for query in (Query(x) for x in queries):
+        if not first_run:
+            print()
+
+        if QUERY_OPTS["all_versions"]:
+            matches = query.find(include_masked=True)
+        else:
+            matches = [query.find_best()]
+
+        if not any(matches):
+            raise errors.GentoolkitNoMatches(query)
+
+        matches.sort()
+
+        global_usedesc = get_global_useflags()
+        for pkg in matches:
+
+            output = get_output_descriptions(pkg, global_usedesc)
+            if output:
+                if CONFIG["verbose"]:
+                    if not legend_printed:
+                        print_legend()
+                        legend_printed = True
+                    print((" * Found these USE flags for %s:" % pp.cpv(str(pkg.cpv))))
+                    print(pp.emph(" U I"))
+                display_useflags(output)
+            else:
+                if CONFIG["verbose"]:
+                    sys.stderr.write(
+                        pp.warn("No USE flags found for %s" % pp.cpv(pkg.cpv))
+                    )
+
+        first_run = False
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/equery/which.py b/pym/gentoolkit/equery/which.py
index c7fabd7..0b726dd 100644
--- a/pym/gentoolkit/equery/which.py
+++ b/pym/gentoolkit/equery/which.py
@@ -6,7 +6,7 @@
 configuration
 """
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -28,94 +28,98 @@ from portage import _encodings, _unicode_encode
 # Globals
 # =======
 
-QUERY_OPTS = {
-	"include_masked": False,
-	"ebuild":False
-	}
+QUERY_OPTS = {"include_masked": False, "ebuild": False}
 
 # =========
 # Functions
 # =========
 
+
 def print_help(with_description=True):
-	"""Print description, usage and a detailed help message.
-
-	@type with_description: bool
-	@param with_description: if true, print module's __doc__ string
-	"""
-
-	if with_description:
-		print(__doc__.strip())
-		print()
-	print(mod_usage(mod_name="which"))
-	print()
-	print(pp.command("options"))
-	print(format_options((
-		(" -h, --help", "display this help message"),
-		(" -m, --include-masked", "return highest version ebuild available"),
-		(" -e, --ebuild", "print the ebuild")
-	)))
+    """Print description, usage and a detailed help message.
+
+    @type with_description: bool
+    @param with_description: if true, print module's __doc__ string
+    """
+
+    if with_description:
+        print(__doc__.strip())
+        print()
+    print(mod_usage(mod_name="which"))
+    print()
+    print(pp.command("options"))
+    print(
+        format_options(
+            (
+                (" -h, --help", "display this help message"),
+                (" -m, --include-masked", "return highest version ebuild available"),
+                (" -e, --ebuild", "print the ebuild"),
+            )
+        )
+    )
+
 
 def print_ebuild(ebuild_path):
-	"""Output the ebuild to std_out"""
-	with open(_unicode_encode(ebuild_path, encoding=_encodings['fs']),
-			encoding=_encodings['content']) as f:
-		lines = f.readlines()
-		print("\n\n")
-		print("".join(lines))
-		print("\n")
+    """Output the ebuild to std_out"""
+    with open(
+        _unicode_encode(ebuild_path, encoding=_encodings["fs"]),
+        encoding=_encodings["content"],
+    ) as f:
+        lines = f.readlines()
+        print("\n\n")
+        print("".join(lines))
+        print("\n")
+
 
 def parse_module_options(module_opts):
-	"""Parse module options and update QUERY_OPTS"""
+    """Parse module options and update QUERY_OPTS"""
 
-	opts = (x[0] for x in module_opts)
-	for opt in opts:
-		if opt in ('-h', '--help'):
-			print_help()
-			sys.exit(0)
-		elif opt in ('-m', '--include-masked'):
-			QUERY_OPTS['include_masked'] = True
-		elif opt in ('-e', '--ebuild'):
-			QUERY_OPTS['ebuild'] = True
+    opts = (x[0] for x in module_opts)
+    for opt in opts:
+        if opt in ("-h", "--help"):
+            print_help()
+            sys.exit(0)
+        elif opt in ("-m", "--include-masked"):
+            QUERY_OPTS["include_masked"] = True
+        elif opt in ("-e", "--ebuild"):
+            QUERY_OPTS["ebuild"] = True
 
 
 def main(input_args):
-	"""Parse input and run the program"""
-
-	short_opts = "hme"
-	long_opts = ('help', 'include-masked', 'ebuild')
-
-	try:
-		module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
-	except GetoptError as err:
-		sys.stderr.write(pp.error("Module %s" % err))
-		print()
-		print_help(with_description=False)
-		sys.exit(2)
-
-	parse_module_options(module_opts)
-
-	if not queries:
-		print_help()
-		sys.exit(2)
-
-	for query in (Query(x) for x in queries):
-		matches = query.find(
-			include_masked=QUERY_OPTS['include_masked'],
-			in_installed=False
-		)
-		if matches:
-			pkg = sorted(matches).pop()
-			ebuild_path = pkg.ebuild_path()
-			if ebuild_path:
-				pp.uprint(os.path.normpath(ebuild_path))
-				if QUERY_OPTS['ebuild']:
-					print_ebuild(ebuild_path)
-			else:
-				sys.stderr.write(
-					pp.warn("No ebuilds to satisfy %s" % pkg.cpv)
-				)
-		else:
-			raise errors.GentoolkitNoMatches(query)
+    """Parse input and run the program"""
+
+    short_opts = "hme"
+    long_opts = ("help", "include-masked", "ebuild")
+
+    try:
+        module_opts, queries = gnu_getopt(input_args, short_opts, long_opts)
+    except GetoptError as err:
+        sys.stderr.write(pp.error("Module %s" % err))
+        print()
+        print_help(with_description=False)
+        sys.exit(2)
+
+    parse_module_options(module_opts)
+
+    if not queries:
+        print_help()
+        sys.exit(2)
+
+    for query in (Query(x) for x in queries):
+        matches = query.find(
+            include_masked=QUERY_OPTS["include_masked"], in_installed=False
+        )
+        if matches:
+            pkg = sorted(matches).pop()
+            ebuild_path = pkg.ebuild_path()
+            if ebuild_path:
+                pp.uprint(os.path.normpath(ebuild_path))
+                if QUERY_OPTS["ebuild"]:
+                    print_ebuild(ebuild_path)
+            else:
+                sys.stderr.write(pp.warn("No ebuilds to satisfy %s" % pkg.cpv))
+        else:
+            raise errors.GentoolkitNoMatches(query)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/errors.py b/pym/gentoolkit/errors.py
index 15fef24..ffb79e0 100644
--- a/pym/gentoolkit/errors.py
+++ b/pym/gentoolkit/errors.py
@@ -5,153 +5,170 @@
 """Exception classes for gentoolkit"""
 
 __all__ = (
-	'GentoolkitException',
-	'GentoolkitFatalError',
-	'GentoolkitAmbiguousPackage',
-	'GentoolkitInvalidAtom',
-	'GentoolkitInvalidCategory',
-	'GentoolkitInvalidPackage',
-	'GentoolkitInvalidCPV',
-	'GentoolkitInvalidRegex',
-	'GentoolkitInvalidVersion',
-	'GentoolkitNoMatches',
-	'GentoolkitSetNotFound',
-	'GentoolkitUnknownKeyword',
-	'GentoolkitNonZeroExit'
+    "GentoolkitException",
+    "GentoolkitFatalError",
+    "GentoolkitAmbiguousPackage",
+    "GentoolkitInvalidAtom",
+    "GentoolkitInvalidCategory",
+    "GentoolkitInvalidPackage",
+    "GentoolkitInvalidCPV",
+    "GentoolkitInvalidRegex",
+    "GentoolkitInvalidVersion",
+    "GentoolkitNoMatches",
+    "GentoolkitSetNotFound",
+    "GentoolkitUnknownKeyword",
+    "GentoolkitNonZeroExit",
 )
 
 # ==========
 # Exceptions
 # ==========
 
+
 class GentoolkitException(Exception):
-	"""Base class for gentoolkit exceptions."""
-	def __init__(self, is_serious=True):
-		self.is_serious = is_serious
+    """Base class for gentoolkit exceptions."""
+
+    def __init__(self, is_serious=True):
+        self.is_serious = is_serious
 
 
 class GentoolkitFatalError(GentoolkitException):
-	"""A fatal error occurred. Usually used to catch Portage exceptions."""
-	def __init__(self, err, is_serious=True):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.err = err
+    """A fatal error occurred. Usually used to catch Portage exceptions."""
 
-	def __str__(self):
-		return "Fatal error: %s" % self.err
+    def __init__(self, err, is_serious=True):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.err = err
+
+    def __str__(self):
+        return "Fatal error: %s" % self.err
 
 
 class GentoolkitAmbiguousPackage(GentoolkitException):
-	"""Got an ambiguous package name."""
-	def __init__(self, choices, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.choices = choices
+    """Got an ambiguous package name."""
+
+    def __init__(self, choices, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.choices = choices
 
-	def __str__(self):
-		choices = '\n'.join("  %s" % x for x in self.choices)
-		return '\n'.join(("Ambiguous package name. Choose from:", choices))
+    def __str__(self):
+        choices = "\n".join("  %s" % x for x in self.choices)
+        return "\n".join(("Ambiguous package name. Choose from:", choices))
 
 
 class GentoolkitInvalidAtom(GentoolkitException):
-	"""Got a malformed package atom."""
-	def __init__(self, atom, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.atom = atom
+    """Got a malformed package atom."""
 
-	def __str__(self):
-		return "Invalid atom: '%s'" % self.atom
+    def __init__(self, atom, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.atom = atom
+
+    def __str__(self):
+        return "Invalid atom: '%s'" % self.atom
 
 
 class GentoolkitSetNotFound(GentoolkitException):
-	"""Got unknown set."""
-	def __init__(self, setname, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.setname = setname
+    """Got unknown set."""
+
+    def __init__(self, setname, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.setname = setname
 
-	def __str__(self):
-		return "Unknown set: '%s'" % self.setname
+    def __str__(self):
+        return "Unknown set: '%s'" % self.setname
 
 
 class GentoolkitInvalidCategory(GentoolkitException):
-	"""The category was not listed in portage.settings.categories."""
-	def __init__(self, category, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.category = category
+    """The category was not listed in portage.settings.categories."""
 
-	def __str__(self):
-		return "Invalid category: '%s'" % self.category
+    def __init__(self, category, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.category = category
+
+    def __str__(self):
+        return "Invalid category: '%s'" % self.category
 
 
 class GentoolkitInvalidPackage(GentoolkitException):
-	"""Got an unknown or invalid package."""
-	def __init__(self, package, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.package = package
+    """Got an unknown or invalid package."""
+
+    def __init__(self, package, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.package = package
 
-	def __str__(self):
-		return "Invalid package: '%s'" % self.package
+    def __str__(self):
+        return "Invalid package: '%s'" % self.package
 
 
 class GentoolkitInvalidCPV(GentoolkitException):
-	"""Got an invalid category/package-ver string."""
-	def __init__(self, cpv, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.cpv = cpv
+    """Got an invalid category/package-ver string."""
 
-	def __str__(self):
-		return "Invalid CPV: '%s'" % self.cpv
+    def __init__(self, cpv, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.cpv = cpv
+
+    def __str__(self):
+        return "Invalid CPV: '%s'" % self.cpv
 
 
 class GentoolkitInvalidRegex(GentoolkitException):
-	"""The regex could not be compiled."""
-	def __init__(self, regex, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.regex = regex
+    """The regex could not be compiled."""
+
+    def __init__(self, regex, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.regex = regex
 
-	def __str__(self):
-		return "Invalid regex: '%s'" % self.regex
+    def __str__(self):
+        return "Invalid regex: '%s'" % self.regex
 
 
 class GentoolkitInvalidVersion(GentoolkitException):
-	"""Got a malformed version."""
-	def __init__(self, version, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.version = version
+    """Got a malformed version."""
 
-	def __str__(self):
-		return "Malformed version: '%s'" % self.version
+    def __init__(self, version, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.version = version
+
+    def __str__(self):
+        return "Malformed version: '%s'" % self.version
 
 
 class GentoolkitNoMatches(GentoolkitException):
-	"""No packages were found matching the search query."""
-	def __init__(self, query, in_installed=False, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.query = query
-		self.in_installed = in_installed
+    """No packages were found matching the search query."""
+
+    def __init__(self, query, in_installed=False, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.query = query
+        self.in_installed = in_installed
 
-	def __str__(self):
-		inst = 'installed ' if self.in_installed else ''
-		return "No %spackages matching '%s'" % (inst, self.query)
+    def __str__(self):
+        inst = "installed " if self.in_installed else ""
+        return "No %spackages matching '%s'" % (inst, self.query)
 
 
 class GentoolkitUnknownKeyword(GentoolkitException):
-	"""No packages were found matching the search query."""
-	def __init__(self, query, keywords, use, is_serious=True):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.query = query
-		self.keywords = keywords
-		self.use = use
+    """No packages were found matching the search query."""
 
-	def __str__(self):
-		return ("Unable to determine the install keyword for:\n" +
-			"'%s', KEYWORDS = '%s'\nUSE flags = '%s'"
-			% (self.query, self.keywords, self.use))
+    def __init__(self, query, keywords, use, is_serious=True):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.query = query
+        self.keywords = keywords
+        self.use = use
+
+    def __str__(self):
+        return (
+            "Unable to determine the install keyword for:\n"
+            + "'%s', KEYWORDS = '%s'\nUSE flags = '%s'"
+            % (self.query, self.keywords, self.use)
+        )
 
 
 class GentoolkitNonZeroExit(GentoolkitException):
-	"""Used to signal, that a non-fatal, no warning error occurred.
-	   The primary use case is for not returning any data."""
-	def __init__(self, return_code=1, is_serious=False):
-		GentoolkitException.__init__(self, is_serious=is_serious)
-		self.return_code = return_code
+    """Used to signal, that a non-fatal, no warning error occurred.
+    The primary use case is for not returning any data."""
+
+    def __init__(self, return_code=1, is_serious=False):
+        GentoolkitException.__init__(self, is_serious=is_serious)
+        self.return_code = return_code
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/eshowkw/__init__.py b/pym/gentoolkit/eshowkw/__init__.py
index 707c266..ba6c076 100644
--- a/pym/gentoolkit/eshowkw/__init__.py
+++ b/pym/gentoolkit/eshowkw/__init__.py
@@ -1,8 +1,8 @@
-#	vim:fileencoding=utf-8
+# 	vim:fileencoding=utf-8
 # Copyright 2010-2016 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
-__package__ = 'gentoolkit.eshowkw'
+__package__ = "gentoolkit.eshowkw"
 __version__ = "git"
 __author__ = "Tomáš Chvátal <scarabeus@gentoo.org>"
 
@@ -20,137 +20,193 @@ from gentoolkit.eshowkw.display_pretty import display
 
 ignore_slots = False
 bold = False
-order = 'bottom'
-topper = 'versionlist'
+order = "bottom"
+topper = "versionlist"
+
 
 def process_display(package, keywords, dbapi):
 
-	portdata = keywords_content(package, keywords.keywords, dbapi, ignore_slots, order, bold, topper)
-	if topper == 'archlist':
-		header = string_rotator().rotateContent(keywords.content, keywords.length, bold)
-		extra = string_rotator().rotateContent(keywords.extra, keywords.length, bold, False)
-		# -1 : space is taken in account and appended by us
-		filler = ''.ljust(portdata.slot_length-1)
-		header = ['%s%s%s' % (x, filler, y) for x, y in zip(header, extra)]
-		content = portdata.content
-		header_length = portdata.version_length
-		content_length = keywords.length
-	else:
-		header = string_rotator().rotateContent(portdata.content, portdata.content_length, bold)
-		content = keywords.content
-		sep = [''.ljust(keywords.length) for x in range(portdata.slot_length-1)]
-		content.extend(sep)
-		content.extend(keywords.extra)
-		header_length = keywords.length
-		content_length = portdata.version_length
-	display(content, header, header_length, content_length, portdata.cp, topper)
+    portdata = keywords_content(
+        package, keywords.keywords, dbapi, ignore_slots, order, bold, topper
+    )
+    if topper == "archlist":
+        header = string_rotator().rotateContent(keywords.content, keywords.length, bold)
+        extra = string_rotator().rotateContent(
+            keywords.extra, keywords.length, bold, False
+        )
+        # -1 : space is taken in account and appended by us
+        filler = "".ljust(portdata.slot_length - 1)
+        header = ["%s%s%s" % (x, filler, y) for x, y in zip(header, extra)]
+        content = portdata.content
+        header_length = portdata.version_length
+        content_length = keywords.length
+    else:
+        header = string_rotator().rotateContent(
+            portdata.content, portdata.content_length, bold
+        )
+        content = keywords.content
+        sep = ["".ljust(keywords.length) for x in range(portdata.slot_length - 1)]
+        content.extend(sep)
+        content.extend(keywords.extra)
+        header_length = keywords.length
+        content_length = portdata.version_length
+    display(content, header, header_length, content_length, portdata.cp, topper)
+
 
 def process_args(argv):
-	"""Option parsing via argc"""
-	parser = argparse.ArgumentParser(prog=__package__,
-		formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-		description='Display keywords for specified package or for package that is in pwd.')
-
-	parser.add_argument('-v', '--version', action='version', version=__version__, help='show package version and exit')
-
-	parser.add_argument('package', nargs='*', default=None, help='Packages to check.')
-
-	parser.add_argument('-a', '--arch', nargs=1, default=[], help='Display only specified arch(s)')
-
-	parser.add_argument('-A', '--align', nargs='?', default='bottom', choices=['top', 'bottom'],
-		help='Specify alignment for descriptions.')
-	parser.add_argument('-T', '--top-position', nargs='?', default='archlist', choices=['archlist', 'versionlist'],
-		help='Specify which fields we want to have in top listing.')
-
-	parser.add_argument('-B', '--bold', action='store_true', default=False,
-		help='Print out each other column in bold for easier visual separation.')
-	parser.add_argument('-C', '--color', action='store_true', default=False,
-		help='Force colored output')
-	parser.add_argument('-O', '--overlays', action='store_true', default=False,
-		help='Search also overlays')
-	parser.add_argument('-P', '--prefix', action='store_true', default=False,
-		help='Display prefix keywords in output.')
-	parser.add_argument('-S', '--ignore-slot', action='store_true', default=False,
-		help='Treat slots as irelevant during detection of redundant packages.')
-
-	return parser.parse_args(args=argv)
-
-def main(argv, indirect = False):
-	global ignore_slots, bold, order, topper
-
-	#opts parsing
-	opts = process_args(argv)
-	ignore_slots = opts.ignore_slot
-	use_overlays = opts.overlays
-	highlight_arch = ''.join(opts.arch).split(',')
-	bold = opts.bold
-	order = opts.align
-	topper = opts.top_position
-	prefix = opts.prefix
-	color = opts.color
-	package = opts.package
-
-	# equery support
-	if indirect and len(package) <= 0:
-		msg_err = 'No packages specified'
-		raise SystemExit(msg_err)
-
-	# disable colors when redirected and they are not forced on
-	if not color and not sys.stdout.isatty():
-		# disable colors
-		porto.nocolor()
-
-	# Imply prefix if user specified any architectures (Bug 578496)
-	if len(opts.arch) > 0:
-		prefix = True
-
-	keywords = keywords_header(prefix, highlight_arch, order)
-	if len(package) > 0:
-		mysettings = portc(local_config=False)
-		dbapi = portdbapi(mysettings=mysettings)
-		if not use_overlays:
-			dbapi.porttrees = [dbapi.porttree_root]
-		for pkg in package:
-			process_display(pkg, keywords, dbapi)
-	else:
-		currdir = os.getcwd()
-		# check if there are actualy some ebuilds
-		ebuilds = ['%s' % x for x in os.listdir(currdir)
-			if fnmatch.fnmatch(x, '*.ebuild')]
-		if len(ebuilds) <= 0:
-			msg_err = 'No ebuilds at "%s"' % currdir
-			raise SystemExit(msg_err)
-		package= '%s/%s' % (os.path.basename(os.path.abspath('../')), os.path.basename(currdir))
-		ourtree = os.path.realpath('../..')
-		ourstat = os.stat(ourtree)
-		ourstat = (ourstat.st_ino, ourstat.st_dev)
-		for repo in ports.repositories:
-			try:
-				repostat = os.stat(repo.location)
-			except OSError:
-				continue
-			if ourstat == (repostat.st_ino, repostat.st_dev):
-				dbapi = portdbapi(mysettings=portc(local_config=False))
-				break
-		else:
-			repos = {}
-			for repo in ports.repositories:
-				repos[repo.name] = repo.location
-
-			with open(os.path.join(ourtree, 'profiles', 'repo_name'),
-				'rt') as f:
-				repo_name = f.readline().strip()
-
-			repos[repo_name] = ourtree
-			repos = ''.join('[{}]\nlocation={}\n'.format(k, v)
-				for k, v in repos.items())
-			mysettings = portc(local_config=False,
-				env={'PORTAGE_REPOSITORIES': repos})
-			dbapi = portdbapi(mysettings=mysettings)
-		# specify that we want just our nice tree we are in cwd
-		dbapi.porttrees = [ourtree]
-		process_display(package, keywords, dbapi)
-	return 0
-
-if __name__ == '__main__':
-	sys.exit(main(sys.argv[1:]))
+    """Option parsing via argc"""
+    parser = argparse.ArgumentParser(
+        prog=__package__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        description="Display keywords for specified package or for package that is in pwd.",
+    )
+
+    parser.add_argument(
+        "-v",
+        "--version",
+        action="version",
+        version=__version__,
+        help="show package version and exit",
+    )
+
+    parser.add_argument("package", nargs="*", default=None, help="Packages to check.")
+
+    parser.add_argument(
+        "-a", "--arch", nargs=1, default=[], help="Display only specified arch(s)"
+    )
+
+    parser.add_argument(
+        "-A",
+        "--align",
+        nargs="?",
+        default="bottom",
+        choices=["top", "bottom"],
+        help="Specify alignment for descriptions.",
+    )
+    parser.add_argument(
+        "-T",
+        "--top-position",
+        nargs="?",
+        default="archlist",
+        choices=["archlist", "versionlist"],
+        help="Specify which fields we want to have in top listing.",
+    )
+
+    parser.add_argument(
+        "-B",
+        "--bold",
+        action="store_true",
+        default=False,
+        help="Print out each other column in bold for easier visual separation.",
+    )
+    parser.add_argument(
+        "-C", "--color", action="store_true", default=False, help="Force colored output"
+    )
+    parser.add_argument(
+        "-O",
+        "--overlays",
+        action="store_true",
+        default=False,
+        help="Search also overlays",
+    )
+    parser.add_argument(
+        "-P",
+        "--prefix",
+        action="store_true",
+        default=False,
+        help="Display prefix keywords in output.",
+    )
+    parser.add_argument(
+        "-S",
+        "--ignore-slot",
+        action="store_true",
+        default=False,
+        help="Treat slots as irrelevant during detection of redundant packages.",
+    )
+
+    return parser.parse_args(args=argv)
+
+
+def main(argv, indirect=False):
+    global ignore_slots, bold, order, topper
+
+    # opts parsing
+    opts = process_args(argv)
+    ignore_slots = opts.ignore_slot
+    use_overlays = opts.overlays
+    highlight_arch = "".join(opts.arch).split(",")
+    bold = opts.bold
+    order = opts.align
+    topper = opts.top_position
+    prefix = opts.prefix
+    color = opts.color
+    package = opts.package
+
+    # equery support
+    if indirect and len(package) <= 0:
+        msg_err = "No packages specified"
+        raise SystemExit(msg_err)
+
+    # disable colors when redirected and they are not forced on
+    if not color and not sys.stdout.isatty():
+        # disable colors
+        porto.nocolor()
+
+    # Imply prefix if user specified any architectures (Bug 578496)
+    if len(opts.arch) > 0:
+        prefix = True
+
+    keywords = keywords_header(prefix, highlight_arch, order)
+    if len(package) > 0:
+        mysettings = portc(local_config=False)
+        dbapi = portdbapi(mysettings=mysettings)
+        if not use_overlays:
+            dbapi.porttrees = [dbapi.porttree_root]
+        for pkg in package:
+            process_display(pkg, keywords, dbapi)
+    else:
+        currdir = os.getcwd()
+        # check if there are actually some ebuilds
+        ebuilds = [
+            "%s" % x for x in os.listdir(currdir) if fnmatch.fnmatch(x, "*.ebuild")
+        ]
+        if len(ebuilds) <= 0:
+            msg_err = 'No ebuilds at "%s"' % currdir
+            raise SystemExit(msg_err)
+        package = "%s/%s" % (
+            os.path.basename(os.path.abspath("../")),
+            os.path.basename(currdir),
+        )
+        ourtree = os.path.realpath("../..")
+        ourstat = os.stat(ourtree)
+        ourstat = (ourstat.st_ino, ourstat.st_dev)
+        for repo in ports.repositories:
+            try:
+                repostat = os.stat(repo.location)
+            except OSError:
+                continue
+            if ourstat == (repostat.st_ino, repostat.st_dev):
+                dbapi = portdbapi(mysettings=portc(local_config=False))
+                break
+        else:
+            repos = {}
+            for repo in ports.repositories:
+                repos[repo.name] = repo.location
+
+            with open(os.path.join(ourtree, "profiles", "repo_name"), "rt") as f:
+                repo_name = f.readline().strip()
+
+            repos[repo_name] = ourtree
+            repos = "".join(
+                "[{}]\nlocation={}\n".format(k, v) for k, v in repos.items()
+            )
+            mysettings = portc(local_config=False, env={"PORTAGE_REPOSITORIES": repos})
+            dbapi = portdbapi(mysettings=mysettings)
+        # specify that we want just our nice tree we are in cwd
+        dbapi.porttrees = [ourtree]
+        process_display(package, keywords, dbapi)
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))

diff --git a/pym/gentoolkit/eshowkw/display_pretty.py b/pym/gentoolkit/eshowkw/display_pretty.py
index beca5f4..d58036a 100644
--- a/pym/gentoolkit/eshowkw/display_pretty.py
+++ b/pym/gentoolkit/eshowkw/display_pretty.py
@@ -1,106 +1,130 @@
-#	vim:fileencoding=utf-8
+# 	vim:fileencoding=utf-8
 # Copyright 2010 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
 from portage.output import colorize
-try: # newer python versions
-	from itertools import zip_longest
-except ImportError: # older python naming
-	from itertools import izip_longest as zip_longest
-
-__all__ = ['string_rotator', 'colorize_string', 'align_string', 'rotate_dash', 'print_content', 'display']
-
-def display(plain_list, rotated_list, plain_width, rotated_height, cp, toplist = 'archlist'):
-	"""Render defauld display to show the keywords listing"""
-	# header
-	output = []
-	output.append('Keywords for %s:' % colorize('blue', cp))
-	# data
-	corner_image = [''.ljust(plain_width) for x in range(rotated_height)]
-	if toplist != 'archlist':
-		corner_image.extend(plain_list)
-	data_printout = ['%s%s' % (x, y)
-		for x, y in zip_longest(corner_image, rotated_list, fillvalue=corner_image[0])]
-	if toplist == 'archlist':
-		data_printout.extend(plain_list)
-	output.extend(data_printout)
-	print(print_content(output))
+
+try:  # newer python versions
+    from itertools import zip_longest
+except ImportError:  # older python naming
+    from itertools import izip_longest as zip_longest
+
+__all__ = [
+    "string_rotator",
+    "colorize_string",
+    "align_string",
+    "rotate_dash",
+    "print_content",
+    "display",
+]
+
+
+def display(
+    plain_list, rotated_list, plain_width, rotated_height, cp, toplist="archlist"
+):
+    """Render default display to show the keywords listing"""
+    # header
+    output = []
+    output.append("Keywords for %s:" % colorize("blue", cp))
+    # data
+    corner_image = ["".ljust(plain_width) for x in range(rotated_height)]
+    if toplist != "archlist":
+        corner_image.extend(plain_list)
+    data_printout = [
+        "%s%s" % (x, y)
+        for x, y in zip_longest(corner_image, rotated_list, fillvalue=corner_image[0])
+    ]
+    if toplist == "archlist":
+        data_printout.extend(plain_list)
+    output.extend(data_printout)
+    print(print_content(output))
+
 
 def align_string(string, align, length):
-	"""Align string to the specified alignment (left or right, and after rotation it becomes top and bottom)"""
-	if align == 'top' or align == 'left':
-		string = string.ljust(length)
-	else:
-		string = string.rjust(length)
-	return string
+    """Align string to the specified alignment (left or right, and after rotation it becomes top and bottom)"""
+    if align == "top" or align == "left":
+        string = string.ljust(length)
+    else:
+        string = string.rjust(length)
+    return string
+
 
 def colorize_string(color, string):
-	"""Add coloring for specified string. Due to rotation we need to do that per character rather than per-line"""
-	tmp = []
-	for char in list(string):
-		# % is whitespace separator so we wont color that :)
-		if char != '%':
-			tmp.append(colorize(color, char))
-		else:
-			tmp.append(char)
-	return ''.join(tmp)
+    """Add coloring for specified string. Due to rotation we need to do that per character rather than per-line"""
+    tmp = []
+    for char in list(string):
+        # % is whitespace separator so we won't color that :)
+        if char != "%":
+            tmp.append(colorize(color, char))
+        else:
+            tmp.append(char)
+    return "".join(tmp)
+
 
 def rotate_dash(string):
-	"""Rotate special strings over 90 degrees for better readability."""
-	chars = ['-', '|']
-	subs = ['|', '-']
-	out = string
-	for x,y  in zip(chars, subs):
-		if string.find(x) != -1:
-			out = out.replace(x, y)
-	return out
+    """Rotate special strings over 90 degrees for better readability."""
+    chars = ["-", "|"]
+    subs = ["|", "-"]
+    out = string
+    for x, y in zip(chars, subs):
+        if string.find(x) != -1:
+            out = out.replace(x, y)
+    return out
+
 
 def print_content(content):
-	"""Print out content (strip it out of the temporary %)"""
-	return '\n'.join(content).replace('%','')
+    """Print out content (strip it out of the temporary %)"""
+    return "\n".join(content).replace("%", "")
+
 
 class string_rotator:
-	__DASH_COUNT = 0
-	def __getChar(self, string, position, line, bold_separator = False):
-		"""Return specified character from the string position"""
-
-		# first figure out what character we want to work with
-		# based on order and position in the string
-		isdash = False
-		if string.startswith('|') or string.startswith('-') or string.startswith('+'):
-			split = list(string)
-			isdash = True
-			self.__DASH_COUNT += 1
-		else:
-			split = string.split('%')
-		char = split[position]
-		# bolding
-		if not isdash and bold_separator \
-				and (line-self.__DASH_COUNT)%2 == 0 \
-				and char != ' ':
-			char = colorize('bold', char)
-		return char
-
-	def rotateContent(self, elements, length, bold_separator = False, strip = True):
-		"""
-			Rotate string over 90 degrees:
-			string -> s
-						t
-						r
-						i
-						n
-						g
-		"""
-		# join used to have list of lines rather than list of chars
-		tmp = []
-		for position in range(length):
-			x = ''
-			for i, string in enumerate(elements):
-				x += ' ' + self.__getChar(rotate_dash(string), position, i, bold_separator)
-			# spaces on dashed line should be dashed too
-			if x.find('+ -') != -1:
-				x = x.replace(' ', '-')
-			# strip all chars and remove empty lines
-			if not strip or len(x.strip(' |-')) > 0:
-				tmp.append(x)
-		return tmp
+    __DASH_COUNT = 0
+
+    def __getChar(self, string, position, line, bold_separator=False):
+        """Return specified character from the string position"""
+
+        # first figure out what character we want to work with
+        # based on order and position in the string
+        isdash = False
+        if string.startswith("|") or string.startswith("-") or string.startswith("+"):
+            split = list(string)
+            isdash = True
+            self.__DASH_COUNT += 1
+        else:
+            split = string.split("%")
+        char = split[position]
+        # bolding
+        if (
+            not isdash
+            and bold_separator
+            and (line - self.__DASH_COUNT) % 2 == 0
+            and char != " "
+        ):
+            char = colorize("bold", char)
+        return char
+
+    def rotateContent(self, elements, length, bold_separator=False, strip=True):
+        """
+        Rotate string over 90 degrees:
+        string -> s
+                                t
+                                r
+                                i
+                                n
+                                g
+        """
+        # join used to have list of lines rather than list of chars
+        tmp = []
+        for position in range(length):
+            x = ""
+            for i, string in enumerate(elements):
+                x += " " + self.__getChar(
+                    rotate_dash(string), position, i, bold_separator
+                )
+            # spaces on dashed line should be dashed too
+            if x.find("+ -") != -1:
+                x = x.replace(" ", "-")
+            # strip all chars and remove empty lines
+            if not strip or len(x.strip(" |-")) > 0:
+                tmp.append(x)
+        return tmp

diff --git a/pym/gentoolkit/eshowkw/keywords_content.py b/pym/gentoolkit/eshowkw/keywords_content.py
index 786e8b9..4f9e3e1 100644
--- a/pym/gentoolkit/eshowkw/keywords_content.py
+++ b/pym/gentoolkit/eshowkw/keywords_content.py
@@ -1,4 +1,4 @@
-#	vim:fileencoding=utf-8
+# 	vim:fileencoding=utf-8
 # Copyright 2010 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
@@ -6,364 +6,406 @@ import portage as port
 import os
 from portage.output import colorize
 
-__all__ = ['keywords_content']
+__all__ = ["keywords_content"]
 
 from gentoolkit.eshowkw.display_pretty import colorize_string
 from gentoolkit.eshowkw.display_pretty import align_string
 
+
 class keywords_content:
-	class RedundancyChecker:
-		def __listRedundant(self, masks, keywords, ignoreslots, slots):
-			"""List all redundant packages."""
-			if ignoreslots:
-				return self.__listRedundantAll(masks, keywords)
-			else:
-				return self.__listRedundantSlots(masks, keywords, slots)
-
-		def __listRedundantSlots(self, masks, keywords, slots):
-			"""Search for redundant packages walking per keywords for specified slot."""
-			output = list()
-			zipped = list(zip(masks, keywords, slots))
-			for slot in self.__uniq(slots):
-				ms = list()
-				ks = list()
-				for m, k, s in zipped:
-					if slot == s:
-						ms.append(m)
-						ks.append(k)
-				output.append(self.__compareSelected(ms, ks))
-			# this is required because the list itself is not just one level depth
-			return list(''.join(output))
-
-		@staticmethod
-		def __uniq(seq):
-			"""Remove all duplicate elements from list."""
-			seen = {}
-			result = []
-			for item in seq:
-				if item in seen:
-					continue
-				seen[item] = 1
-				result.append(item)
-			return result
-
-		@staticmethod
-		def __cleanKeyword(keyword):
-			"""Remove masked arches and hardmasks from keywords since we don't care about that."""
-			return ["%s" % x for x in keyword.split()
-				if x != '-*' and not x.startswith('-')]
-
-		def __listRedundantAll(self, masks, keywords):
-			"""Search for redundant packages using all versions ignoring its slotting."""
-			return list(self.__compareSelected(list(masks), list(keywords)))
-
-		def __compareSelected(self, masks, kws):
-			"""
-			Rotate over list of keywords and compare each element with others.
-			Selectively remove each already compared list from the remaining keywords.
-			"""
-			result = []
-			kws.reverse()
-			masks.reverse()
-			for i in range(len(kws)):
-				kw = kws.pop()
-				masks.pop()
-				if self.__compareKeywordWithRest(kw, kws, masks):
-					result.append('#')
-				else:
-					result.append('o')
-			if len(result) == 0:
-				result.append('o')
-			return ''.join(result)
-
-		def __compareKeywordWithRest(self, keyword, keywords, masks):
-			"""Compare keywords with list of keywords."""
-			kw = self.__cleanKeyword(keyword)
-			for kwi, mask in zip(keywords, masks):
-				kwi = self.__cleanKeyword(kwi)
-				if kwi and not mask:
-					kw = self.__checkShadow(kw, kwi)
-				if not kw:
-					return True
-			return False
-
-		def __checkShadow(self, old, new):
-			"""Check if package version is overshadowed by other package version."""
-			tmp = set(new)
-			tmp.update("~%s" % x for x in new
-				if not x.startswith("~"))
-			return list(set(old).difference(tmp))
-
-		def __init__(self, masks, keywords, slots, ignore_slots = False):
-			"""Query all relevant data for redundancy package checking"""
-			self.redundant = self.__listRedundant(masks, keywords, ignore_slots, slots)
-
-	class VersionChecker:
-		def __getVersions(self, packages):
-			"""Obtain properly aligned version strings without colors."""
-			revlength = max([len(self.__getRevision(x)) for x, repo in packages])
-			return  [self.__separateVersion(x, repo, revlength) for x, repo in packages]
-
-		def __getRevision(self, cpv):
-			"""Get revision informations for each package for nice further alignment"""
-			rev = port.catpkgsplit(cpv)[3]
-			return rev if rev != 'r0' else ''
-
-		def __separateVersion(self, cpv, repo, revlength):
-			return self.__modifyVersionInfo(cpv, repo, port.versions.cpv_getversion(cpv), revlength)
-
-		def __modifyVersionInfo(self, cpv, repo, pv, revlength):
-			"""Prefix and suffix version with string based on whether version is installed or masked and its revision."""
-			mask = self.__getMaskStatus(cpv)
-			install = self.__getInstallStatus(cpv, repo)
-
-			# calculate suffix length
-			currevlen = len(self.__getRevision(cpv))
-			suffixlen = revlength - currevlen
-			# +1 required for the dash in revision
-			if suffixlen != 0 and currevlen == 0:
-				suffixlen = suffixlen + 1
-			suffix = ''
-			for x in range(suffixlen):
-				suffix = '%s ' % suffix
-
-			if mask and install:
-				pv = '[M][I]%s%s' % (pv, suffix)
-			elif mask:
-				pv = '[M]%s%s' % (pv, suffix)
-			elif install:
-				pv = '[I]%s%s' % (pv, suffix)
-			else:
-				pv = '%s%s' % (pv, suffix)
-			return pv
-
-		def __getMaskStatus(self, cpv):
-			"""Figure out if package is pmasked."""
-			try:
-				if "package.mask" in port.getmaskingstatus(cpv, settings=self.mysettings):
-					return True
-			except:
-				# occurs when package is not known by portdb
-				# so we consider it unmasked
-				pass
-			return False
-
-
-		def __getInstallStatus(self, cpv, repo):
-			"""Check if package version we test is installed."""
-			return bool(self.vartree.match("=%s::%s" % (cpv, repo)))
-
-		def __init__(self, packages):
-			"""Query all relevant data for version data formatting"""
-			self.vartree = port.db[port.root]['vartree'].dbapi
-			self.mysettings = port.config(local_config=False)
-			self.versions = self.__getVersions(packages)
-			self.masks = list(map(lambda x: self.__getMaskStatus(x), packages))
-
-	@staticmethod
-	def __packages_sort(package_content):
-		"""
-		Sort packages queried based on version and slot
-		%% pn , repo, slot, eapi, keywords
-		"""
-		if len(package_content) > 1:
-			ver_map = {}
-			for cpv in package_content:
-				ver_map[cpv[0]] = '-'.join(port.versions.catpkgsplit(cpv[0])[2:])
-			def cmp_cpv(cpv1, cpv2):
-				return port.versions.vercmp(ver_map[cpv1[0]], ver_map[cpv2[0]])
-
-			package_content.sort(key=port.util.cmp_sort_key(cmp_cpv))
-
-	def __xmatch(self, pdb, package):
-		"""xmatch function that searches for all packages over all repos"""
-		try:
-			mycp = port.dep_expand(package, mydb=pdb, settings=pdb.settings).cp
-		except port.exception.AmbiguousPackageName as Arg:
-			msg_err = 'Ambiguous package name "%s".\n' % package
-			found = 'Possibilities: %s' % Arg
-			raise SystemExit('%s%s' % (msg_err, found))
-		except port.exception.InvalidAtom:
-			msg_err = 'No such package "%s"' % package
-			raise SystemExit(msg_err)
-
-		mysplit = mycp.split('/')
-		mypkgs = []
-		for oroot in pdb.porttrees:
-			try:
-				file_list = os.listdir(os.path.join(oroot, mycp))
-			except OSError:
-				continue
-			for x in file_list:
-				pf = x[:-7] if x[-7:] == '.ebuild' else []
-				if pf:
-					ps = port.pkgsplit(pf)
-					if not ps or ps[0] != mysplit[1]:
-						# we got garbage or ebuild with wrong name in the dir
-						continue
-					ver_match = port.versions.ver_regexp.match("-".join(ps[1:]))
-					if ver_match is None or not ver_match.groups():
-						# version is not allowed by portage or unset
-						continue
-					# obtain related data from metadata and append to the pkg list
-					keywords, slot, eapi = self.__getMetadata(pdb, mysplit[0]+'/'+pf, oroot)
-					mypkgs.append([mysplit[0]+'/'+pf, oroot, slot, eapi, keywords])
-
-		self.__packages_sort(mypkgs)
-		return mypkgs
-
-	def __checkExist(self, pdb, package):
-		"""Check if specified package even exists."""
-		matches = self.__xmatch(pdb, package)
-		if len(matches) <= 0:
-			msg_err = 'No such package "%s"' % package
-			raise SystemExit(msg_err)
-		return list(zip(*matches))
-
-	@staticmethod
-	def __getMetadata(pdb, package, repo):
-		"""Obtain all required metadata from portage auxdb"""
-		try:
-			metadata = pdb.aux_get(package, ['KEYWORDS', 'SLOT', 'EAPI'], repo)
-		except KeyError:
-			# portage prints out more verbose error for us if we were lucky
-			raise SystemExit('Failed to obtain metadata')
-		return metadata
-
-	def __formatKeywords(self, keywords, keywords_list, usebold = False, toplist = 'archlist'):
-		"""Loop over all keywords and replace them with nice visual identifier"""
-		# the % is fancy separator, we use it to split keywords for rotation
-		# so we wont loose the empty spaces
-		return ['% %'.join([self.__prepareKeywordChar(arch, i, version.split(), usebold, toplist)
-			for i, arch in enumerate(keywords_list)])
-				for version in keywords]
-
-	@staticmethod
-	def __prepareKeywordChar(arch, field, keywords, usebold = False, toplist = 'archlist'):
-		"""
-		Convert specified keywords for package into their visual replacements.
-		# possibilities:
-		# ~arch -> orange ~
-		# -arch -> red -
-		# arch -> green +
-		# -* -> red *
-		"""
-		keys = [ '~%s' % arch, '-%s' % arch, '%s' % arch, '-*' ]
-		values = [
-			colorize('darkyellow', '~'),
-			colorize('darkred', '-'),
-			colorize('darkgreen', '+'),
-			colorize('darkred', '*')
-		]
-		# check what keyword we have
-		# here we cant just append space because it would get stripped later
-		char = colorize('darkgray','o')
-		for k, v in zip(keys, values):
-			if k in keywords:
-				char = v
-				break
-		if toplist == 'archlist' and usebold and (field)%2 == 0 and char != ' ':
-			char = colorize('bold', char)
-		return char
-
-	@staticmethod
-	def __formatVersions(versions, align, length):
-		"""Append colors and align keywords properly"""
-		# % are used as separators for further split so we wont loose spaces and coloring
-		tmp = []
-		for pv in versions:
-			pv = align_string(pv, align, length)
-			pv = '%'.join(list(pv))
-			if pv.find('[%M%][%I%]') != -1:
-				tmp.append(colorize_string('darkyellow', pv))
-			elif pv.find('[%M%]') != -1:
-				tmp.append(colorize_string('darkred', pv))
-			elif pv.find('[%I%]') != -1:
-				tmp.append(colorize_string('bold', pv))
-			else:
-				tmp.append(pv)
-		return tmp
-
-	@staticmethod
-	def __formatAdditional(additional, color, length):
-		"""Align additional items properly"""
-		# % are used as separators for further split so we wont loose spaces and coloring
-		tmp = []
-		for x in additional:
-			tmpc = color
-			x = align_string(x, 'left', length)
-			x = '%'.join(list(x))
-			if x == 'o':
-				# the value is unset so the color is gray
-				tmpc = 'darkgray'
-			x = colorize_string(tmpc, x)
-			tmp.append(x)
-		return tmp
-
-	@staticmethod
-	def __formatEapis(eapis, repos, repos_configs, length):
-		"""Align eapis items properly"""
-		# % are used as separators for further split so we wont loose spaces and coloring
-		tmp = []
-		for eapi, repo in zip(eapis, repos):
-			tmp_eapi = eapi
-			eapi = align_string(eapi, 'left', length)
-			eapi = '%'.join(list(eapi))
-			if repos_configs[repo].eapi_is_banned(tmp_eapi):
-				eapi = colorize_string('red', eapi)
-			elif repos_configs[repo].eapi_is_deprecated(tmp_eapi):
-				eapi = colorize_string('yellow', eapi)
-			else:
-				eapi = colorize_string('green', eapi)
-			tmp.append(eapi)
-		return tmp
-
-	@staticmethod
-	def __prepareContentResult(versions, keywords, eapi, redundant, slots, slot_length, repos, linesep):
-		"""Parse version fields into one list with proper separators"""
-		content = []
-		oldslot = ''
-		fieldsep = '% %|% %'
-		normsep = '% %'
-		for v, k, e, r, s, t in zip(versions, keywords, eapi, redundant, slots, repos):
-			if oldslot != s:
-				oldslot = s
-				content.append(linesep)
-			else:
-				s = '%'.join(list(''.rjust(slot_length)))
-			content.append('%s%s%s%s%s%s%s%s%s%s%s' % (v, fieldsep, k, fieldsep, e, normsep, r, normsep, s, fieldsep, t))
-		return content
-
-	def __init__(self, package, keywords_list, porttree, ignoreslots = False, content_align = 'bottom', usebold = False, toplist = 'archlist'):
-		"""Query all relevant data from portage databases."""
-		packages, self.repositories, self.slots, self.eapi, self.keywords = self.__checkExist(porttree, package)
-		# convert repositories from path to name
-		self.repositories = [porttree.getRepositoryName(x) for x in self.repositories]
-		self.slot_length = max([len(x) for x in self.slots])
-		repositories_length = max([len(x) for x in self.repositories])
-		self.keyword_length = len(keywords_list)
-		vers =self.VersionChecker(list(zip(packages, self.repositories)))
-		self.versions = vers.versions
-		masks = vers.masks
-		self.version_length = max([len(x) for x in self.versions])
-		self.version_count = len(self.versions)
-		self.redundant = self.RedundancyChecker(masks, self.keywords, self.slots, ignoreslots).redundant
-		redundant_length = max([len(x) for x in self.redundant])
-
-		ver = self.__formatVersions(self.versions, content_align, self.version_length)
-		kws = self.__formatKeywords(self.keywords, keywords_list, usebold, toplist)
-		repos_configs = porttree.repositories.prepos
-		eap = self.__formatEapis(self.eapi, self.repositories, repos_configs, 1)
-		red = self.__formatAdditional(self.redundant, 'purple', redundant_length)
-		slt = self.__formatAdditional(self.slots, 'bold', self.slot_length)
-		rep = self.__formatAdditional(self.repositories, 'yellow', repositories_length)
-		# those + numbers are spaces in printout. keywords are multiplied also because of that
-		linesep = '%s+%s+%s+%s' % (''.ljust(self.version_length+1, '-'),
-			''.ljust(self.keyword_length*2+1, '-'),
-			''.ljust(redundant_length+self.slot_length+1+4, '-'),
-			''.ljust(repositories_length+1, '-')
-		)
-
-		self.content = self.__prepareContentResult(ver, kws, eap, red, slt, self.slot_length, rep, linesep)
-		self.content_length = len(linesep)
-		self.cp = port.cpv_getkey(packages[0])
+    class RedundancyChecker:
+        def __listRedundant(self, masks, keywords, ignoreslots, slots):
+            """List all redundant packages."""
+            if ignoreslots:
+                return self.__listRedundantAll(masks, keywords)
+            else:
+                return self.__listRedundantSlots(masks, keywords, slots)
+
+        def __listRedundantSlots(self, masks, keywords, slots):
+            """Search for redundant packages walking per keywords for specified slot."""
+            output = list()
+            zipped = list(zip(masks, keywords, slots))
+            for slot in self.__uniq(slots):
+                ms = list()
+                ks = list()
+                for m, k, s in zipped:
+                    if slot == s:
+                        ms.append(m)
+                        ks.append(k)
+                output.append(self.__compareSelected(ms, ks))
+            # this is required because the list itself is not just one level deep
+            return list("".join(output))
+
+        @staticmethod
+        def __uniq(seq):
+            """Remove all duplicate elements from list."""
+            seen = {}
+            result = []
+            for item in seq:
+                if item in seen:
+                    continue
+                seen[item] = 1
+                result.append(item)
+            return result
+
+        @staticmethod
+        def __cleanKeyword(keyword):
+            """Remove masked arches and hardmasks from keywords since we don't care about that."""
+            return [
+                "%s" % x for x in keyword.split() if x != "-*" and not x.startswith("-")
+            ]
+
+        def __listRedundantAll(self, masks, keywords):
+            """Search for redundant packages using all versions ignoring its slotting."""
+            return list(self.__compareSelected(list(masks), list(keywords)))
+
+        def __compareSelected(self, masks, kws):
+            """
+            Rotate over list of keywords and compare each element with others.
+            Selectively remove each already compared list from the remaining keywords.
+            """
+            result = []
+            kws.reverse()
+            masks.reverse()
+            for i in range(len(kws)):
+                kw = kws.pop()
+                masks.pop()
+                if self.__compareKeywordWithRest(kw, kws, masks):
+                    result.append("#")
+                else:
+                    result.append("o")
+            if len(result) == 0:
+                result.append("o")
+            return "".join(result)
+
+        def __compareKeywordWithRest(self, keyword, keywords, masks):
+            """Compare keywords with list of keywords."""
+            kw = self.__cleanKeyword(keyword)
+            for kwi, mask in zip(keywords, masks):
+                kwi = self.__cleanKeyword(kwi)
+                if kwi and not mask:
+                    kw = self.__checkShadow(kw, kwi)
+                if not kw:
+                    return True
+            return False
+
+        def __checkShadow(self, old, new):
+            """Check if package version is overshadowed by other package version."""
+            tmp = set(new)
+            tmp.update("~%s" % x for x in new if not x.startswith("~"))
+            return list(set(old).difference(tmp))
+
+        def __init__(self, masks, keywords, slots, ignore_slots=False):
+            """Query all relevant data for redundancy package checking"""
+            self.redundant = self.__listRedundant(masks, keywords, ignore_slots, slots)
+
+    class VersionChecker:
+        def __getVersions(self, packages):
+            """Obtain properly aligned version strings without colors."""
+            revlength = max([len(self.__getRevision(x)) for x, repo in packages])
+            return [self.__separateVersion(x, repo, revlength) for x, repo in packages]
+
+        def __getRevision(self, cpv):
+            """Get revision information for each package for nice further alignment"""
+            rev = port.catpkgsplit(cpv)[3]
+            return rev if rev != "r0" else ""
+
+        def __separateVersion(self, cpv, repo, revlength):
+            return self.__modifyVersionInfo(
+                cpv, repo, port.versions.cpv_getversion(cpv), revlength
+            )
+
+        def __modifyVersionInfo(self, cpv, repo, pv, revlength):
+            """Prefix and suffix version with string based on whether version is installed or masked and its revision."""
+            mask = self.__getMaskStatus(cpv)
+            install = self.__getInstallStatus(cpv, repo)
+
+            # calculate suffix length
+            currevlen = len(self.__getRevision(cpv))
+            suffixlen = revlength - currevlen
+            # +1 required for the dash in revision
+            if suffixlen != 0 and currevlen == 0:
+                suffixlen = suffixlen + 1
+            suffix = ""
+            for x in range(suffixlen):
+                suffix = "%s " % suffix
+
+            if mask and install:
+                pv = "[M][I]%s%s" % (pv, suffix)
+            elif mask:
+                pv = "[M]%s%s" % (pv, suffix)
+            elif install:
+                pv = "[I]%s%s" % (pv, suffix)
+            else:
+                pv = "%s%s" % (pv, suffix)
+            return pv
+
+        def __getMaskStatus(self, cpv):
+            """Figure out if package is pmasked."""
+            try:
+                if "package.mask" in port.getmaskingstatus(
+                    cpv, settings=self.mysettings
+                ):
+                    return True
+            except:
+                # occurs when package is not known by portdb
+                # so we consider it unmasked
+                pass
+            return False
+
+        def __getInstallStatus(self, cpv, repo):
+            """Check if package version we test is installed."""
+            return bool(self.vartree.match("=%s::%s" % (cpv, repo)))
+
+        def __init__(self, packages):
+            """Query all relevant data for version data formatting"""
+            self.vartree = port.db[port.root]["vartree"].dbapi
+            self.mysettings = port.config(local_config=False)
+            self.versions = self.__getVersions(packages)
+            self.masks = list(map(lambda x: self.__getMaskStatus(x), packages))
+
+    @staticmethod
+    def __packages_sort(package_content):
+        """
+        Sort packages queried based on version and slot
+        %% pn , repo, slot, eapi, keywords
+        """
+        if len(package_content) > 1:
+            ver_map = {}
+            for cpv in package_content:
+                ver_map[cpv[0]] = "-".join(port.versions.catpkgsplit(cpv[0])[2:])
+
+            def cmp_cpv(cpv1, cpv2):
+                return port.versions.vercmp(ver_map[cpv1[0]], ver_map[cpv2[0]])
+
+            package_content.sort(key=port.util.cmp_sort_key(cmp_cpv))
+
+    def __xmatch(self, pdb, package):
+        """xmatch function that searches for all packages over all repos"""
+        try:
+            mycp = port.dep_expand(package, mydb=pdb, settings=pdb.settings).cp
+        except port.exception.AmbiguousPackageName as Arg:
+            msg_err = 'Ambiguous package name "%s".\n' % package
+            found = "Possibilities: %s" % Arg
+            raise SystemExit("%s%s" % (msg_err, found))
+        except port.exception.InvalidAtom:
+            msg_err = 'No such package "%s"' % package
+            raise SystemExit(msg_err)
+
+        mysplit = mycp.split("/")
+        mypkgs = []
+        for oroot in pdb.porttrees:
+            try:
+                file_list = os.listdir(os.path.join(oroot, mycp))
+            except OSError:
+                continue
+            for x in file_list:
+                pf = x[:-7] if x[-7:] == ".ebuild" else []
+                if pf:
+                    ps = port.pkgsplit(pf)
+                    if not ps or ps[0] != mysplit[1]:
+                        # we got garbage or ebuild with wrong name in the dir
+                        continue
+                    ver_match = port.versions.ver_regexp.match("-".join(ps[1:]))
+                    if ver_match is None or not ver_match.groups():
+                        # version is not allowed by portage or unset
+                        continue
+                    # obtain related data from metadata and append to the pkg list
+                    keywords, slot, eapi = self.__getMetadata(
+                        pdb, mysplit[0] + "/" + pf, oroot
+                    )
+                    mypkgs.append([mysplit[0] + "/" + pf, oroot, slot, eapi, keywords])
+
+        self.__packages_sort(mypkgs)
+        return mypkgs
+
+    def __checkExist(self, pdb, package):
+        """Check if specified package even exists."""
+        matches = self.__xmatch(pdb, package)
+        if len(matches) <= 0:
+            msg_err = 'No such package "%s"' % package
+            raise SystemExit(msg_err)
+        return list(zip(*matches))
+
+    @staticmethod
+    def __getMetadata(pdb, package, repo):
+        """Obtain all required metadata from portage auxdb"""
+        try:
+            metadata = pdb.aux_get(package, ["KEYWORDS", "SLOT", "EAPI"], repo)
+        except KeyError:
+            # portage prints out more verbose error for us if we were lucky
+            raise SystemExit("Failed to obtain metadata")
+        return metadata
+
+    def __formatKeywords(
+        self, keywords, keywords_list, usebold=False, toplist="archlist"
+    ):
+        """Loop over all keywords and replace them with nice visual identifier"""
+        # the % is fancy separator, we use it to split keywords for rotation
+        # so we won't lose the empty spaces
+        return [
+            "% %".join(
+                [
+                    self.__prepareKeywordChar(
+                        arch, i, version.split(), usebold, toplist
+                    )
+                    for i, arch in enumerate(keywords_list)
+                ]
+            )
+            for version in keywords
+        ]
+
+    @staticmethod
+    def __prepareKeywordChar(arch, field, keywords, usebold=False, toplist="archlist"):
+        """
+        Convert specified keywords for package into their visual replacements.
+        # possibilities:
+        # ~arch -> orange ~
+        # -arch -> red -
+        # arch -> green +
+        # -* -> red *
+        """
+        keys = ["~%s" % arch, "-%s" % arch, "%s" % arch, "-*"]
+        values = [
+            colorize("darkyellow", "~"),
+            colorize("darkred", "-"),
+            colorize("darkgreen", "+"),
+            colorize("darkred", "*"),
+        ]
+        # check what keyword we have
+        # here we cant just append space because it would get stripped later
+        char = colorize("darkgray", "o")
+        for k, v in zip(keys, values):
+            if k in keywords:
+                char = v
+                break
+        if toplist == "archlist" and usebold and (field) % 2 == 0 and char != " ":
+            char = colorize("bold", char)
+        return char
+
+    @staticmethod
+    def __formatVersions(versions, align, length):
+        """Append colors and align keywords properly"""
+        # % are used as separators for further split so we won't lose spaces and coloring
+        tmp = []
+        for pv in versions:
+            pv = align_string(pv, align, length)
+            pv = "%".join(list(pv))
+            if pv.find("[%M%][%I%]") != -1:
+                tmp.append(colorize_string("darkyellow", pv))
+            elif pv.find("[%M%]") != -1:
+                tmp.append(colorize_string("darkred", pv))
+            elif pv.find("[%I%]") != -1:
+                tmp.append(colorize_string("bold", pv))
+            else:
+                tmp.append(pv)
+        return tmp
+
+    @staticmethod
+    def __formatAdditional(additional, color, length):
+        """Align additional items properly"""
+        # % are used as separators for further split so we won't lose spaces and coloring
+        tmp = []
+        for x in additional:
+            tmpc = color
+            x = align_string(x, "left", length)
+            x = "%".join(list(x))
+            if x == "o":
+                # the value is unset so the color is gray
+                tmpc = "darkgray"
+            x = colorize_string(tmpc, x)
+            tmp.append(x)
+        return tmp
+
+    @staticmethod
+    def __formatEapis(eapis, repos, repos_configs, length):
+        """Align eapis items properly"""
+        # % are used as separators for further split so we won't lose spaces and coloring
+        tmp = []
+        for eapi, repo in zip(eapis, repos):
+            tmp_eapi = eapi
+            eapi = align_string(eapi, "left", length)
+            eapi = "%".join(list(eapi))
+            if repos_configs[repo].eapi_is_banned(tmp_eapi):
+                eapi = colorize_string("red", eapi)
+            elif repos_configs[repo].eapi_is_deprecated(tmp_eapi):
+                eapi = colorize_string("yellow", eapi)
+            else:
+                eapi = colorize_string("green", eapi)
+            tmp.append(eapi)
+        return tmp
+
+    @staticmethod
+    def __prepareContentResult(
+        versions, keywords, eapi, redundant, slots, slot_length, repos, linesep
+    ):
+        """Parse version fields into one list with proper separators"""
+        content = []
+        oldslot = ""
+        fieldsep = "% %|% %"
+        normsep = "% %"
+        for v, k, e, r, s, t in zip(versions, keywords, eapi, redundant, slots, repos):
+            if oldslot != s:
+                oldslot = s
+                content.append(linesep)
+            else:
+                s = "%".join(list("".rjust(slot_length)))
+            content.append(
+                "%s%s%s%s%s%s%s%s%s%s%s"
+                % (v, fieldsep, k, fieldsep, e, normsep, r, normsep, s, fieldsep, t)
+            )
+        return content
+
+    def __init__(
+        self,
+        package,
+        keywords_list,
+        porttree,
+        ignoreslots=False,
+        content_align="bottom",
+        usebold=False,
+        toplist="archlist",
+    ):
+        """Query all relevant data from portage databases."""
+        (
+            packages,
+            self.repositories,
+            self.slots,
+            self.eapi,
+            self.keywords,
+        ) = self.__checkExist(porttree, package)
+        # convert repositories from path to name
+        self.repositories = [porttree.getRepositoryName(x) for x in self.repositories]
+        self.slot_length = max([len(x) for x in self.slots])
+        repositories_length = max([len(x) for x in self.repositories])
+        self.keyword_length = len(keywords_list)
+        vers = self.VersionChecker(list(zip(packages, self.repositories)))
+        self.versions = vers.versions
+        masks = vers.masks
+        self.version_length = max([len(x) for x in self.versions])
+        self.version_count = len(self.versions)
+        self.redundant = self.RedundancyChecker(
+            masks, self.keywords, self.slots, ignoreslots
+        ).redundant
+        redundant_length = max([len(x) for x in self.redundant])
+
+        ver = self.__formatVersions(self.versions, content_align, self.version_length)
+        kws = self.__formatKeywords(self.keywords, keywords_list, usebold, toplist)
+        repos_configs = porttree.repositories.prepos
+        eap = self.__formatEapis(self.eapi, self.repositories, repos_configs, 1)
+        red = self.__formatAdditional(self.redundant, "purple", redundant_length)
+        slt = self.__formatAdditional(self.slots, "bold", self.slot_length)
+        rep = self.__formatAdditional(self.repositories, "yellow", repositories_length)
+        # those + numbers are spaces in printout. keywords are multiplied also because of that
+        linesep = "%s+%s+%s+%s" % (
+            "".ljust(self.version_length + 1, "-"),
+            "".ljust(self.keyword_length * 2 + 1, "-"),
+            "".ljust(redundant_length + self.slot_length + 1 + 4, "-"),
+            "".ljust(repositories_length + 1, "-"),
+        )
+
+        self.content = self.__prepareContentResult(
+            ver, kws, eap, red, slt, self.slot_length, rep, linesep
+        )
+        self.content_length = len(linesep)
+        self.cp = port.cpv_getkey(packages[0])

diff --git a/pym/gentoolkit/eshowkw/keywords_header.py b/pym/gentoolkit/eshowkw/keywords_header.py
index 00ecb27..5ca892e 100644
--- a/pym/gentoolkit/eshowkw/keywords_header.py
+++ b/pym/gentoolkit/eshowkw/keywords_header.py
@@ -1,8 +1,8 @@
-#	vim:fileencoding=utf-8
+# 	vim:fileencoding=utf-8
 # Copyright 2001-2018 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
-__all__ = ['keywords_header']
+__all__ = ["keywords_header"]
 
 from portage import settings as ports
 from gentoolkit.eshowkw.display_pretty import colorize_string
@@ -11,136 +11,143 @@ from gentoolkit.profile import load_profile_data
 
 
 def gen_arch_list(status):
-	_arch_status = load_profile_data()
-	if status == "stable":
-		return [arch for arch in _arch_status if _arch_status[arch][0] == "stable"]
-	elif status == "dev":
-		return [arch for arch in _arch_status if _arch_status[arch][0] == "dev"]
-	elif status == "exp":
-		return [arch for arch in _arch_status if _arch_status[arch][0] == "exp"]
-	elif status == "arch":
-		return [arch for arch in _arch_status if _arch_status[arch][1] == "arch"]
-	elif status == "~arch":
-		return [arch for arch in _arch_status if _arch_status[arch][1] == "~arch"]
-	else:
-		raise TypeError
+    _arch_status = load_profile_data()
+    if status == "stable":
+        return [arch for arch in _arch_status if _arch_status[arch][0] == "stable"]
+    elif status == "dev":
+        return [arch for arch in _arch_status if _arch_status[arch][0] == "dev"]
+    elif status == "exp":
+        return [arch for arch in _arch_status if _arch_status[arch][0] == "exp"]
+    elif status == "arch":
+        return [arch for arch in _arch_status if _arch_status[arch][1] == "arch"]
+    elif status == "~arch":
+        return [arch for arch in _arch_status if _arch_status[arch][1] == "~arch"]
+    else:
+        raise TypeError
+
 
 class keywords_header:
-	__IMPARCHS = gen_arch_list("stable")
-	__DEV_ARCHS = gen_arch_list("dev")
-	__EXP_ARCHS = gen_arch_list("exp")
-	__TESTING_KW_ARCHS = gen_arch_list("~arch")
-	__ADDITIONAL_FIELDS = [ 'eapi', 'unused', 'slot' ]
-	__EXTRA_FIELDS = [ 'repo' ]
-
-	@staticmethod
-	def __readKeywords():
-		"""Read all available keywords from portage."""
-		return [x for x in ports.archlist()
-			if not x.startswith('~')]
-
-	@staticmethod
-	def __isPrefix(k):
-		spl = k.split('-')
-		# *-fbsd are not prefix
-		return len(spl) > 1 and spl[1] != 'fbsd'
-
-	def __sortKeywords(self, keywords, prefix = False, required_keywords = []):
-		"""Sort keywords: order by status (IMP, then DEV, then EXP, then
-		prefix), then by name."""
-
-		# user specified only some keywords to display
-		if len(required_keywords) != 0:
-			tmpkeywords = [k for k in keywords
-				if k in required_keywords]
-			# idiots might specify non-existant archs
-			if len(tmpkeywords) != 0:
-				keywords = tmpkeywords
-
-		normal = [k for k in keywords if not self.__isPrefix(k)]
-		if prefix:
-			longer = [k for k in keywords if self.__isPrefix(k)]
-			normal.extend(longer)
-
-		lists = (self.__IMPARCHS + self.__DEV_ARCHS), self.__EXP_ARCHS
-		levels = {}
-		for kw in normal:
-			for level, ls in enumerate(lists):
-				if kw in ls:
-					levels[kw] = level
-					break
-
-		# sort by, in order (to match Bugzilla):
-		# 1. non-prefix, then prefix (stable output between -P and not)
-		# 2. arch, then ~arch
-		# 3. profile stability
-		# 4. short keywords, then long (prefix, fbsd)
-		# 5. keyword name in reverse component order
-		normal.sort(key=lambda kw: (self.__isPrefix(kw),
-			kw in self.__TESTING_KW_ARCHS,
-			levels.get(kw, 99),
-			kw.count('-'),
-			list(reversed(kw.split('-')))))
-		return normal
-
-	def __readAdditionalFields(self):
-		"""Prepare list of aditional fileds displayed by eshowkw (2nd part)"""
-		return self.__ADDITIONAL_FIELDS
-
-	def __readExtraFields(self):
-		"""Prepare list of extra fileds displayed by eshowkw (3rd part)"""
-		return self.__EXTRA_FIELDS
-
-	def __formatKeywords(self, keywords, align, length):
-		"""Append colors and align keywords properly"""
-		tmp = []
-		for keyword in keywords:
-			tmp2 = keyword
-			keyword = align_string(keyword, align, length)
-			# % are used as separators for further split so we wont loose spaces and coloring
-			keyword = '%'.join(list(keyword))
-			if tmp2 in self.__IMPARCHS:
-				tmp.append(colorize_string('darkyellow', keyword))
-			elif tmp2 in self.__EXP_ARCHS:
-				tmp.append(colorize_string('darkgray', keyword))
-			else:
-				tmp.append(keyword)
-		return tmp
-
-	@staticmethod
-	def __formatAdditional(additional, align, length):
-		"""Align additional items properly"""
-		# % are used as separators for further split so we wont loose spaces and coloring
-		return ['%'.join(align_string(x, align, length)) for x in additional]
-
-	def __prepareExtra(self, extra, align, length):
-		content = []
-		content.append(''.ljust(length, '-'))
-		content.extend(self.__formatAdditional(extra, align, length))
-		return content
-
-	def __prepareResult(self, keywords, additional, align, length):
-		"""Parse keywords and additional fields into one list with proper separators"""
-		content = []
-		content.append(''.ljust(length, '-'))
-		content.extend(self.__formatKeywords(keywords, align, length))
-		content.append(''.ljust(length, '-'))
-		content.extend(self.__formatAdditional(additional, align, length))
-		return content
-
-	def __init__(self, prefix = False, required_keywords = [], keywords_align = 'bottom'):
-		"""Initialize keywords header."""
-		additional = self.__readAdditionalFields()
-		extra = self.__readExtraFields()
-		self.keywords = self.__sortKeywords(self.__readKeywords(), prefix, required_keywords)
-		self.length = max(
-			max([len(x) for x in self.keywords]),
-			max([len(x) for x in additional]),
-			max([len(x) for x in extra])
-		)
-		#len(max([max(self.keywords, key=len), max(additional, key=len)], key=len))
-		self.keywords_count = len(self.keywords)
-		self.additional_count = len(additional)
-		self.extra_count = len(extra)
-		self.content = self.__prepareResult(self.keywords, additional, keywords_align, self.length)
-		self.extra = self.__prepareExtra(extra, keywords_align, self.length)
+    __IMPARCHS = gen_arch_list("stable")
+    __DEV_ARCHS = gen_arch_list("dev")
+    __EXP_ARCHS = gen_arch_list("exp")
+    __TESTING_KW_ARCHS = gen_arch_list("~arch")
+    __ADDITIONAL_FIELDS = ["eapi", "unused", "slot"]
+    __EXTRA_FIELDS = ["repo"]
+
+    @staticmethod
+    def __readKeywords():
+        """Read all available keywords from portage."""
+        return [x for x in ports.archlist() if not x.startswith("~")]
+
+    @staticmethod
+    def __isPrefix(k):
+        spl = k.split("-")
+        # *-fbsd are not prefix
+        return len(spl) > 1 and spl[1] != "fbsd"
+
+    def __sortKeywords(self, keywords, prefix=False, required_keywords=[]):
+        """Sort keywords: order by status (IMP, then DEV, then EXP, then
+        prefix), then by name."""
+
+        # user specified only some keywords to display
+        if len(required_keywords) != 0:
+            tmpkeywords = [k for k in keywords if k in required_keywords]
+            # idiots might specify non-existant archs
+            if len(tmpkeywords) != 0:
+                keywords = tmpkeywords
+
+        normal = [k for k in keywords if not self.__isPrefix(k)]
+        if prefix:
+            longer = [k for k in keywords if self.__isPrefix(k)]
+            normal.extend(longer)
+
+        lists = (self.__IMPARCHS + self.__DEV_ARCHS), self.__EXP_ARCHS
+        levels = {}
+        for kw in normal:
+            for level, ls in enumerate(lists):
+                if kw in ls:
+                    levels[kw] = level
+                    break
+
+        # sort by, in order (to match Bugzilla):
+        # 1. non-prefix, then prefix (stable output between -P and not)
+        # 2. arch, then ~arch
+        # 3. profile stability
+        # 4. short keywords, then long (prefix, fbsd)
+        # 5. keyword name in reverse component order
+        normal.sort(
+            key=lambda kw: (
+                self.__isPrefix(kw),
+                kw in self.__TESTING_KW_ARCHS,
+                levels.get(kw, 99),
+                kw.count("-"),
+                list(reversed(kw.split("-"))),
+            )
+        )
+        return normal
+
+    def __readAdditionalFields(self):
+        """Prepare list of additional fields displayed by eshowkw (2nd part)"""
+        return self.__ADDITIONAL_FIELDS
+
+    def __readExtraFields(self):
+        """Prepare list of extra fields displayed by eshowkw (3rd part)"""
+        return self.__EXTRA_FIELDS
+
+    def __formatKeywords(self, keywords, align, length):
+        """Append colors and align keywords properly"""
+        tmp = []
+        for keyword in keywords:
+            tmp2 = keyword
+            keyword = align_string(keyword, align, length)
+            # % are used as separators for further split so we won't lose spaces and coloring
+            keyword = "%".join(list(keyword))
+            if tmp2 in self.__IMPARCHS:
+                tmp.append(colorize_string("darkyellow", keyword))
+            elif tmp2 in self.__EXP_ARCHS:
+                tmp.append(colorize_string("darkgray", keyword))
+            else:
+                tmp.append(keyword)
+        return tmp
+
+    @staticmethod
+    def __formatAdditional(additional, align, length):
+        """Align additional items properly"""
+        # % are used as separators for further split so we won't lose spaces and coloring
+        return ["%".join(align_string(x, align, length)) for x in additional]
+
+    def __prepareExtra(self, extra, align, length):
+        content = []
+        content.append("".ljust(length, "-"))
+        content.extend(self.__formatAdditional(extra, align, length))
+        return content
+
+    def __prepareResult(self, keywords, additional, align, length):
+        """Parse keywords and additional fields into one list with proper separators"""
+        content = []
+        content.append("".ljust(length, "-"))
+        content.extend(self.__formatKeywords(keywords, align, length))
+        content.append("".ljust(length, "-"))
+        content.extend(self.__formatAdditional(additional, align, length))
+        return content
+
+    def __init__(self, prefix=False, required_keywords=[], keywords_align="bottom"):
+        """Initialize keywords header."""
+        additional = self.__readAdditionalFields()
+        extra = self.__readExtraFields()
+        self.keywords = self.__sortKeywords(
+            self.__readKeywords(), prefix, required_keywords
+        )
+        self.length = max(
+            max([len(x) for x in self.keywords]),
+            max([len(x) for x in additional]),
+            max([len(x) for x in extra]),
+        )
+        # len(max([max(self.keywords, key=len), max(additional, key=len)], key=len))
+        self.keywords_count = len(self.keywords)
+        self.additional_count = len(additional)
+        self.extra_count = len(extra)
+        self.content = self.__prepareResult(
+            self.keywords, additional, keywords_align, self.length
+        )
+        self.extra = self.__prepareExtra(extra, keywords_align, self.length)

diff --git a/pym/gentoolkit/flag.py b/pym/gentoolkit/flag.py
index 42e8196..20ba855 100644
--- a/pym/gentoolkit/flag.py
+++ b/pym/gentoolkit/flag.py
@@ -9,14 +9,14 @@
 
 
 __all__ = (
-	'get_iuse',
-	'get_installed_use',
-	'reduce_flag',
-	'reduce_flags',
-	'defaulted_flags',
-	'filter_flags',
-	'get_all_cpv_use',
-	'get_flags'
+    "get_iuse",
+    "get_installed_use",
+    "reduce_flag",
+    "reduce_flags",
+    "defaulted_flags",
+    "filter_flags",
+    "get_all_cpv_use",
+    "get_flags",
 )
 
 
@@ -24,156 +24,160 @@ import portage
 
 
 def get_iuse(cpv):
-	"""Gets the current IUSE flags from the tree
-
-	To be used when a gentoolkit package object is not needed
-	@type: cpv: string
-	@param cpv: cat/pkg-ver
-	@rtype list
-	@returns [] or the list of IUSE flags
-	"""
-	try:
-		# aux_get might return dupes, so run them through set() to remove them
-		iuse = set(portage.db[portage.root]["porttree"].dbapi.aux_get(cpv, ["IUSE"])[0].split())
-		# there could still be dupes due to IUSE defaults
-		iuse = [x for x in iuse if '+'+x not in iuse and '-'+x not in iuse]
-		return list(iuse)
-	except:
-		return []
+    """Gets the current IUSE flags from the tree
+
+    To be used when a gentoolkit package object is not needed
+    @type: cpv: string
+    @param cpv: cat/pkg-ver
+    @rtype list
+    @returns [] or the list of IUSE flags
+    """
+    try:
+        # aux_get might return dupes, so run them through set() to remove them
+        iuse = set(
+            portage.db[portage.root]["porttree"].dbapi.aux_get(cpv, ["IUSE"])[0].split()
+        )
+        # there could still be dupes due to IUSE defaults
+        iuse = [x for x in iuse if "+" + x not in iuse and "-" + x not in iuse]
+        return list(iuse)
+    except:
+        return []
 
 
 def get_installed_use(cpv, use="USE"):
-	"""Gets the installed USE flags from the VARDB
+    """Gets the installed USE flags from the VARDB
 
-	To be used when a gentoolkit package object is not needed
-	@type: cpv: string
-	@param cpv: cat/pkg-ver
-	@type use: string
-	@param use: 1 of ["USE", "PKGUSE"]
-	@rtype list
-	@returns [] or the list of IUSE flags
-	"""
-	return portage.db[portage.root]["vartree"].dbapi.aux_get(cpv,[use])[0].split()
+    To be used when a gentoolkit package object is not needed
+    @type: cpv: string
+    @param cpv: cat/pkg-ver
+    @type use: string
+    @param use: 1 of ["USE", "PKGUSE"]
+    @rtype list
+    @returns [] or the list of IUSE flags
+    """
+    return portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, [use])[0].split()
 
 
 def reduce_flag(flag):
-	"""Absolute value function for a USE flag
+    """Absolute value function for a USE flag
 
-	@type flag: string
-	@param flag: the use flag to absolute.
-	@rtype: string
-	@return absolute USE flag
-	"""
-	if flag[0] in ["+","-"]:
-		return flag[1:]
-	else:
-		return flag
+    @type flag: string
+    @param flag: the use flag to absolute.
+    @rtype: string
+    @return absolute USE flag
+    """
+    if flag[0] in ["+", "-"]:
+        return flag[1:]
+    else:
+        return flag
 
 
 def reduce_flags(the_list):
-	"""Absolute value function for a USE flag list
+    """Absolute value function for a USE flag list
 
-	@type the_list: list
-	@param the_list: the use flags to absolute.
-	@rtype: list
-	@return absolute USE flags
-	"""
-	r=[]
-	for member in the_list:
-		r.append(reduce_flag(member))
-	return r
+    @type the_list: list
+    @param the_list: the use flags to absolute.
+    @rtype: list
+    @return absolute USE flags
+    """
+    r = []
+    for member in the_list:
+        r.append(reduce_flag(member))
+    return r
 
 
 def defaulted_flags(the_list):
-	"""Absolute value function for a USE flag list
+    """Absolute value function for a USE flag list
 
-	@type the_list: list
-	@param the_list: the use flags to get defaulted ones from.
-	@rtype: dict of lists
-	@return defaulted USE flags {'+': [...], '-': [...]}
-	"""
-	r={"+":[], "-": []}
-	for member in the_list:
-		if member[0] in  ["+","-"]:
-			r[member[0]].append(member[1:])
-	return r
+    @type the_list: list
+    @param the_list: the use flags to get defaulted ones from.
+    @rtype: dict of lists
+    @return defaulted USE flags {'+': [...], '-': [...]}
+    """
+    r = {"+": [], "-": []}
+    for member in the_list:
+        if member[0] in ["+", "-"]:
+            r[member[0]].append(member[1:])
+    return r
 
 
 def filter_flags(use, use_expand_hidden, usemasked, useforced):
-	"""Filter function to remove hidden or otherwise not normally
-	visible USE flags from a list.
-
-	@type use: list
-	@param use: the USE flag list to be filtered.
-	@type use_expand_hidden: list
-	@param  use_expand_hidden: list of flags hidden.
-	@type usemasked: list
-	@param usemasked: list of masked USE flags.
-	@type useforced: list
-	@param useforced: the forced USE flags.
-	@rtype: list
-	@return the filtered USE flags.
-	"""
-	# clean out some environment flags, since they will most probably
-	# be confusing for the user
-	use = dict((reduce_flag(flag), flag) for flag in use)
-	for f in use_expand_hidden:
-		f=f.lower() + "_"
-		for x in list(use):
-			if x.startswith(f):
-				del use[x]
-	# clean out any arch's
-	archlist = portage.settings["PORTAGE_ARCHLIST"].split()
-	for a in archlist:
-		use.pop(a, None)
-	# dbl check if any from usemasked  or useforced are still there
-	masked = usemasked + useforced
-	for a in masked:
-		use.pop(a, None)
-	return list(use.values())
+    """Filter function to remove hidden or otherwise not normally
+    visible USE flags from a list.
+
+    @type use: list
+    @param use: the USE flag list to be filtered.
+    @type use_expand_hidden: list
+    @param  use_expand_hidden: list of flags hidden.
+    @type usemasked: list
+    @param usemasked: list of masked USE flags.
+    @type useforced: list
+    @param useforced: the forced USE flags.
+    @rtype: list
+    @return the filtered USE flags.
+    """
+    # clean out some environment flags, since they will most probably
+    # be confusing for the user
+    use = dict((reduce_flag(flag), flag) for flag in use)
+    for f in use_expand_hidden:
+        f = f.lower() + "_"
+        for x in list(use):
+            if x.startswith(f):
+                del use[x]
+    # clean out any arch's
+    archlist = portage.settings["PORTAGE_ARCHLIST"].split()
+    for a in archlist:
+        use.pop(a, None)
+    # dbl check if any from usemasked  or useforced are still there
+    masked = usemasked + useforced
+    for a in masked:
+        use.pop(a, None)
+    return list(use.values())
 
 
 def get_all_cpv_use(cpv):
-	"""Uses portage to determine final USE flags and settings for an emerge
-
-	@type cpv: string
-	@param cpv: eg cat/pkg-ver
-	@rtype: lists
-	@return  use, use_expand_hidden, usemask, useforce
-	"""
-	use = None
-	portage.db[portage.root]["porttree"].dbapi.settings.unlock()
-	try:
-		portage.db[portage.root]["porttree"].dbapi.settings.setcpv(cpv, mydb=portage.portdb)
-		use = portage.settings['PORTAGE_USE'].split()
-		use_expand_hidden = portage.settings["USE_EXPAND_HIDDEN"].split()
-		usemask = list(portage.db[portage.root]["porttree"].dbapi.settings.usemask)
-		useforce =  list(portage.db[portage.root]["porttree"].dbapi.settings.useforce)
-	except KeyError:
-		portage.db[portage.root]["porttree"].dbapi.settings.reset()
-		portage.db[portage.root]["porttree"].dbapi.settings.lock()
-		return [], [], [], []
-	# reset cpv filter
-	portage.db[portage.root]["porttree"].dbapi.settings.reset()
-	portage.db[portage.root]["porttree"].dbapi.settings.lock()
-	return use, use_expand_hidden, usemask, useforce
+    """Uses portage to determine final USE flags and settings for an emerge
+
+    @type cpv: string
+    @param cpv: eg cat/pkg-ver
+    @rtype: lists
+    @return  use, use_expand_hidden, usemask, useforce
+    """
+    use = None
+    portage.db[portage.root]["porttree"].dbapi.settings.unlock()
+    try:
+        portage.db[portage.root]["porttree"].dbapi.settings.setcpv(
+            cpv, mydb=portage.portdb
+        )
+        use = portage.settings["PORTAGE_USE"].split()
+        use_expand_hidden = portage.settings["USE_EXPAND_HIDDEN"].split()
+        usemask = list(portage.db[portage.root]["porttree"].dbapi.settings.usemask)
+        useforce = list(portage.db[portage.root]["porttree"].dbapi.settings.useforce)
+    except KeyError:
+        portage.db[portage.root]["porttree"].dbapi.settings.reset()
+        portage.db[portage.root]["porttree"].dbapi.settings.lock()
+        return [], [], [], []
+    # reset cpv filter
+    portage.db[portage.root]["porttree"].dbapi.settings.reset()
+    portage.db[portage.root]["porttree"].dbapi.settings.lock()
+    return use, use_expand_hidden, usemask, useforce
 
 
 def get_flags(cpv, final_setting=False):
-	"""Retrieves all information needed to filter out hidded, masked, etc.
-	USE flags for a given package.
-
-	@type cpv: string
-	@param cpv: eg. cat/pkg-ver
-	@type final_setting: boolean
-	@param final_setting: used to also determine the final
-		enviroment USE flag settings and return them as well.
-	@rtype: list or list, list
-	@return IUSE or IUSE, final_flags
-	"""
-	final_use, use_expand_hidden, usemasked, useforced = get_all_cpv_use(cpv)
-	iuse_flags = filter_flags(get_iuse(cpv), use_expand_hidden, usemasked, useforced)
-	if final_setting:
-		final_flags = filter_flags(final_use,  use_expand_hidden, usemasked, useforced)
-		return iuse_flags, final_flags
-	return iuse_flags
+    """Retrieves all information needed to filter out hidden, masked, etc.
+    USE flags for a given package.
+
+    @type cpv: string
+    @param cpv: eg. cat/pkg-ver
+    @type final_setting: boolean
+    @param final_setting: used to also determine the final
+            environment USE flag settings and return them as well.
+    @rtype: list or list, list
+    @return IUSE or IUSE, final_flags
+    """
+    final_use, use_expand_hidden, usemasked, useforced = get_all_cpv_use(cpv)
+    iuse_flags = filter_flags(get_iuse(cpv), use_expand_hidden, usemasked, useforced)
+    if final_setting:
+        final_flags = filter_flags(final_use, use_expand_hidden, usemasked, useforced)
+        return iuse_flags, final_flags
+    return iuse_flags

diff --git a/pym/gentoolkit/formatters.py b/pym/gentoolkit/formatters.py
index 097c186..84c66ee 100644
--- a/pym/gentoolkit/formatters.py
+++ b/pym/gentoolkit/formatters.py
@@ -13,123 +13,118 @@ import gentoolkit.pprinter as pp
 
 
 def format_options(options):
-	"""Format module options.
-
-	@type options: list
-	@param options: [('option 1', 'description 1'), ('option 2', 'des... )]
-	@rtype: str
-	@return: formatted options string
-	"""
-
-	result = []
-	twrap = TextWrapper(width=gentoolkit.CONFIG['termWidth'])
-	opts = (x[0] for x in options)
-	descs = (x[1] for x in options)
-	for opt, desc in zip(opts, descs):
-		twrap.initial_indent = pp.emph(opt.ljust(25))
-		twrap.subsequent_indent = " " * 25
-		result.append(twrap.fill(desc))
-	return '\n'.join(result)
-
-
-def format_filetype(path, fdesc, show_type=False, show_md5=False,
-		show_timestamp=False):
-	"""Format a path for printing.
-
-	@type path: str
-	@param path: the path
-	@type fdesc: list
-	@param fdesc: [file_type, timestamp, MD5 sum/symlink target]
-		file_type is one of dev, dir, obj, sym.
-		If file_type is dir, there is no timestamp or MD5 sum.
-		If file_type is sym, fdesc[2] is the target of the symlink.
-	@type show_type: bool
-	@param show_type: if True, prepend the file's type to the formatted string
-	@type show_md5: bool
-	@param show_md5: if True, append MD5 sum to the formatted string
-	@type show_timestamp: bool
-	@param show_timestamp: if True, append time-of-creation after pathname
-	@rtype: str
-	@return: formatted pathname with optional added information
-	"""
-
-	ftype = fpath = stamp = md5sum = ""
-	if fdesc[0] == "obj":
-		ftype = "file"
-		fpath = path
-		stamp = format_timestamp(fdesc[1])
-		md5sum = fdesc[2]
-	elif fdesc[0] == "dir":
-		ftype = "dir"
-		fpath = pp.path(path)
-	elif fdesc[0] == "sym":
-		ftype = "sym"
-		stamp = format_timestamp(fdesc[1])
-		tgt = fdesc[2].split()[0]
-		if gentoolkit.CONFIG["piping"]:
-			fpath = path
-		else:
-			fpath = pp.path_symlink(path + " -> " + tgt)
-	elif fdesc[0] == "dev":
-		ftype = "dev"
-		fpath = path
-	else:
-		sys.stderr.write(
-			pp.error("%s has unknown type: %s" % (path, fdesc[0]))
-		)
-	result = ""
-	if show_type:
-		result += "%4s " % ftype
-	result += fpath
-	if show_timestamp:
-		result += "  " + stamp
-	if show_md5:
-		result += "  " + md5sum
-	return result
-
+    """Format module options.
+
+    @type options: list
+    @param options: [('option 1', 'description 1'), ('option 2', 'des... )]
+    @rtype: str
+    @return: formatted options string
+    """
+
+    result = []
+    twrap = TextWrapper(width=gentoolkit.CONFIG["termWidth"])
+    opts = (x[0] for x in options)
+    descs = (x[1] for x in options)
+    for opt, desc in zip(opts, descs):
+        twrap.initial_indent = pp.emph(opt.ljust(25))
+        twrap.subsequent_indent = " " * 25
+        result.append(twrap.fill(desc))
+    return "\n".join(result)
+
+
+def format_filetype(path, fdesc, show_type=False, show_md5=False, show_timestamp=False):
+    """Format a path for printing.
+
+    @type path: str
+    @param path: the path
+    @type fdesc: list
+    @param fdesc: [file_type, timestamp, MD5 sum/symlink target]
+            file_type is one of dev, dir, obj, sym.
+            If file_type is dir, there is no timestamp or MD5 sum.
+            If file_type is sym, fdesc[2] is the target of the symlink.
+    @type show_type: bool
+    @param show_type: if True, prepend the file's type to the formatted string
+    @type show_md5: bool
+    @param show_md5: if True, append MD5 sum to the formatted string
+    @type show_timestamp: bool
+    @param show_timestamp: if True, append time-of-creation after pathname
+    @rtype: str
+    @return: formatted pathname with optional added information
+    """
+
+    ftype = fpath = stamp = md5sum = ""
+    if fdesc[0] == "obj":
+        ftype = "file"
+        fpath = path
+        stamp = format_timestamp(fdesc[1])
+        md5sum = fdesc[2]
+    elif fdesc[0] == "dir":
+        ftype = "dir"
+        fpath = pp.path(path)
+    elif fdesc[0] == "sym":
+        ftype = "sym"
+        stamp = format_timestamp(fdesc[1])
+        tgt = fdesc[2].split()[0]
+        if gentoolkit.CONFIG["piping"]:
+            fpath = path
+        else:
+            fpath = pp.path_symlink(path + " -> " + tgt)
+    elif fdesc[0] == "dev":
+        ftype = "dev"
+        fpath = path
+    else:
+        sys.stderr.write(pp.error("%s has unknown type: %s" % (path, fdesc[0])))
+    result = ""
+    if show_type:
+        result += "%4s " % ftype
+    result += fpath
+    if show_timestamp:
+        result += "  " + stamp
+    if show_md5:
+        result += "  " + md5sum
+    return result
 
 
 def format_timestamp(timestamp):
-	"""Format a timestamp into, e.g., '2009-01-31 21:19:44' format"""
-
-	return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
+    """Format a timestamp into, e.g., '2009-01-31 21:19:44' format"""
 
-class CpvValueWrapper:
-	"""Format a cpv and linewrap pre-formatted values"""
-
-	def __init__(self, cpv_width=None, width=None):
-		self.cpv_width = cpv_width
-		if width is None:
-			width = gentoolkit.CONFIG['termWidth']
-		self.twrap = TextWrapper(width=width)
-		#self.init_indent = len(self.spacer)
-
-	def _format_values(self, key, values):
-		"""Format entry values ie. USE flags, keywords,...
-
-		@type key: str
-		@param key: a pre-formatted cpv
-		@type values: list of pre-formatted strings
-		@param values: ['flag1', 'flag2',...]
-		@rtype: str
-		@return: formatted options string
-		"""
-
-		result = []
-		if self.cpv_width > 1:
-			_cpv = pp.cpv(key+'.'*(self.cpv_width-len(key)))
-			if not len(values):
-				return _cpv
-			self.twrap.initial_indent = _cpv
-			self.twrap.subsequent_indent = " " * (self.cpv_width+1)
-		else:
-			_cpv = pp.cpv(key+' ')
-			if not len(values):
-				return _cpv
-			self.twrap.initial_indent = _cpv
-			self.twrap.subsequent_indent = " " * (len(key)+1)
-
-		result.append(self.twrap.fill(values))
-		return '\n'.join(result)
+    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
 
 
+class CpvValueWrapper:
+    """Format a cpv and linewrap pre-formatted values"""
+
+    def __init__(self, cpv_width=None, width=None):
+        self.cpv_width = cpv_width
+        if width is None:
+            width = gentoolkit.CONFIG["termWidth"]
+        self.twrap = TextWrapper(width=width)
+        # self.init_indent = len(self.spacer)
+
+    def _format_values(self, key, values):
+        """Format entry values ie. USE flags, keywords,...
+
+        @type key: str
+        @param key: a pre-formatted cpv
+        @type values: list of pre-formatted strings
+        @param values: ['flag1', 'flag2',...]
+        @rtype: str
+        @return: formatted options string
+        """
+
+        result = []
+        if self.cpv_width > 1:
+            _cpv = pp.cpv(key + "." * (self.cpv_width - len(key)))
+            if not len(values):
+                return _cpv
+            self.twrap.initial_indent = _cpv
+            self.twrap.subsequent_indent = " " * (self.cpv_width + 1)
+        else:
+            _cpv = pp.cpv(key + " ")
+            if not len(values):
+                return _cpv
+            self.twrap.initial_indent = _cpv
+            self.twrap.subsequent_indent = " " * (len(key) + 1)
+
+        result.append(self.twrap.fill(values))
+        return "\n".join(result)

diff --git a/pym/gentoolkit/helpers.py b/pym/gentoolkit/helpers.py
index 236a379..15d959d 100644
--- a/pym/gentoolkit/helpers.py
+++ b/pym/gentoolkit/helpers.py
@@ -9,14 +9,14 @@
 """
 
 __all__ = (
-	'FileOwner',
-	'get_cpvs',
-	'get_installed_cpvs',
-	'get_uninstalled_cpvs',
-	'get_bintree_cpvs',
-	'uniqify',
+    "FileOwner",
+    "get_cpvs",
+    "get_installed_cpvs",
+    "get_uninstalled_cpvs",
+    "get_bintree_cpvs",
+    "uniqify",
 )
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -35,280 +35,298 @@ from gentoolkit import errors
 from gentoolkit.atom import Atom
 from gentoolkit.cpv import CPV
 from gentoolkit.versionmatch import VersionMatch
+
 # This has to be imported below to stop circular import.
-#from gentoolkit.package import Package
+# from gentoolkit.package import Package
 
 # =======
 # Classes
 # =======
 
+
 class FileOwner:
-	"""Creates a function for locating the owner of filename queries.
-
-	Example usage:
-		>>> from gentoolkit.helpers import FileOwner
-		>>> findowner = FileOwner()
-		>>> findowner(('/bin/grep',))
-		[(<Package 'sys-apps/grep-2.12'>, '/bin/grep')]
-	"""
-	def __init__(self, is_regex=False, early_out=False, printer_fn=None):
-		"""Instantiate function.
-
-		@type is_regex: bool
-		@param is_regex: funtion args are regular expressions
-		@type early_out: bool
-		@param early_out: return when first result is found (safe)
-		@type printer_fn: callable
-		@param printer_fn: If defined, will be passed useful information for
-			printing each result as it is found.
-		"""
-		self.is_regex = is_regex
-		self.early_out = early_out
-		self.printer_fn = printer_fn
-
-	def __call__(self, queries):
-		"""Run the function.
-
-		@type queries: iterable
-		@param queries: filepaths or filepath regexes
-		"""
-		query_re_string = self._prepare_search_regex(queries)
-		try:
-			query_re = re.compile(query_re_string)
-		except (TypeError, re.error) as err:
-			raise errors.GentoolkitInvalidRegex(err)
-
-		use_match = False
-		if ((self.is_regex or query_re_string.startswith(r'^\/'))
-			and '|' not in query_re_string ):
-			# If we were passed a regex or a single path starting with root,
-			# we can use re.match, else use re.search.
-			use_match = True
-
-		pkgset = get_installed_cpvs()
-
-		return self.find_owners(query_re, use_match=use_match, pkgset=pkgset)
-
-	def find_owners(self, query_re, use_match=False, pkgset=None):
-		"""Find owners and feed data to supplied output function.
-
-		@type query_re: _sre.SRE_Pattern
-		@param query_re: file regex
-		@type use_match: bool
-		@param use_match: use re.match or re.search
-		@type pkgset: iterable or None
-		@param pkgset: list of packages to look through
-		"""
-		# FIXME: Remove when lazyimport supports objects:
-		from gentoolkit.package import Package
-
-		if use_match:
-			query_fn = query_re.match
-		else:
-			query_fn = query_re.search
-
-		results = []
-		found_match = False
-		for pkg in sorted([Package(x) for x in pkgset]):
-			files = pkg.parsed_contents()
-			for cfile in files:
-				match = query_fn(cfile)
-				if match:
-					results.append((pkg, cfile))
-					if self.printer_fn is not None:
-						self.printer_fn(pkg, cfile)
-					if self.early_out:
-						found_match = True
-						break
-			if found_match:
-				break
-		return results
-
-	@staticmethod
-	def expand_abspaths(paths):
-		"""Expand any relative paths (./file) to their absolute paths.
-
-		@type paths: list
-		@param paths: file path strs
-		@rtype: list
-		@return: the original list with any relative paths expanded
-		@raise AttributeError: if paths does not have attribute 'extend'
-		"""
-
-		osp = os.path
-		expanded_paths = []
-		for path in paths:
-			if path.startswith('./'):
-				expanded_paths.append(osp.abspath(path))
-			else:
-				expanded_paths.append(path)
-
-		return expanded_paths
-
-	@staticmethod
-	def extend_realpaths(paths):
-		"""Extend a list of paths with the realpaths for any symlinks.
-
-		@type paths: list
-		@param paths: file path strs
-		@rtype: list
-		@return: the original list plus the realpaths for any symlinks
-			so long as the realpath doesn't already exist in the list
-		@raise AttributeError: if paths does not have attribute 'extend'
-		"""
-
-		osp = os.path
-		paths.extend([osp.realpath(x) for x in paths
-			if osp.realpath(x) not in paths])
-
-		return paths
-
-	def _prepare_search_regex(self, queries):
-		"""Create a regex out of the queries"""
-
-		queries = list(queries)
-		if self.is_regex:
-			return '|'.join(queries)
-		else:
-			result = []
-			# Trim trailing and multiple slashes from queries
-			slashes = re.compile(r'/+')
-			queries = self.expand_abspaths(queries)
-			queries = self.extend_realpaths(queries)
-			for query in queries:
-				query = slashes.sub('/', query).rstrip('/')
-				if query.startswith('/'):
-					query = "^%s$" % re.escape(query)
-				else:
-					query = "/%s$" % re.escape(query)
-				result.append(query)
-		result = "|".join(result)
-		return result
+    """Creates a function for locating the owner of filename queries.
+
+    Example usage:
+            >>> from gentoolkit.helpers import FileOwner
+            >>> findowner = FileOwner()
+            >>> findowner(('/bin/grep',))
+            [(<Package 'sys-apps/grep-2.12'>, '/bin/grep')]
+    """
+
+    def __init__(self, is_regex=False, early_out=False, printer_fn=None):
+        """Instantiate function.
+
+        @type is_regex: bool
+        @param is_regex: function args are regular expressions
+        @type early_out: bool
+        @param early_out: return when first result is found (safe)
+        @type printer_fn: callable
+        @param printer_fn: If defined, will be passed useful information for
+                printing each result as it is found.
+        """
+        self.is_regex = is_regex
+        self.early_out = early_out
+        self.printer_fn = printer_fn
+
+    def __call__(self, queries):
+        """Run the function.
+
+        @type queries: iterable
+        @param queries: filepaths or filepath regexes
+        """
+        query_re_string = self._prepare_search_regex(queries)
+        try:
+            query_re = re.compile(query_re_string)
+        except (TypeError, re.error) as err:
+            raise errors.GentoolkitInvalidRegex(err)
+
+        use_match = False
+        if (
+            self.is_regex or query_re_string.startswith(r"^\/")
+        ) and "|" not in query_re_string:
+            # If we were passed a regex or a single path starting with root,
+            # we can use re.match, else use re.search.
+            use_match = True
+
+        pkgset = get_installed_cpvs()
+
+        return self.find_owners(query_re, use_match=use_match, pkgset=pkgset)
+
+    def find_owners(self, query_re, use_match=False, pkgset=None):
+        """Find owners and feed data to supplied output function.
+
+        @type query_re: _sre.SRE_Pattern
+        @param query_re: file regex
+        @type use_match: bool
+        @param use_match: use re.match or re.search
+        @type pkgset: iterable or None
+        @param pkgset: list of packages to look through
+        """
+        # FIXME: Remove when lazyimport supports objects:
+        from gentoolkit.package import Package
+
+        if use_match:
+            query_fn = query_re.match
+        else:
+            query_fn = query_re.search
+
+        results = []
+        found_match = False
+        for pkg in sorted([Package(x) for x in pkgset]):
+            files = pkg.parsed_contents()
+            for cfile in files:
+                match = query_fn(cfile)
+                if match:
+                    results.append((pkg, cfile))
+                    if self.printer_fn is not None:
+                        self.printer_fn(pkg, cfile)
+                    if self.early_out:
+                        found_match = True
+                        break
+            if found_match:
+                break
+        return results
+
+    @staticmethod
+    def expand_abspaths(paths):
+        """Expand any relative paths (./file) to their absolute paths.
+
+        @type paths: list
+        @param paths: file path strs
+        @rtype: list
+        @return: the original list with any relative paths expanded
+        @raise AttributeError: if paths does not have attribute 'extend'
+        """
+
+        osp = os.path
+        expanded_paths = []
+        for path in paths:
+            if path.startswith("./"):
+                expanded_paths.append(osp.abspath(path))
+            else:
+                expanded_paths.append(path)
+
+        return expanded_paths
+
+    @staticmethod
+    def extend_realpaths(paths):
+        """Extend a list of paths with the realpaths for any symlinks.
+
+        @type paths: list
+        @param paths: file path strs
+        @rtype: list
+        @return: the original list plus the realpaths for any symlinks
+                so long as the realpath doesn't already exist in the list
+        @raise AttributeError: if paths does not have attribute 'extend'
+        """
+
+        osp = os.path
+        paths.extend([osp.realpath(x) for x in paths if osp.realpath(x) not in paths])
+
+        return paths
+
+    def _prepare_search_regex(self, queries):
+        """Create a regex out of the queries"""
+
+        queries = list(queries)
+        if self.is_regex:
+            return "|".join(queries)
+        else:
+            result = []
+            # Trim trailing and multiple slashes from queries
+            slashes = re.compile(r"/+")
+            queries = self.expand_abspaths(queries)
+            queries = self.extend_realpaths(queries)
+            for query in queries:
+                query = slashes.sub("/", query).rstrip("/")
+                if query.startswith("/"):
+                    query = "^%s$" % re.escape(query)
+                else:
+                    query = "/%s$" % re.escape(query)
+                result.append(query)
+        result = "|".join(result)
+        return result
+
 
 # =========
 # Functions
 # =========
 
+
 def get_cpvs(predicate=None, include_installed=True):
-	"""Get all packages in the Portage tree and overlays. Optionally apply a
-	predicate.
-
-	Example usage:
-		>>> from gentoolkit.helpers import get_cpvs
-		>>> len(set(get_cpvs()))
-		33518
-		>>> fn = lambda x: x.startswith('app-portage')
-		>>> len(set(get_cpvs(fn, include_installed=False)))
-		137
-
-	@type predicate: function
-	@param predicate: a function to filter the package list with
-	@type include_installed: bool
-	@param include_installed:
-		If True: Return the union of all_cpvs and all_installed_cpvs
-		If False: Return the difference of all_cpvs and all_installed_cpvs
-	@rtype: generator
-	@return: a generator that yields unsorted cat/pkg-ver strings from the
-		Portage tree
-	"""
-
-	if not predicate:
-		predicate = lambda x: x
-
-	all_cps = portage.db[portage.root]["porttree"].dbapi.cp_all()
-
-	all_cpvs = iter(x for x in chain.from_iterable(
-		portage.db[portage.root]["porttree"].dbapi.cp_list(x)
-		for x in all_cps) if predicate(x))
-
-	all_installed_cpvs = set(get_installed_cpvs(predicate))
-
-	if include_installed:
-		for cpv in all_cpvs:
-			if cpv in all_installed_cpvs:
-				all_installed_cpvs.remove(cpv)
-			yield cpv
-		for cpv in all_installed_cpvs:
-			yield cpv
-	else:
-		for cpv in all_cpvs:
-			if cpv not in all_installed_cpvs:
-				yield cpv
+    """Get all packages in the Portage tree and overlays. Optionally apply a
+    predicate.
+
+    Example usage:
+            >>> from gentoolkit.helpers import get_cpvs
+            >>> len(set(get_cpvs()))
+            33518
+            >>> fn = lambda x: x.startswith('app-portage')
+            >>> len(set(get_cpvs(fn, include_installed=False)))
+            137
+
+    @type predicate: function
+    @param predicate: a function to filter the package list with
+    @type include_installed: bool
+    @param include_installed:
+            If True: Return the union of all_cpvs and all_installed_cpvs
+            If False: Return the difference of all_cpvs and all_installed_cpvs
+    @rtype: generator
+    @return: a generator that yields unsorted cat/pkg-ver strings from the
+            Portage tree
+    """
+
+    if not predicate:
+        predicate = lambda x: x
+
+    all_cps = portage.db[portage.root]["porttree"].dbapi.cp_all()
+
+    all_cpvs = iter(
+        x
+        for x in chain.from_iterable(
+            portage.db[portage.root]["porttree"].dbapi.cp_list(x) for x in all_cps
+        )
+        if predicate(x)
+    )
+
+    all_installed_cpvs = set(get_installed_cpvs(predicate))
+
+    if include_installed:
+        for cpv in all_cpvs:
+            if cpv in all_installed_cpvs:
+                all_installed_cpvs.remove(cpv)
+            yield cpv
+        for cpv in all_installed_cpvs:
+            yield cpv
+    else:
+        for cpv in all_cpvs:
+            if cpv not in all_installed_cpvs:
+                yield cpv
 
 
 get_uninstalled_cpvs = partial(get_cpvs, include_installed=False)
 
 
 def get_installed_cpvs(predicate=None):
-	"""Get all installed packages. Optionally apply a predicate.
+    """Get all installed packages. Optionally apply a predicate.
 
-	@type predicate: function
-	@param predicate: a function to filter the package list with
-	@rtype: generator
-	@return: a generator that yields unsorted installed cat/pkg-ver strings
-		from VARDB
-	"""
+    @type predicate: function
+    @param predicate: a function to filter the package list with
+    @rtype: generator
+    @return: a generator that yields unsorted installed cat/pkg-ver strings
+            from VARDB
+    """
 
-	if not predicate:
-		predicate = lambda x: x
+    if not predicate:
+        predicate = lambda x: x
 
-	installed_cps = portage.db[portage.root]["vartree"].dbapi.cp_all()
+    installed_cps = portage.db[portage.root]["vartree"].dbapi.cp_all()
 
-	installed_cpvs = iter(x for x in chain.from_iterable(
-		portage.db[portage.root]["vartree"].dbapi.cp_list(x)
-		for x in installed_cps) if predicate(x))
+    installed_cpvs = iter(
+        x
+        for x in chain.from_iterable(
+            portage.db[portage.root]["vartree"].dbapi.cp_list(x) for x in installed_cps
+        )
+        if predicate(x)
+    )
 
-	for cpv in installed_cpvs:
-		yield cpv
+    for cpv in installed_cpvs:
+        yield cpv
 
 
 def get_bintree_cpvs(predicate=None):
-	"""Get all binary packages available. Optionally apply a predicate.
+    """Get all binary packages available. Optionally apply a predicate.
 
-	@type predicate: function
-	@param predicate: a function to filter the package list with
-	@rtype: generator
-	@return: a generator that yields unsorted binary package cat/pkg-ver strings
-		from BINDB
-	"""
+    @type predicate: function
+    @param predicate: a function to filter the package list with
+    @rtype: generator
+    @return: a generator that yields unsorted binary package cat/pkg-ver strings
+            from BINDB
+    """
 
-	if not predicate:
-		predicate = lambda x: x
+    if not predicate:
+        predicate = lambda x: x
 
-	installed_cps = portage.db[portage.root]["bintree"].dbapi.cp_all()
+    installed_cps = portage.db[portage.root]["bintree"].dbapi.cp_all()
 
-	installed_cpvs = iter(x for x in chain.from_iterable(
-		portage.db[portage.root]["bintree"].dbapi.cp_list(x)
-		for x in installed_cps) if predicate(x))
+    installed_cpvs = iter(
+        x
+        for x in chain.from_iterable(
+            portage.db[portage.root]["bintree"].dbapi.cp_list(x) for x in installed_cps
+        )
+        if predicate(x)
+    )
 
-	for cpv in installed_cpvs:
-		yield cpv
+    for cpv in installed_cpvs:
+        yield cpv
 
 
 def print_file(path):
-	"""Display the contents of a file."""
+    """Display the contents of a file."""
 
-	with open(_unicode_encode(path, encoding=_encodings['fs']), mode="rb") as open_file:
-		lines = open_file.read()
-		pp.uprint(lines.strip())
+    with open(_unicode_encode(path, encoding=_encodings["fs"]), mode="rb") as open_file:
+        lines = open_file.read()
+        pp.uprint(lines.strip())
 
 
 def print_sequence(seq):
-	"""Print every item of a sequence."""
+    """Print every item of a sequence."""
 
-	for item in seq:
-		pp.uprint(item)
+    for item in seq:
+        pp.uprint(item)
 
 
 def uniqify(seq, preserve_order=True):
-	"""Return a uniqified list. Optionally preserve order."""
+    """Return a uniqified list. Optionally preserve order."""
+
+    if preserve_order:
+        seen = set()
+        result = [x for x in seq if x not in seen and not seen.add(x)]
+    else:
+        result = list(set(seq))
 
-	if preserve_order:
-		seen = set()
-		result = [x for x in seq if x not in seen and not seen.add(x)]
-	else:
-		result = list(set(seq))
+    return result
 
-	return result
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/imlate/imlate.py b/pym/gentoolkit/imlate/imlate.py
index 3e22791..87b8ef1 100755
--- a/pym/gentoolkit/imlate/imlate.py
+++ b/pym/gentoolkit/imlate/imlate.py
@@ -7,8 +7,8 @@
 __version__ = "git"
 
 # works just with stable keywords!
-MAIN_ARCH = "auto" # can be overridden by -m ARCH
-TARGET_ARCH = "auto" # can be overridden by -t ARCH
+MAIN_ARCH = "auto"  # can be overridden by -m ARCH
+TARGET_ARCH = "auto"  # can be overridden by -t ARCH
 # auto means e.g.:
 # MAIN_ARCH = amd64
 # TARGET_ARCH = ~amd64
@@ -25,6 +25,7 @@ from os import stat
 from time import time
 from xml.dom import minidom, NotFoundErr
 from xml.parsers.expat import ExpatError
+
 # TODO: just import needed stuff to safe memory/time and maybe use "as foo"
 import portage
 import portage.versions
@@ -33,450 +34,562 @@ from optparse import OptionParser
 from time import gmtime, strftime
 
 # override/change portage module settings
-def _portage_settings( var, value, settings = None ):
-	if not settings:
-		settings = portage.settings
 
-	settings.unlock()
-	settings[var] = value
-	# backup_changes is very important since it can cause trouble,
-	# if we do not backup our changes!
-	settings.backup_changes( var )
-	settings.lock()
+
+def _portage_settings(var, value, settings=None):
+    if not settings:
+        settings = portage.settings
+
+    settings.unlock()
+    settings[var] = value
+    # backup_changes is very important since it can cause trouble,
+    # if we do not backup our changes!
+    settings.backup_changes(var)
+    settings.lock()
+
 
 # add stuff to our imlate dict
-def _add_ent( imlate, cat, pkg, ver, our_ver ):
-	if not cat in list(imlate.keys()):
-		imlate[cat] = {}
-	if not pkg in list(imlate[cat].keys()):
-		imlate[cat][pkg] = []
 
-	imlate[cat][pkg].append( ver )
-	imlate[cat][pkg].append( our_ver )
 
-	return imlate
+def _add_ent(imlate, cat, pkg, ver, our_ver):
+    if not cat in list(imlate.keys()):
+        imlate[cat] = {}
+    if not pkg in list(imlate[cat].keys()):
+        imlate[cat][pkg] = []
+
+    imlate[cat][pkg].append(ver)
+    imlate[cat][pkg].append(our_ver)
+
+    return imlate
+
+
+def _fill(width, line, fill=" "):
+    while len(line) < width:
+        line = "%s%s" % (str(line), str(fill))
+    return line
 
-def _fill( width, line, fill = " " ):
-	while len( line ) < width:
-		line = "%s%s" % ( str( line ), str( fill ) )
-	return line
 
 # create a hopefully pretty result
-def show_result( conf, pkgs ):
-	# X - len(colX) = space to fill
-	col1 = -1
-	col2 = -1
-	for cat in pkgs:
-		for pkg in pkgs[cat]:
-			col1 = max(col1, len(("%s/%s" % (cat, pkg))))
-			col2 = max(col2, len(pkgs[cat][pkg][1]))
-	col1 += 1
-	col2 += 1
-
-	_header = "%s candidates for 'gentoo' on '%s'"
-	_helper = "%s%s%s" % (_fill(col1, "category/package[:SLOT])"),
-						  _fill(col2, "our version"), "best version")
-	_cand = ""
-	header = ""
-
-	if conf["FILE"] == "stdout":
-		out = stdout
-	elif conf["FILE"] == "stderr":
-		out = stderr
-	else:
-		out = open( conf["FILE"], "w" )
-
-	if conf["STABLE"] and conf["KEYWORD"]:
-		_cand = "%i Stable and %i Keyword(~)" % ( conf["STABLE_SUM"],
-												conf["KEYWORD_SUM"] )
-	elif conf["STABLE"]:
-		_cand = "%i Stable" % conf["STABLE_SUM"]
-	elif conf["KEYWORD"]:
-		_cand = "%i Keyword(~)" % conf["KEYWORD_SUM"]
-
-	header = _header % ( _cand, conf["MAIN_ARCH"] )
-
-	print("Generated on: %s" % conf["TIME"], file=out)
-	print(_fill( len( header ), "", "=" ), file=out)
-	print(header, file=out)
-	print(_fill( len( header ), "", "=" ), file=out)
-	print(file=out)
-
-	print(_helper, file=out)
-	print(_fill( len( _helper ), "", "-" ), file=out)
-
-	for cat in sorted( pkgs.keys() ):
-		for pkg in sorted( pkgs[cat].keys() ):
-			print("%s%s%s" % (_fill(col1, ("%s/%s" % (cat, pkg))),
-							  _fill(col2, pkgs[cat][pkg][1]),
-							  pkgs[cat][pkg][0] ), file=out)
-
-	if conf["FILE"] != "stdout":
-		out.close()
+
+
+def show_result(conf, pkgs):
+    # X - len(colX) = space to fill
+    col1 = -1
+    col2 = -1
+    for cat in pkgs:
+        for pkg in pkgs[cat]:
+            col1 = max(col1, len(("%s/%s" % (cat, pkg))))
+            col2 = max(col2, len(pkgs[cat][pkg][1]))
+    col1 += 1
+    col2 += 1
+
+    _header = "%s candidates for 'gentoo' on '%s'"
+    _helper = "%s%s%s" % (
+        _fill(col1, "category/package[:SLOT])"),
+        _fill(col2, "our version"),
+        "best version",
+    )
+    _cand = ""
+    header = ""
+
+    if conf["FILE"] == "stdout":
+        out = stdout
+    elif conf["FILE"] == "stderr":
+        out = stderr
+    else:
+        out = open(conf["FILE"], "w")
+
+    if conf["STABLE"] and conf["KEYWORD"]:
+        _cand = "%i Stable and %i Keyword(~)" % (
+            conf["STABLE_SUM"],
+            conf["KEYWORD_SUM"],
+        )
+    elif conf["STABLE"]:
+        _cand = "%i Stable" % conf["STABLE_SUM"]
+    elif conf["KEYWORD"]:
+        _cand = "%i Keyword(~)" % conf["KEYWORD_SUM"]
+
+    header = _header % (_cand, conf["MAIN_ARCH"])
+
+    print("Generated on: %s" % conf["TIME"], file=out)
+    print(_fill(len(header), "", "="), file=out)
+    print(header, file=out)
+    print(_fill(len(header), "", "="), file=out)
+    print(file=out)
+
+    print(_helper, file=out)
+    print(_fill(len(_helper), "", "-"), file=out)
+
+    for cat in sorted(pkgs.keys()):
+        for pkg in sorted(pkgs[cat].keys()):
+            print(
+                "%s%s%s"
+                % (
+                    _fill(col1, ("%s/%s" % (cat, pkg))),
+                    _fill(col2, pkgs[cat][pkg][1]),
+                    pkgs[cat][pkg][0],
+                ),
+                file=out,
+            )
+
+    if conf["FILE"] != "stdout":
+        out.close()
+
 
 def _get_metadata(metadata, element, tag):
-	values = []
-
-	try:
-		metadatadom = minidom.parse(metadata)
-	except ExpatError as e:
-		raise ExpatError("%s: %s" % (metadata, e,))
-
-	try:
-		elements = metadatadom.getElementsByTagName(element)
-		if not elements:
-			return values
-	except NotFoundErr:
-		return values
-
-	try:
-		for _element in elements:
-			node = _element.getElementsByTagName(tag)
-
-			if tag == "herd" and (not node or not node[0].childNodes):
-#				print >> stderr, "'%s' is missing a <herd> tag or it is empty," % metadata
-#				print >> stderr, "please file a bug at https://bugs.gentoo.org and refer to http://www.gentoo.org/proj/en/devrel/handbook/handbook.xml?part=2&chap=4"
-				values.append("no-herd")
-				continue
-
-			try:
-				values.append(node[0].childNodes[0].data)
-			except IndexError:
-				pass
-	except NotFoundErr:
-		raise NotFoundErr("%s: Malformed input: missing 'flag' tag(s)" % (metadata))
-
-	metadatadom.unlink()
-	return values
+    values = []
+
+    try:
+        metadatadom = minidom.parse(metadata)
+    except ExpatError as e:
+        raise ExpatError(
+            "%s: %s"
+            % (
+                metadata,
+                e,
+            )
+        )
+
+    try:
+        elements = metadatadom.getElementsByTagName(element)
+        if not elements:
+            return values
+    except NotFoundErr:
+        return values
+
+    try:
+        for _element in elements:
+            node = _element.getElementsByTagName(tag)
+
+            if tag == "herd" and (not node or not node[0].childNodes):
+                # 				print >> stderr, "'%s' is missing a <herd> tag or it is empty," % metadata
+                # 				print >> stderr, "please file a bug at https://bugs.gentoo.org and refer to http://www.gentoo.org/proj/en/devrel/handbook/handbook.xml?part=2&chap=4"
+                values.append("no-herd")
+                continue
+
+            try:
+                values.append(node[0].childNodes[0].data)
+            except IndexError:
+                pass
+    except NotFoundErr:
+        raise NotFoundErr("%s: Malformed input: missing 'flag' tag(s)" % (metadata))
+
+    metadatadom.unlink()
+    return values
+
 
 def is_maintainer(maintainer, metadata):
-	data = []
+    data = []
+
+    if maintainer == None:
+        return True
 
-	if maintainer == None:
-		return True
+    mtainer = maintainer.split(",")
 
-	mtainer = maintainer.split(",")
+    data = _get_metadata(metadata, "maintainer", "email")
 
-	data = _get_metadata(metadata, "maintainer", "email")
+    if not data and len(maintainer) == 0:
+        return True
+    elif not data and len(maintainer) > 0:
+        return False
+    else:
+        for addy in data:
+            for contact in mtainer:
+                if addy == contact:
+                    return True
+                if addy.startswith(contact):
+                    return True
+    return False
 
-	if not data and len(maintainer) == 0:
-		return True
-	elif not data and len(maintainer) > 0:
-		return False
-	else:
-		for addy in data:
-			for contact in mtainer:
-				if addy == contact:
-					return True
-				if addy.startswith(contact):
-					return True
-	return False
 
 def is_herd(herd, metadata):
-	data = []
+    data = []
 
-	if herd == None:
-		return True
+    if herd == None:
+        return True
 
-	hrd = herd.split(",")
-	data = _get_metadata(metadata, "pkgmetadata", "herd")
+    hrd = herd.split(",")
+    data = _get_metadata(metadata, "pkgmetadata", "herd")
 
-	if not data and len(herd) == 0:
-		return True
-	elif not data and len(herd) > 0:
-		return False
-	else:
-		for hd in data:
-			for hd2 in hrd:
-				if hd == hd2:
-					return True
-				if hd.startswith(hd2):
-					return True
+    if not data and len(herd) == 0:
+        return True
+    elif not data and len(herd) > 0:
+        return False
+    else:
+        for hd in data:
+            for hd2 in hrd:
+                if hd == hd2:
+                    return True
+                if hd.startswith(hd2):
+                    return True
 
-	return False
+    return False
 
 
 # fetch a list of arch (just stable) packages
 # -* is important to be sure that just arch is used
-def get_packages( conf ):
-	_pkgs = {}
+def get_packages(conf):
+    _pkgs = {}
+
+    _portage_settings(
+        "ACCEPT_KEYWORDS", ("-* %s" % str(conf["TARGET_ARCH"])), conf["portdb"].settings
+    )
 
-	_portage_settings( "ACCEPT_KEYWORDS", ( "-* %s" % str( conf["TARGET_ARCH"] ) ),
-					conf["portdb"].settings )
+    for cp in conf["portdb"].dbapi.cp_all():
+        cpvrs = []
+        slots = {}
 
-	for cp in conf["portdb"].dbapi.cp_all():
-		cpvrs = []
-		slots = {}
+        if conf["USER_PKGS"]:
+            if not cp in conf["USER_PKGS"] and not basename(cp) in conf["USER_PKGS"]:
+                continue
 
-		if conf["USER_PKGS"]:
-			if not cp in conf["USER_PKGS"] and not basename(cp) in conf["USER_PKGS"]:
-				continue
+        # None is important to match also on empty string
+        if conf["MAINTAINER"] != None:
+            if not is_maintainer(
+                conf["MAINTAINER"], join(conf["PORTDIR"], cp, "metadata.xml")
+            ):
+                continue
+        if conf["HERD"] != None:
+            if not is_herd(conf["HERD"], join(conf["PORTDIR"], cp, "metadata.xml")):
+                continue
 
-		# None is important to match also on empty string
-		if conf["MAINTAINER"] != None:
-			if not is_maintainer(conf["MAINTAINER"], join(conf["PORTDIR"], cp, "metadata.xml")):
-				continue
-		if conf["HERD"] != None:
-			if not is_herd(conf["HERD"], join(conf["PORTDIR"], cp, "metadata.xml")):
-				continue
+        cpvrs = conf["portdb"].dbapi.match(cp)
 
-		cpvrs = conf["portdb"].dbapi.match( cp )
+        for cpvr in cpvrs:
+            slot = conf["portdb"].dbapi.aux_get(cpvr, ["SLOT"])[0]
+            if not slot in slots:
+                slots[slot] = []
+            slots[slot].append(cpvr)
 
-		for cpvr in cpvrs:
-			slot = conf["portdb"].dbapi.aux_get( cpvr, ["SLOT"] )[0]
-			if not slot in slots:
-				slots[slot] = []
-			slots[slot].append(cpvr)
+        for slot in sorted(slots):
+            cpvr = portage.versions.best(slots[slot])
 
-		for slot in sorted(slots):
-			cpvr = portage.versions.best( slots[slot] )
+            if cpvr:
+                (cat, pkg, ver, rev) = portage.versions.catpkgsplit(cpvr)
 
-			if cpvr:
-				( cat, pkg, ver, rev ) = portage.versions.catpkgsplit( cpvr )
+                if not cat in list(_pkgs.keys()):
+                    _pkgs[cat] = {}
+                if not pkg in list(_pkgs[cat].keys()):
+                    _pkgs[cat][pkg] = []
 
-				if not cat in list(_pkgs.keys()):
-					_pkgs[cat] = {}
-				if not pkg in list(_pkgs[cat].keys()):
-					_pkgs[cat][pkg] = []
+                if rev != "r0":
+                    ver = "%s-%s" % (ver, rev)
 
-				if rev != "r0":
-					ver = "%s-%s" % ( ver, rev )
+                _pkgs[cat][pkg].append(ver)
 
-				_pkgs[cat][pkg].append( ver )
+    return _pkgs
 
-	return _pkgs
 
 # compare get_packages() against MAIN_ARCH
-def get_imlate( conf, pkgs ):
-	_portage_settings( "ACCEPT_KEYWORDS", ( "-* %s" % str( conf["MAIN_ARCH"] ) ),
-					conf["portdb"].settings )
-
-	stable = str( conf["MAIN_ARCH"].lstrip("~") )
-	testing = "~%s" % stable
-	exclude = "-%s" % stable
-	exclude_all = "-*"
-
-	imlate = {}
-
-	for cat in sorted( pkgs.keys() ):
-		for pkg in sorted( pkgs[cat].keys() ):
-			for vr in pkgs[cat][pkg]:
-				cpvr = ""
-				abs_pkg = ""
-				kwds = ""
-				our = ""
-				our_ver = ""
-				mtime = 0
-				slot = 0
-
-				# 0 = none(default), 1 = testing(~arch), 2 = stable(arch),
-				# 3 = exclude(-arch), 4 = exclude_all(-*)
-				# -* would be overridden by ~arch or arch
-				kwd_type = 0
-
-				cpvr = "%s/%s-%s" % ( cat, pkg, vr )
-
-				# absolute ebuild path for mtime check
-				abs_pkg = join( conf["PORTDIR"], cat, pkg, basename( cpvr ) )
-				abs_pkg = "%s.ebuild" % str( abs_pkg )
-
-				kwds = conf["portdb"].dbapi.aux_get( cpvr, ["KEYWORDS"] )[0]
-
-				# FIXME: %s is bad.. maybe even cast it, else there are issues because its unicode
-				slot = ":%s" % conf["portdb"].dbapi.aux_get( cpvr, ["SLOT"] )[0]
-				if slot == ":0":
-					slot = ""
-
-				# sorted() to keep the right order
-				# e.g. -* first, -arch second, arch third and ~arch fourth
-				# -* -foo ~arch
-				# example: -* would be overridden by ~arch
-				for kwd in sorted( kwds.split() ):
-					if kwd == stable:
-						kwd_type = 2
-						break
-					elif kwd == exclude:
-						kwd_type = 3
-						break
-					elif kwd == exclude_all:
-						kwd_type = 4
-					elif kwd == testing:
-						kwd_type = 1
-						break
-
-				# ignore -arch and already stabilized packages
-				if kwd_type == 3 or kwd_type == 2:
-					continue
-				# drop packages with -* and without ~arch or arch
-				# even if there is another version which includes arch or ~arch
-				if kwd_type == 4:
-					continue
-				# drop "stable candidates" with mtime < 30 days
-				# Shall we use gmtime/UTC here?
-				if kwd_type == 1:
-					mtime = int( ( time() - stat( abs_pkg ).st_mtime ) / 60 / 60 / 24 )
-					if mtime < conf["MTIME"]:
-						continue
-
-				# look for an existing stable version
-				our = portage.versions.best( conf["portdb"].dbapi.match( "%s/%s%s" % ( cat, pkg, slot ) ) )
-				if our:
-					_foo = portage.versions.pkgsplit( our )
-					our_ver = _foo[1]
-					if _foo[2] != "r0":
-						our_ver = "%s-%s" % ( our_ver, _foo[2] )
-				else:
-					our_ver = ""
-
-				# we just need the version if > our_ver
-				if our_ver:
-					if portage.versions.vercmp( our_ver, vr ) >= 0:
-						continue
-
-				if kwd_type == 1 and conf["STABLE"]:
-					imlate = _add_ent( imlate, cat, ("%s%s" % (pkg, slot)), vr, our_ver )
-					conf["STABLE_SUM"] += 1
-				elif kwd_type == 0 and conf["KEYWORD"]:
-					conf["KEYWORD_SUM"] += 1
-					imlate = _add_ent( imlate, cat, ( "~%s%s" % (pkg, slot) ),
-									vr, our_ver )
-
-	return imlate
+
+
+def get_imlate(conf, pkgs):
+    _portage_settings(
+        "ACCEPT_KEYWORDS", ("-* %s" % str(conf["MAIN_ARCH"])), conf["portdb"].settings
+    )
+
+    stable = str(conf["MAIN_ARCH"].lstrip("~"))
+    testing = "~%s" % stable
+    exclude = "-%s" % stable
+    exclude_all = "-*"
+
+    imlate = {}
+
+    for cat in sorted(pkgs.keys()):
+        for pkg in sorted(pkgs[cat].keys()):
+            for vr in pkgs[cat][pkg]:
+                cpvr = ""
+                abs_pkg = ""
+                kwds = ""
+                our = ""
+                our_ver = ""
+                mtime = 0
+                slot = 0
+
+                # 0 = none(default), 1 = testing(~arch), 2 = stable(arch),
+                # 3 = exclude(-arch), 4 = exclude_all(-*)
+                # -* would be overridden by ~arch or arch
+                kwd_type = 0
+
+                cpvr = "%s/%s-%s" % (cat, pkg, vr)
+
+                # absolute ebuild path for mtime check
+                abs_pkg = join(conf["PORTDIR"], cat, pkg, basename(cpvr))
+                abs_pkg = "%s.ebuild" % str(abs_pkg)
+
+                kwds = conf["portdb"].dbapi.aux_get(cpvr, ["KEYWORDS"])[0]
+
+                # FIXME: %s is bad.. maybe even cast it, else there are issues because its unicode
+                slot = ":%s" % conf["portdb"].dbapi.aux_get(cpvr, ["SLOT"])[0]
+                if slot == ":0":
+                    slot = ""
+
+                # sorted() to keep the right order
+                # e.g. -* first, -arch second, arch third and ~arch fourth
+                # -* -foo ~arch
+                # example: -* would be overridden by ~arch
+                for kwd in sorted(kwds.split()):
+                    if kwd == stable:
+                        kwd_type = 2
+                        break
+                    elif kwd == exclude:
+                        kwd_type = 3
+                        break
+                    elif kwd == exclude_all:
+                        kwd_type = 4
+                    elif kwd == testing:
+                        kwd_type = 1
+                        break
+
+                # ignore -arch and already stabilized packages
+                if kwd_type == 3 or kwd_type == 2:
+                    continue
+                # drop packages with -* and without ~arch or arch
+                # even if there is another version which includes arch or ~arch
+                if kwd_type == 4:
+                    continue
+                # drop "stable candidates" with mtime < 30 days
+                # Shall we use gmtime/UTC here?
+                if kwd_type == 1:
+                    mtime = int((time() - stat(abs_pkg).st_mtime) / 60 / 60 / 24)
+                    if mtime < conf["MTIME"]:
+                        continue
+
+                # look for an existing stable version
+                our = portage.versions.best(
+                    conf["portdb"].dbapi.match("%s/%s%s" % (cat, pkg, slot))
+                )
+                if our:
+                    _foo = portage.versions.pkgsplit(our)
+                    our_ver = _foo[1]
+                    if _foo[2] != "r0":
+                        our_ver = "%s-%s" % (our_ver, _foo[2])
+                else:
+                    our_ver = ""
+
+                # we just need the version if > our_ver
+                if our_ver:
+                    if portage.versions.vercmp(our_ver, vr) >= 0:
+                        continue
+
+                if kwd_type == 1 and conf["STABLE"]:
+                    imlate = _add_ent(imlate, cat, ("%s%s" % (pkg, slot)), vr, our_ver)
+                    conf["STABLE_SUM"] += 1
+                elif kwd_type == 0 and conf["KEYWORD"]:
+                    conf["KEYWORD_SUM"] += 1
+                    imlate = _add_ent(imlate, cat, ("~%s%s" % (pkg, slot)), vr, our_ver)
+
+    return imlate
+
 
 # fetch portage related settings
-def get_settings( conf = None ):
-	if not isinstance( conf, dict ) and conf:
-		raise TypeError("conf must be dict() or None")
-	if not conf:
-		conf = {}
-
-	# TODO: maybe we should improve it a bit ;)
-	mysettings = portage.config( config_incrementals = portage.const.INCREMENTALS, local_config = False )
-
-	if conf["MAIN_ARCH"] == "auto":
-		conf["MAIN_ARCH"] = "%s" % mysettings["ACCEPT_KEYWORDS"].split(" ")[0].lstrip("~")
-	if conf["TARGET_ARCH"] == "auto":
-		conf["TARGET_ARCH"] = "~%s" % mysettings["ACCEPT_KEYWORDS"].split(" ")[0].lstrip("~")
-
-	# TODO: exclude overlay categories from check
-	if conf["CATEGORIES"]:
-		_mycats = []
-		for _cat in conf["CATEGORIES"].split(","):
-			_cat = _cat.strip()
-			_mycats.append(_cat )
-			if _cat not in mysettings.categories:
-				raise ValueError("invalid category for -C switch '%s'" % _cat)
-		mysettings.categories = _mycats
-
-	# maybe thats not necessary because we override porttrees below..
-	_portage_settings( "PORTDIR_OVERLAY", "", mysettings )
-	trees = portage.create_trees()
-	trees["/"]["porttree"].settings = mysettings
-	portdb = trees["/"]["porttree"]
-	portdb.dbapi.settings = mysettings
-	portdb.dbapi.porttrees = [portage.portdb.porttree_root]
-	# does it make sense to remove _all_ useless stuff or just leave it as it is?
-	#portdb.dbapi._aux_cache_keys.clear()
-	#portdb.dbapi._aux_cache_keys.update(["EAPI", "KEYWORDS", "SLOT"])
-
-	conf["PORTDIR"] = portage.settings["PORTDIR"]
-	conf["portdb"] = portdb
-
-	return conf
+
+
+def get_settings(conf=None):
+    if not isinstance(conf, dict) and conf:
+        raise TypeError("conf must be dict() or None")
+    if not conf:
+        conf = {}
+
+    # TODO: maybe we should improve it a bit ;)
+    mysettings = portage.config(
+        config_incrementals=portage.const.INCREMENTALS, local_config=False
+    )
+
+    if conf["MAIN_ARCH"] == "auto":
+        conf["MAIN_ARCH"] = "%s" % mysettings["ACCEPT_KEYWORDS"].split(" ")[0].lstrip(
+            "~"
+        )
+    if conf["TARGET_ARCH"] == "auto":
+        conf["TARGET_ARCH"] = "~%s" % mysettings["ACCEPT_KEYWORDS"].split(" ")[
+            0
+        ].lstrip("~")
+
+    # TODO: exclude overlay categories from check
+    if conf["CATEGORIES"]:
+        _mycats = []
+        for _cat in conf["CATEGORIES"].split(","):
+            _cat = _cat.strip()
+            _mycats.append(_cat)
+            if _cat not in mysettings.categories:
+                raise ValueError("invalid category for -C switch '%s'" % _cat)
+        mysettings.categories = _mycats
+
+    # maybe thats not necessary because we override porttrees below..
+    _portage_settings("PORTDIR_OVERLAY", "", mysettings)
+    trees = portage.create_trees()
+    trees["/"]["porttree"].settings = mysettings
+    portdb = trees["/"]["porttree"]
+    portdb.dbapi.settings = mysettings
+    portdb.dbapi.porttrees = [portage.portdb.porttree_root]
+    # does it make sense to remove _all_ useless stuff or just leave it as it is?
+    # portdb.dbapi._aux_cache_keys.clear()
+    # portdb.dbapi._aux_cache_keys.update(["EAPI", "KEYWORDS", "SLOT"])
+
+    conf["PORTDIR"] = portage.settings["PORTDIR"]
+    conf["portdb"] = portdb
+
+    return conf
 
 
 # just for standalone
 def main():
-	conf = {}
-	pkgs = {}
-
-	parser = OptionParser( version = "%prog " + __version__ )
-	parser.usage = "%prog [options] [category/package] ..."
-	parser.disable_interspersed_args()
-
-	parser.add_option( "-f", "--file", dest = "filename", action = "store", type = "string",
-			help = "write result into FILE [default: %default]", metavar = "FILE", default = "stdout" )
-	parser.add_option( "-m", "--main", dest = "main_arch", action = "store", type = "string",
-			help = "set main ARCH (e.g. your arch) [default: %default]", metavar = "ARCH", default = MAIN_ARCH )
-	parser.add_option( "-t", "--target", dest = "target_arch", action = "store", type = "string",
-			help = "set target ARCH (e.g. x86) [default: %default]", metavar = "ARCH", default = TARGET_ARCH )
-	parser.add_option( "--mtime", dest = "mtime", action = "store", type = "int",
-			help = "set minimum MTIME in days [default: %default]", metavar = "MTIME", default = 30 )
-
-	# TODO: leave a good comment here (about True/False) :)
-	parser.add_option( "-s", "--stable", dest = "stable", action = "store_true", default = False,
-			help = "just show stable candidates (e.g. -s and -k is the default result) [default: True]" )
-	parser.add_option( "-k", "--keyword", dest = "keyword", action = "store_true", default = False,
-			help = "just show keyword candidates (e.g. -s and -k is the default result) [default: True]" )
-
-	parser.add_option( "-M", "--maintainer", dest = "maintainer", action = "store", type = "string",
-			help = "Show only packages from the specified maintainer", metavar = "MAINTAINER", default = None)
-
-	parser.add_option( "-H", "--herd", dest = "herd", action = "store", type = "string",
-			help = "Show only packages from the specified herd", metavar = "HERD", default = None)
-
-#	# EXPERIMENTAL
-#	parser.add_option( "-e", "--experimental", dest = "experimental", action = "store_true", default = False,
-#			help = "enables experimental functions/features (have a look for # EXPERIMENTAL comments in the source) [default: %default]" )
-
-	parser.add_option( "-C", "--category", "--categories", dest = "categories", action = "store", default = None,
-			metavar = "CATEGORIES",
-			help = "just check in the specified category/categories (comma separated) [default: %default]")
-
-	( options, args ) = parser.parse_args()
-
-	if len( args ) > 0:
-		conf["USER_PKGS"] = args
-	else:
-		conf["USER_PKGS"] = []
-
-	# cleanup optparse
-	try:
-		parser.destroy()
-	except AttributeError:
-		# to be at least python 2.4 compatible
-		del parser._short_opt
-		del parser._long_opt
-		del parser.defaults
-
-	# generated timestamp (UTC)
-	conf["TIME"] = strftime( "%a %b %d %H:%M:%S %Z %Y", gmtime() )
-
-	# package counter
-	conf["KEYWORD_SUM"] = 0
-	conf["STABLE_SUM"] = 0
-
-	if not options.main_arch in portage.archlist and options.main_arch != "auto":
-		raise ValueError("invalid MAIN ARCH defined!")
-	if not options.target_arch in portage.archlist and options.target_arch != "auto":
-		raise ValueError("invalid TARGET ARCH defined!")
-
-	conf["MAIN_ARCH"] = options.main_arch
-	conf["TARGET_ARCH"] = options.target_arch
-
-	conf["FILE"] = options.filename
-	conf["MTIME"] = options.mtime
-
-	if not options.stable and not options.keyword:
-		conf["STABLE"] = True
-		conf["KEYWORD"] = True
-	else:
-		conf["STABLE"] = options.stable
-		conf["KEYWORD"] = options.keyword
-
-#	conf["EXPERIMENTAL"] = options.experimental
-	conf["CATEGORIES"] = options.categories
-
-	conf["MAINTAINER"] = options.maintainer
-	conf["HERD"] = options.herd
-
-	# append to our existing
-	conf = get_settings( conf )
-	pkgs = get_packages( conf )
-	pkgs = get_imlate( conf, pkgs )
-
-	show_result( conf, pkgs )
+    conf = {}
+    pkgs = {}
+
+    parser = OptionParser(version="%prog " + __version__)
+    parser.usage = "%prog [options] [category/package] ..."
+    parser.disable_interspersed_args()
+
+    parser.add_option(
+        "-f",
+        "--file",
+        dest="filename",
+        action="store",
+        type="string",
+        help="write result into FILE [default: %default]",
+        metavar="FILE",
+        default="stdout",
+    )
+    parser.add_option(
+        "-m",
+        "--main",
+        dest="main_arch",
+        action="store",
+        type="string",
+        help="set main ARCH (e.g. your arch) [default: %default]",
+        metavar="ARCH",
+        default=MAIN_ARCH,
+    )
+    parser.add_option(
+        "-t",
+        "--target",
+        dest="target_arch",
+        action="store",
+        type="string",
+        help="set target ARCH (e.g. x86) [default: %default]",
+        metavar="ARCH",
+        default=TARGET_ARCH,
+    )
+    parser.add_option(
+        "--mtime",
+        dest="mtime",
+        action="store",
+        type="int",
+        help="set minimum MTIME in days [default: %default]",
+        metavar="MTIME",
+        default=30,
+    )
+
+    # TODO: leave a good comment here (about True/False) :)
+    parser.add_option(
+        "-s",
+        "--stable",
+        dest="stable",
+        action="store_true",
+        default=False,
+        help="just show stable candidates (e.g. -s and -k is the default result) [default: True]",
+    )
+    parser.add_option(
+        "-k",
+        "--keyword",
+        dest="keyword",
+        action="store_true",
+        default=False,
+        help="just show keyword candidates (e.g. -s and -k is the default result) [default: True]",
+    )
+
+    parser.add_option(
+        "-M",
+        "--maintainer",
+        dest="maintainer",
+        action="store",
+        type="string",
+        help="Show only packages from the specified maintainer",
+        metavar="MAINTAINER",
+        default=None,
+    )
+
+    parser.add_option(
+        "-H",
+        "--herd",
+        dest="herd",
+        action="store",
+        type="string",
+        help="Show only packages from the specified herd",
+        metavar="HERD",
+        default=None,
+    )
+
+    # 	# EXPERIMENTAL
+    # 	parser.add_option( "-e", "--experimental", dest = "experimental", action = "store_true", default = False,
+    # 			help = "enables experimental functions/features (have a look for # EXPERIMENTAL comments in the source) [default: %default]" )
+
+    parser.add_option(
+        "-C",
+        "--category",
+        "--categories",
+        dest="categories",
+        action="store",
+        default=None,
+        metavar="CATEGORIES",
+        help="just check in the specified category/categories (comma separated) [default: %default]",
+    )
+
+    (options, args) = parser.parse_args()
+
+    if len(args) > 0:
+        conf["USER_PKGS"] = args
+    else:
+        conf["USER_PKGS"] = []
+
+    # cleanup optparse
+    try:
+        parser.destroy()
+    except AttributeError:
+        # to be at least python 2.4 compatible
+        del parser._short_opt
+        del parser._long_opt
+        del parser.defaults
+
+    # generated timestamp (UTC)
+    conf["TIME"] = strftime("%a %b %d %H:%M:%S %Z %Y", gmtime())
+
+    # package counter
+    conf["KEYWORD_SUM"] = 0
+    conf["STABLE_SUM"] = 0
+
+    if not options.main_arch in portage.archlist and options.main_arch != "auto":
+        raise ValueError("invalid MAIN ARCH defined!")
+    if not options.target_arch in portage.archlist and options.target_arch != "auto":
+        raise ValueError("invalid TARGET ARCH defined!")
+
+    conf["MAIN_ARCH"] = options.main_arch
+    conf["TARGET_ARCH"] = options.target_arch
+
+    conf["FILE"] = options.filename
+    conf["MTIME"] = options.mtime
+
+    if not options.stable and not options.keyword:
+        conf["STABLE"] = True
+        conf["KEYWORD"] = True
+    else:
+        conf["STABLE"] = options.stable
+        conf["KEYWORD"] = options.keyword
+
+    # 	conf["EXPERIMENTAL"] = options.experimental
+    conf["CATEGORIES"] = options.categories
+
+    conf["MAINTAINER"] = options.maintainer
+    conf["HERD"] = options.herd
+
+    # append to our existing
+    conf = get_settings(conf)
+    pkgs = get_packages(conf)
+    pkgs = get_imlate(conf, pkgs)
+
+    show_result(conf, pkgs)
 
-if __name__ == "__main__":
-	main()
 
+if __name__ == "__main__":
+    main()

diff --git a/pym/gentoolkit/keyword.py b/pym/gentoolkit/keyword.py
index e997efe..4160781 100644
--- a/pym/gentoolkit/keyword.py
+++ b/pym/gentoolkit/keyword.py
@@ -7,12 +7,7 @@
 http://www.gentoo.org/proj/en/glep/glep-0053.html
 """
 
-__all__ = (
-	'Keyword',
-	'compare_strs',
-	'reduce_keywords',
-	'determine_keyword'
-)
+__all__ = ("Keyword", "compare_strs", "reduce_keywords", "determine_keyword")
 
 # =======
 # Imports
@@ -23,87 +18,90 @@ __all__ = (
 # Classes
 # =======
 
+
 class Keyword:
-	"""Provides common methods on a GLEP 53 keyword."""
+    """Provides common methods on a GLEP 53 keyword."""
+
+    def __init__(self, keyword):
+        self.keyword = keyword
+        arch, sep, os = keyword.partition("-")
+        self.arch = arch
+        self.os = os
 
-	def __init__(self, keyword):
-		self.keyword = keyword
-		arch, sep, os = keyword.partition('-')
-		self.arch = arch
-		self.os = os
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return self.keyword == other.keyword
 
-	def __eq__(self, other):
-		if not isinstance(other, self.__class__):
-			return False
-		return self.keyword == other.keyword
+    def __ne__(self, other):
+        return not self == other
 
-	def __ne__(self, other):
-		return not self == other
+    def __lt__(self, other):
+        if not isinstance(other, self.__class__):
+            raise TypeError(
+                "other isn't of %s type, is %s" % (self.__class__, other.__class__)
+            )
+        if self.os < other.os:
+            return True
+        return self.arch < other.arch
 
-	def __lt__(self, other):
-		if not isinstance(other, self.__class__):
-			raise TypeError("other isn't of %s type, is %s" % (
-				self.__class__, other.__class__)
-			)
-		if self.os < other.os:
-			return True
-		return self.arch < other.arch
+    def __le__(self, other):
+        return self == other or self < other
 
-	def __le__(self, other):
-		return self == other or self < other
+    def __gt__(self, other):
+        return not self <= other
 
-	def __gt__(self, other):
-		return not self <= other
+    def __ge__(self, other):
+        return self == other or self > other
 
-	def __ge__(self, other):
-		return self == other or self > other
+    def __str__(self):
+        return self.keyword
 
-	def __str__(self):
-		return self.keyword
+    def __repr__(self):
+        return "<{0.__class__.__name__} {0.keyword!r}>".format(self)
 
-	def __repr__(self):
-		return "<{0.__class__.__name__} {0.keyword!r}>".format(self)
 
 # =========
 # Functions
 # =========
 
+
 def compare_strs(kw1, kw2):
-	"""Similar to the builtin cmp, but for keyword strings. Usually called
-	as: keyword_list.sort(keyword.compare_strs)
+    """Similar to the builtin cmp, but for keyword strings. Usually called
+    as: keyword_list.sort(keyword.compare_strs)
 
-	An alternative is to use the Keyword descriptor directly:
-	>>> keyword_list = ['~x86', '~amd64', 'x86']
-	>>> kwds = sorted(Keyword(x) for x in keyword_list)
+    An alternative is to use the Keyword descriptor directly:
+    >>> keyword_list = ['~x86', '~amd64', 'x86']
+    >>> kwds = sorted(Keyword(x) for x in keyword_list)
 
-	@see: >>> help(cmp)
-	"""
+    @see: >>> help(cmp)
+    """
 
-	kw1_arch, sep, kw1_os = kw1.partition('-')
-	kw2_arch, sep, kw2_os = kw2.partition('-')
-	if kw1_arch != kw2_arch:
-		if kw1_os != kw2_os:
-			return -1 if kw1_os < kw2_os else 1
-		return -1 if kw1_arch < kw2_arch else 1
-	if kw1_os == kw2_os:
-		return 0
-	return -1 if kw1_os < kw2_os else 1
+    kw1_arch, sep, kw1_os = kw1.partition("-")
+    kw2_arch, sep, kw2_os = kw2.partition("-")
+    if kw1_arch != kw2_arch:
+        if kw1_os != kw2_os:
+            return -1 if kw1_os < kw2_os else 1
+        return -1 if kw1_arch < kw2_arch else 1
+    if kw1_os == kw2_os:
+        return 0
+    return -1 if kw1_os < kw2_os else 1
 
 
 def reduce_keywords(keywords):
-	"""Reduce a list of keywords to a unique set of stable keywords.
+    """Reduce a list of keywords to a unique set of stable keywords.
 
-	Example usage:
-		>>> kw = reduce_keywords(['~amd64', 'x86', '~x86'])
-		>>> isinstance(kw, set)
-		True
-		>>> sorted(kw)
-		['amd64', 'x86']
+    Example usage:
+            >>> kw = reduce_keywords(['~amd64', 'x86', '~x86'])
+            >>> isinstance(kw, set)
+            True
+            >>> sorted(kw)
+            ['amd64', 'x86']
 
-	@type keywords: array
-	@rtype: set
-	"""
-	return set(x.lstrip('~') for x in keywords)
+    @type keywords: array
+    @rtype: set
+    """
+    return set(x.lstrip("~") for x in keywords)
 
 
 abs_keywords = reduce_keywords
@@ -114,31 +112,31 @@ abs_keywords = reduce_keywords
 # I was trying to avoid a 2nd use of determine_keyword name (in analyse.lib)
 # but that one is a little different and not suitable for this task.
 def determine_keyword(arch, accepted, keywords):
-	"""Determine a keyword from matching a dep's KEYWORDS
-	list against the ARCH & ACCEPT_KEYWORDS provided.
-
-	@type arch: string
-	@param arch: portage.settings["ARCH"]
-	@type accepted: string
-	@param accepted: portage.settings["ACCEPT_KEYWORDS"]
-	@type keywords: string
-	@param keywords: the pkg ebuilds keywords
-	"""
-	if not keywords:
-		return ''
-	keys = keywords.split()
-	if arch in keys:
-		return arch
-	keyworded = "~" + arch
-	if keyworded in keys:
-		return keyworded
-	match = list(set(accepted.split(" ")).intersection(keys))
-	if len(match) > 1:
-		if arch in match:
-			return arch
-		if keyworded in match:
-			return keyworded
-		return 'unknown'
-	if match:
-		return match[0]
-	return 'unknown'
+    """Determine a keyword from matching a dep's KEYWORDS
+    list against the ARCH & ACCEPT_KEYWORDS provided.
+
+    @type arch: string
+    @param arch: portage.settings["ARCH"]
+    @type accepted: string
+    @param accepted: portage.settings["ACCEPT_KEYWORDS"]
+    @type keywords: string
+    @param keywords: the pkg ebuilds keywords
+    """
+    if not keywords:
+        return ""
+    keys = keywords.split()
+    if arch in keys:
+        return arch
+    keyworded = "~" + arch
+    if keyworded in keys:
+        return keyworded
+    match = list(set(accepted.split(" ")).intersection(keys))
+    if len(match) > 1:
+        if arch in match:
+            return arch
+        if keyworded in match:
+            return keyworded
+        return "unknown"
+    if match:
+        return match[0]
+    return "unknown"

diff --git a/pym/gentoolkit/metadata.py b/pym/gentoolkit/metadata.py
index 9dc9619..2678611 100644
--- a/pym/gentoolkit/metadata.py
+++ b/pym/gentoolkit/metadata.py
@@ -31,8 +31,8 @@
 		'Thomas Mills Hinkle'
 """
 
-__all__ = ('MetaData',)
-__docformat__ = 'epytext'
+__all__ = ("MetaData",)
+__docformat__ = "epytext"
 
 # =======
 # Imports
@@ -48,260 +48,262 @@ from portage import settings
 # Classes
 # =======
 
+
 class _Maintainer:
-	"""An object for representing one maintainer.
-
-	@type email: str or None
-	@ivar email: Maintainer's email address. Used for both Gentoo and upstream.
-	@type name: str or None
-	@ivar name: Maintainer's name. Used for both Gentoo and upstream.
-	@type description: str or None
-	@ivar description: Description of what a maintainer does. Gentoo only.
-	@type restrict: str or None
-	@ivar restrict: e.g. &gt;=portage-2.2 means only maintains versions
-		of Portage greater than 2.2. Should be DEPEND string with < and >
-		converted to &lt; and &gt; respectively.
-	@type status: str or None
-	@ivar status: If set, either 'active' or 'inactive'. Upstream only.
-	"""
-
-	def __init__(self, node):
-		self.email = None
-		self.name = None
-		self.description = None
-		self.restrict = node.get('restrict')
-		self.status = node.get('status')
-		for attr in node.iter():
-			setattr(self, attr.tag, attr.text)
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.email)
+    """An object for representing one maintainer.
+
+    @type email: str or None
+    @ivar email: Maintainer's email address. Used for both Gentoo and upstream.
+    @type name: str or None
+    @ivar name: Maintainer's name. Used for both Gentoo and upstream.
+    @type description: str or None
+    @ivar description: Description of what a maintainer does. Gentoo only.
+    @type restrict: str or None
+    @ivar restrict: e.g. &gt;=portage-2.2 means only maintains versions
+            of Portage greater than 2.2. Should be DEPEND string with < and >
+            converted to &lt; and &gt; respectively.
+    @type status: str or None
+    @ivar status: If set, either 'active' or 'inactive'. Upstream only.
+    """
+
+    def __init__(self, node):
+        self.email = None
+        self.name = None
+        self.description = None
+        self.restrict = node.get("restrict")
+        self.status = node.get("status")
+        for attr in node.iter():
+            setattr(self, attr.tag, attr.text)
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.email)
 
 
 class _Useflag:
-	"""An object for representing one USE flag.
-
-	@todo: Is there any way to have a keyword option to leave in
-		<pkg> and <cat> for later processing?
-	@type name: str or None
-	@ivar name: USE flag
-	@type restrict: str or None
-	@ivar restrict: e.g. &gt;=portage-2.2 means flag is only avaiable in
-		versions greater than 2.2
-	@type description: str
-	@ivar description: description of the USE flag
-	"""
-
-	def __init__(self, node):
-		self.name = node.get('name')
-		self.restrict = node.get('restrict')
-		_desc = ''
-		if node.text:
-			_desc = node.text
-		for child in node.iter():
-			# prevent duplicate text
-			if child.text and child.text not in _desc:
-				_desc += child.text
-			if child.tail and not child.tail in _desc:
-				_desc += child.tail
-		# This takes care of tabs and newlines left from the file
-		self.description = re.sub(r'\s+', ' ', _desc)
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.name)
+    """An object for representing one USE flag.
+
+    @todo: Is there any way to have a keyword option to leave in
+            <pkg> and <cat> for later processing?
+    @type name: str or None
+    @ivar name: USE flag
+    @type restrict: str or None
+    @ivar restrict: e.g. &gt;=portage-2.2 means flag is only available in
+            versions greater than 2.2
+    @type description: str
+    @ivar description: description of the USE flag
+    """
+
+    def __init__(self, node):
+        self.name = node.get("name")
+        self.restrict = node.get("restrict")
+        _desc = ""
+        if node.text:
+            _desc = node.text
+        for child in node.iter():
+            # prevent duplicate text
+            if child.text and child.text not in _desc:
+                _desc += child.text
+            if child.tail and not child.tail in _desc:
+                _desc += child.tail
+        # This takes care of tabs and newlines left from the file
+        self.description = re.sub(r"\s+", " ", _desc)
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.name)
 
 
 class _Upstream:
-	"""An object for representing one package's upstream.
-
-	@type maintainers: list
-	@ivar maintainers: L{_Maintainer} objects for each upstream maintainer
-	@type changelogs: list
-	@ivar changelogs: URLs to upstream's ChangeLog file in str format
-	@type docs: list
-	@ivar docs: Sequence of tuples containing URLs to upstream documentation
-		in the first slot and 'lang' attribute in the second, e.g.,
-		[('http.../docs/en/tut.html', None), ('http.../doc/fr/tut.html', 'fr')]
-	@type bugtrackers: list
-	@ivar bugtrackers: URLs to upstream's bugtracker. May also contain an email
-		address if prepended with 'mailto:'
-	@type remoteids: list
-	@ivar remoteids: Sequence of tuples containing the project's hosting site
-		name in the first slot and the project's ID name or number for that
-		site in the second, e.g., [('sourceforge', 'systemrescuecd')]
-	"""
-
-	def __init__(self, node):
-		self.node = node
-		self.maintainers = self.upstream_maintainers()
-		self.changelogs = self.upstream_changelogs()
-		self.docs = self.upstream_documentation()
-		self.bugtrackers = self.upstream_bugtrackers()
-		self.remoteids = self.upstream_remoteids()
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.__dict__)
-
-	def upstream_bugtrackers(self):
-		"""Retrieve upstream bugtracker location from xml node."""
-		return [e.text for e in self.node.findall('bugs-to')]
-
-	def upstream_changelogs(self):
-		"""Retrieve upstream changelog location from xml node."""
-		return [e.text for e in self.node.findall('changelog')]
-
-	def upstream_documentation(self):
-		"""Retrieve upstream documentation location from xml node."""
-		result = []
-		for elem in self.node.findall('doc'):
-			lang = elem.get('lang')
-			result.append((elem.text, lang))
-		return result
-
-	def upstream_maintainers(self):
-		"""Retrieve upstream maintainer information from xml node."""
-		return [_Maintainer(m) for m in self.node.findall('maintainer')]
-
-	def upstream_remoteids(self):
-		"""Retrieve upstream remote ID from xml node."""
-		return [(e.text, e.get('type')) for e in self.node.findall('remote-id')]
+    """An object for representing one package's upstream.
+
+    @type maintainers: list
+    @ivar maintainers: L{_Maintainer} objects for each upstream maintainer
+    @type changelogs: list
+    @ivar changelogs: URLs to upstream's ChangeLog file in str format
+    @type docs: list
+    @ivar docs: Sequence of tuples containing URLs to upstream documentation
+            in the first slot and 'lang' attribute in the second, e.g.,
+            [('http.../docs/en/tut.html', None), ('http.../doc/fr/tut.html', 'fr')]
+    @type bugtrackers: list
+    @ivar bugtrackers: URLs to upstream's bugtracker. May also contain an email
+            address if prepended with 'mailto:'
+    @type remoteids: list
+    @ivar remoteids: Sequence of tuples containing the project's hosting site
+            name in the first slot and the project's ID name or number for that
+            site in the second, e.g., [('sourceforge', 'systemrescuecd')]
+    """
+
+    def __init__(self, node):
+        self.node = node
+        self.maintainers = self.upstream_maintainers()
+        self.changelogs = self.upstream_changelogs()
+        self.docs = self.upstream_documentation()
+        self.bugtrackers = self.upstream_bugtrackers()
+        self.remoteids = self.upstream_remoteids()
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.__dict__)
+
+    def upstream_bugtrackers(self):
+        """Retrieve upstream bugtracker location from xml node."""
+        return [e.text for e in self.node.findall("bugs-to")]
+
+    def upstream_changelogs(self):
+        """Retrieve upstream changelog location from xml node."""
+        return [e.text for e in self.node.findall("changelog")]
+
+    def upstream_documentation(self):
+        """Retrieve upstream documentation location from xml node."""
+        result = []
+        for elem in self.node.findall("doc"):
+            lang = elem.get("lang")
+            result.append((elem.text, lang))
+        return result
+
+    def upstream_maintainers(self):
+        """Retrieve upstream maintainer information from xml node."""
+        return [_Maintainer(m) for m in self.node.findall("maintainer")]
+
+    def upstream_remoteids(self):
+        """Retrieve upstream remote ID from xml node."""
+        return [(e.text, e.get("type")) for e in self.node.findall("remote-id")]
 
 
 class MetaData:
-	"""Access metadata.xml"""
-
-	def __init__(self, metadata_path):
-		"""Parse a valid metadata.xml file.
-
-		@type metadata_path: str
-		@param metadata_path: path to a valid metadata.xml file
-		@raise IOError: if C{metadata_path} can not be read
-		"""
-
-		self.metadata_path = metadata_path
-		self._xml_tree = etree.parse(metadata_path)
-
-		# Used for caching
-		self._herdstree = None
-		self._descriptions = None
-		self._maintainers = None
-		self._useflags = None
-		self._upstream = None
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.metadata_path)
+    """Access metadata.xml"""
+
+    def __init__(self, metadata_path):
+        """Parse a valid metadata.xml file.
+
+        @type metadata_path: str
+        @param metadata_path: path to a valid metadata.xml file
+        @raise IOError: if C{metadata_path} can not be read
+        """
+
+        self.metadata_path = metadata_path
+        self._xml_tree = etree.parse(metadata_path)
+
+        # Used for caching
+        self._herdstree = None
+        self._descriptions = None
+        self._maintainers = None
+        self._useflags = None
+        self._upstream = None
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.metadata_path)
+
+    def _get_herd_email(self, herd):
+        """Get a herd's email address.
+
+        @type herd: str
+        @param herd: herd whose email you want
+        @rtype: str or None
+        @return: email address or None if herd is not in herds.xml
+        @raise IOError: if $PORTDIR/metadata/herds.xml can not be read
+        """
+
+        if self._herdstree is None:
+            herds_path = os.path.join(settings["PORTDIR"], "metadata/herds.xml")
+            try:
+                self._herdstree = etree.parse(herds_path)
+            except IOError:
+                # For some trees, herds.xml may not exist. Bug #300108.
+                return None
+
+        # Some special herds are not listed in herds.xml
+        if herd in ("no-herd", "maintainer-wanted", "maintainer-needed"):
+            return None
+
+        for node in self._herdstree.iter("herd"):
+            if node.findtext("name") == herd:
+                return node.findtext("email")
+
+    def herds(self, include_email=False):
+        """Return a list of text nodes for <herd>.
+
+        @type include_email: bool
+        @keyword include_email: if True, also look up the herd's email
+        @rtype: list
+        @return: if include_email is False, return a list of strings;
+                 if include_email is True, return a list of tuples containing:
+                                 [('herd1', 'herd1@gentoo.org'), ('no-herd', None)]
+        """
+
+        result = []
+        for elem in self._xml_tree.findall("herd"):
+            text = elem.text
+            if text is None:
+                text = ""
+            if include_email:
+                herd_mail = self._get_herd_email(text)
+                result.append((text, herd_mail))
+            else:
+                result.append(text)
 
-	def _get_herd_email(self, herd):
-		"""Get a herd's email address.
-
-		@type herd: str
-		@param herd: herd whose email you want
-		@rtype: str or None
-		@return: email address or None if herd is not in herds.xml
-		@raise IOError: if $PORTDIR/metadata/herds.xml can not be read
-		"""
-
-		if self._herdstree is None:
-			herds_path = os.path.join(settings['PORTDIR'], 'metadata/herds.xml')
-			try:
-				self._herdstree = etree.parse(herds_path)
-			except IOError:
-				# For some trees, herds.xml may not exist. Bug #300108.
-				return None
+        return result
 
-		# Some special herds are not listed in herds.xml
-		if herd in ('no-herd', 'maintainer-wanted', 'maintainer-needed'):
-			return None
+    def descriptions(self):
+        """Return a list of text nodes for <longdescription>.
 
-		for node in self._herdstree.iter('herd'):
-			if node.findtext('name') == herd:
-				return node.findtext('email')
+        @rtype: list
+        @return: package description in string format
+        @todo: Support the C{lang} attribute
+        """
 
-	def herds(self, include_email=False):
-		"""Return a list of text nodes for <herd>.
+        if self._descriptions is not None:
+            return self._descriptions
 
-		@type include_email: bool
-		@keyword include_email: if True, also look up the herd's email
-		@rtype: list
-		@return: if include_email is False, return a list of strings;
-		         if include_email is True, return a list of tuples containing:
-					 [('herd1', 'herd1@gentoo.org'), ('no-herd', None);
-		"""
+        long_descriptions = self._xml_tree.findall("longdescription")
+        self._descriptions = [e.text for e in long_descriptions]
+        return self._descriptions
 
-		result = []
-		for elem in self._xml_tree.findall('herd'):
-			text = elem.text
-			if text is None:
-				text = ''
-			if include_email:
-				herd_mail = self._get_herd_email(text)
-				result.append((text, herd_mail))
-			else:
-				result.append(text)
+    def maintainers(self):
+        """Get maintainers' name, email and description.
 
-		return result
+        @rtype: list
+        @return: a sequence of L{_Maintainer} objects in document order.
+        """
 
-	def descriptions(self):
-		"""Return a list of text nodes for <longdescription>.
+        if self._maintainers is not None:
+            return self._maintainers
 
-		@rtype: list
-		@return: package description in string format
-		@todo: Support the C{lang} attribute
-		"""
+        self._maintainers = []
+        for node in self._xml_tree.findall("maintainer"):
+            self._maintainers.append(_Maintainer(node))
 
-		if self._descriptions is not None:
-			return self._descriptions
+        return self._maintainers
 
-		long_descriptions = self._xml_tree.findall("longdescription")
-		self._descriptions = [e.text for e in long_descriptions]
-		return self._descriptions
+    def use(self):
+        """Get names and descriptions for USE flags defined in metadata.
 
-	def maintainers(self):
-		"""Get maintainers' name, email and description.
+        @rtype: list
+        @return: a sequence of L{_Useflag} objects in document order.
+        """
 
-		@rtype: list
-		@return: a sequence of L{_Maintainer} objects in document order.
-		"""
+        if self._useflags is not None:
+            return self._useflags
 
-		if self._maintainers is not None:
-			return self._maintainers
+        self._useflags = []
+        for node in self._xml_tree.iter("flag"):
+            self._useflags.append(_Useflag(node))
 
-		self._maintainers = []
-		for node in self._xml_tree.findall('maintainer'):
-			self._maintainers.append(_Maintainer(node))
+        return self._useflags
 
-		return self._maintainers
-
-	def use(self):
-		"""Get names and descriptions for USE flags defined in metadata.
-
-		@rtype: list
-		@return: a sequence of L{_Useflag} objects in document order.
-		"""
-
-		if self._useflags is not None:
-			return self._useflags
-
-		self._useflags = []
-		for node in self._xml_tree.iter('flag'):
-			self._useflags.append(_Useflag(node))
+    def upstream(self):
+        """Get upstream contact information.
 
-		return self._useflags
+        @rtype: list
+        @return: a sequence of L{_Upstream} objects in document order.
+        """
 
-	def upstream(self):
-		"""Get upstream contact information.
+        if self._upstream is not None:
+            return self._upstream
 
-		@rtype: list
-		@return: a sequence of L{_Upstream} objects in document order.
-		"""
-
-		if self._upstream is not None:
-			return self._upstream
+        self._upstream = []
+        for node in self._xml_tree.findall("upstream"):
+            self._upstream.append(_Upstream(node))
 
-		self._upstream = []
-		for node in self._xml_tree.findall('upstream'):
-			self._upstream.append(_Upstream(node))
+        return self._upstream
 
-		return self._upstream
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/module_base.py b/pym/gentoolkit/module_base.py
index 0632469..e6e668e 100644
--- a/pym/gentoolkit/module_base.py
+++ b/pym/gentoolkit/module_base.py
@@ -7,7 +7,7 @@
 """General Base Module class to hold common module operation functions
 """
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 
 import sys
@@ -17,135 +17,143 @@ import gentoolkit.pprinter as pp
 from gentoolkit.formatters import format_options
 from gentoolkit.base import mod_usage
 
+
 class ModuleBase:
-	"""E-app base module class to parse module options print module help, etc.."""
-
-	def __init__(self):
-		self.module_name = None
-		self.options = {}
-		self.formatted_options = None
-		self.short_opts = None
-		self.long_opts = None
-		self.module_opts = {}
-		self.warning = None
-		self.need_queries = True
-		self.saved_verbose = None
-
-
-	def print_help(self, with_description=True):
-		"""Print description, usage and a detailed help message.
-
-		@type with_description: bool
-		@param with_description: if true, print module's __doc__ string
-		"""
-
-		if with_description:
-			print()
-			print(__doc__.strip())
-			print()
-		if self.warning:
-			print()
-			for line in self.warning:
-				sys.stderr.write(pp.warn(line))
-			print()
-		print(mod_usage(mod_name=self.module_name, arg=self.arg_spec, optional=self.arg_option))
-		print()
-		print(pp.command("options"))
-		print(format_options( self.formatted_options ))
-		if self.formatted_args:
-			print()
-			print(pp.command(self.arg_spec))
-			print(format_options(self.formatted_args))
-		print()
-
-	def parse_module_options(self, module_opts):
-		"""Parse module options and update self.options"""
-		opts = (x[0] for x in module_opts)
-		posargs = (x[1] for x in module_opts)
-		for opt, posarg in zip(opts, posargs):
-			if opt in ('-h', '--help'):
-					self.print_help()
-					sys.exit(0)
-			opt_name, opt_type, opt_setting = self.module_opts[opt]
-			if opt_type == 'boolean':
-				self.options[opt_name] = opt_setting
-			elif opt_type == 'int':
-				if posarg.isdigit():
-					val = int(posarg)
-				else:
-					print()
-					err = "Module option %s requires integer (got '%s')"
-					sys.stdout.write(pp.error(err % (opt,posarg)))
-					print()
-					self.print_help(with_description=False)
-					sys.exit(2)
-				self.options[opt_name] = val
-			elif opt_type == 'char':
-				self.options[opt_name] = posarg
-
-	def set_quiet(self, quiet):
-		"""sets the class option["quiet"] and option["verbose"] accordingly"""
-		if quiet == self.options['quiet']:
-			return
-		if self.saved_verbose:
-			# detected a switch
-			verbose = self.options['verbose']
-			self.options['verbose']  = self.saved_verbose
-			self.saved_verbose = verbose
-		elif quiet:
-			self.saved_verbose = self.options['verbose']
-			self.options['verbose'] = False
-		self.options['quiet'] = quiet
-		return
-
-	def validate_query(self, query, depth=0):
-		"""check that the query meets the modules TargetSpec
-		If not it attempts to reduce it to a valid TargetSpec
-		or prints the help message and exits
-		"""
-		if depth > 1:
-			return []
-		if len(query) > 1:
-			query = list(set(self.arg_options).intersection(query))
-			#print "reduced query =", query
-			query = self.validate_query(query, depth+1)
-		if isinstance(query, list):
-			query = query[0]
-		if query not in self.arg_options:
-			print()
-			print(pp.error(
-				"Error starting module. Incorrect or No TargetSpec specified!"
-				))
-			print("query = ", query)
-			self.print_help()
-			sys.exit(2)
-		return query
-
-
-	def main_setup(self, input_args):
-		"""Parse input and prepares the program"""
-
-		try:
-			module_opts, queries = gnu_getopt(input_args, self.short_opts, self.long_opts)
-		except GetoptError as err:
-			sys.stderr.write(pp.error("Module %s" % err))
-			print()
-			self.print_help(with_description=False)
-			sys.exit(2)
-		self.parse_module_options(module_opts)
-		if self.need_queries and not queries:
-			self.print_help()
-			sys.exit(2)
-		return queries
-
-
-	def not_implemented(self, target):
-		"""Prints a standard module not implemented message"""
-		print()
-		print(pp.error(
-			"Sorry %s module and/or target is not implenented yet."
-			% pp.emph(self.command_name)))
-		print("module: %s, target: %s" %(pp.emph(self.module_name), pp.emph(target)))
-		print()
+    """E-app base module class to parse module options print module help, etc.."""
+
+    def __init__(self):
+        self.module_name = None
+        self.options = {}
+        self.formatted_options = None
+        self.short_opts = None
+        self.long_opts = None
+        self.module_opts = {}
+        self.warning = None
+        self.need_queries = True
+        self.saved_verbose = None
+
+    def print_help(self, with_description=True):
+        """Print description, usage and a detailed help message.
+
+        @type with_description: bool
+        @param with_description: if true, print module's __doc__ string
+        """
+
+        if with_description:
+            print()
+            print(__doc__.strip())
+            print()
+        if self.warning:
+            print()
+            for line in self.warning:
+                sys.stderr.write(pp.warn(line))
+            print()
+        print(
+            mod_usage(
+                mod_name=self.module_name, arg=self.arg_spec, optional=self.arg_option
+            )
+        )
+        print()
+        print(pp.command("options"))
+        print(format_options(self.formatted_options))
+        if self.formatted_args:
+            print()
+            print(pp.command(self.arg_spec))
+            print(format_options(self.formatted_args))
+        print()
+
+    def parse_module_options(self, module_opts):
+        """Parse module options and update self.options"""
+        opts = (x[0] for x in module_opts)
+        posargs = (x[1] for x in module_opts)
+        for opt, posarg in zip(opts, posargs):
+            if opt in ("-h", "--help"):
+                self.print_help()
+                sys.exit(0)
+            opt_name, opt_type, opt_setting = self.module_opts[opt]
+            if opt_type == "boolean":
+                self.options[opt_name] = opt_setting
+            elif opt_type == "int":
+                if posarg.isdigit():
+                    val = int(posarg)
+                else:
+                    print()
+                    err = "Module option %s requires integer (got '%s')"
+                    sys.stdout.write(pp.error(err % (opt, posarg)))
+                    print()
+                    self.print_help(with_description=False)
+                    sys.exit(2)
+                self.options[opt_name] = val
+            elif opt_type == "char":
+                self.options[opt_name] = posarg
+
+    def set_quiet(self, quiet):
+        """sets the class option["quiet"] and option["verbose"] accordingly"""
+        if quiet == self.options["quiet"]:
+            return
+        if self.saved_verbose:
+            # detected a switch
+            verbose = self.options["verbose"]
+            self.options["verbose"] = self.saved_verbose
+            self.saved_verbose = verbose
+        elif quiet:
+            self.saved_verbose = self.options["verbose"]
+            self.options["verbose"] = False
+        self.options["quiet"] = quiet
+        return
+
+    def validate_query(self, query, depth=0):
+        """check that the query meets the modules TargetSpec
+        If not it attempts to reduce it to a valid TargetSpec
+        or prints the help message and exits
+        """
+        if depth > 1:
+            return []
+        if len(query) > 1:
+            query = list(set(self.arg_options).intersection(query))
+            # print "reduced query =", query
+            query = self.validate_query(query, depth + 1)
+        if isinstance(query, list):
+            query = query[0]
+        if query not in self.arg_options:
+            print()
+            print(
+                pp.error("Error starting module. Incorrect or No TargetSpec specified!")
+            )
+            print("query = ", query)
+            self.print_help()
+            sys.exit(2)
+        return query
+
+    def main_setup(self, input_args):
+        """Parse input and prepares the program"""
+
+        try:
+            module_opts, queries = gnu_getopt(
+                input_args, self.short_opts, self.long_opts
+            )
+        except GetoptError as err:
+            sys.stderr.write(pp.error("Module %s" % err))
+            print()
+            self.print_help(with_description=False)
+            sys.exit(2)
+        self.parse_module_options(module_opts)
+        if self.need_queries and not queries:
+            self.print_help()
+            sys.exit(2)
+        return queries
+
+    def not_implemented(self, target):
+        """Prints a standard module not implemented message"""
+        print()
+        print(
+            pp.error(
+                "Sorry %s module and/or target is not implenented yet."
+                % pp.emph(self.command_name)
+            )
+        )
+        print("module: %s, target: %s" % (pp.emph(self.module_name), pp.emph(target)))
+        print()
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/package.py b/pym/gentoolkit/package.py
index 4c851bc..92bc3a3 100644
--- a/pym/gentoolkit/package.py
+++ b/pym/gentoolkit/package.py
@@ -20,19 +20,26 @@ Example usage:
 	False
 """
 
-__all__ = (
-	'Package',
-	'PackageFormatter',
-	'FORMAT_TMPL_VARS'
-)
+__all__ = ("Package", "PackageFormatter", "FORMAT_TMPL_VARS")
 
 # =======
 # Globals
 # =======
 
 FORMAT_TMPL_VARS = (
-	'$location', '$mask', '$mask2', '$cp', '$cpv', '$category', '$name',
-	'$version', '$revision', '$fullversion', '$slot', '$repo', '$keywords'
+    "$location",
+    "$mask",
+    "$mask2",
+    "$cp",
+    "$cpv",
+    "$category",
+    "$name",
+    "$version",
+    "$revision",
+    "$fullversion",
+    "$slot",
+    "$repo",
+    "$keywords",
 )
 
 # =======
@@ -57,13 +64,18 @@ from gentoolkit.eprefix import EPREFIX
 # Settings
 # =======
 
+
 def _NewPortageConfig(local_config):
-	ret = portage.config(local_config=local_config,
-			eprefix=EPREFIX if EPREFIX else None,
-			config_root=os.environ.get('PORTAGE_CONFIGROOT', None),
-			target_root=os.environ.get('ROOT', None))
-	ret.lock()
-	return ret
+    ret = portage.config(
+        local_config=local_config,
+        eprefix=EPREFIX if EPREFIX else None,
+        config_root=os.environ.get("PORTAGE_CONFIGROOT", None),
+        target_root=os.environ.get("ROOT", None),
+    )
+    ret.lock()
+    return ret
+
+
 default_settings = _NewPortageConfig(local_config=True)
 nolocal_settings = _NewPortageConfig(local_config=False)
 
@@ -71,555 +83,555 @@ nolocal_settings = _NewPortageConfig(local_config=False)
 # Classes
 # =======
 
-class Package(CPV):
-	"""Exposes the state of a given CPV."""
-
-	def __init__(self, cpv, validate=False, local_config=True):
-		if isinstance(cpv, CPV):
-			self.__dict__.update(cpv.__dict__)
-		else:
-			CPV.__init__(self, cpv, validate=validate)
-
-		if validate and not all(
-			hasattr(self, x) for x in ('category', 'version')
-		):
-			# CPV allows some things that Package must not
-			raise errors.GentoolkitInvalidPackage(self.cpv)
-
-		if local_config:
-			self._settings = default_settings
-		else:
-			self._settings = nolocal_settings
-
-		# Set dynamically
-		self._package_path = None
-		self._dblink = None
-		self._metadata = None
-		self._deps = None
-		self._portdir_path = None
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, self.cpv)
-
-	def __hash__(self):
-		return hash(self.cpv)
-
-	def __contains__(self, key):
-		return key in self.cpv
-
-	def __str__(self):
-		return str(self.cpv)
-
-	@property
-	def metadata(self):
-		"""Instantiate a L{gentoolkit.metadata.MetaData} object here."""
-
-		from gentoolkit.metadata import MetaData
-
-		if self._metadata is None:
-			metadata_path = os.path.join(
-				self.package_path(), 'metadata.xml'
-			)
-			try:
-				self._metadata = MetaData(metadata_path)
-			except IOError as error:
-				import errno
-				if error.errno != errno.ENOENT:
-					raise
-				return None
-
-		return self._metadata
-
-	@property
-	def dblink(self):
-		"""Instantiate a L{portage.dbapi.vartree.dblink} object here."""
-
-		if self._dblink is None:
-			self._dblink = portage.dblink(
-				self.category,
-				"%s-%s" % (self.name, self.fullversion),
-				self._settings["ROOT"],
-				self._settings
-			)
-
-		return self._dblink
-
-	@property
-	def deps(self):
-		"""Instantiate a L{gentoolkit.dependencies.Dependencies} object here."""
-
-		from gentoolkit.dependencies import Dependencies
-
-		if self._deps is None:
-			self._deps = Dependencies(self.cpv)
-
-		return self._deps
-
-	def environment(self, envvars, prefer_vdb=True, fallback=True):
-		"""Returns one or more of the predefined environment variables.
-
-		Some available envvars are:
-		----------------------
-			BINPKGMD5  COUNTER         FEATURES   LICENSE  SRC_URI
-			CATEGORY   CXXFLAGS        HOMEPAGE   PDEPEND  USE
-			CBUILD     DEFINED_PHASES  INHERITED  PF
-			CFLAGS     DEPEND          IUSE       PROVIDE
-			CHOST      DESCRIPTION     KEYWORDS   RDEPEND
-			CONTENTS   EAPI            LDFLAGS    SLOT
-
-		Example usage:
-			>>> pkg = Package('sys-apps/portage-9999')
-			>>> pkg.environment('USE')
-			''
-			>>> pkg.environment(('USE', 'IUSE'))
-			... # doctest: +NORMALIZE_WHITESPACE
-			['', 'build doc epydoc +ipc pypy1_9 python2 python3
-			 selinux xattr']
-
-		@type envvars: str or array
-		@param envvars: one or more of (DEPEND, SRC_URI, etc.)
-		@type prefer_vdb: bool
-		@keyword prefer_vdb: if True, look in the vardb before portdb, else
-			reverse order. Specifically KEYWORDS will get more recent
-			information by preferring portdb.
-		@type fallback: bool
-		@keyword fallback: query only the preferred db if False
-		@rtype: str or list
-		@return: str if envvars is str, list if envvars is array
-		@raise KeyError: if key is not found in requested db(s)
-		"""
-
-		got_string = False
-		if isinstance(envvars, str):
-			got_string = True
-			envvars = (envvars,)
-		if prefer_vdb:
-			try:
-				result = portage.db[portage.root][
-					'vartree'].dbapi.aux_get(
-					self.cpv, envvars)
-			except KeyError:
-				try:
-					if not fallback:
-						raise KeyError
-					result = portage.db[portage.root][
-						'porttree'].dbapi.aux_get(
-						self.cpv, envvars)
-				except KeyError:
-					raise errors.GentoolkitFatalError(
-						'aux_get returned unexpected '
-						'results')
-		else:
-			try:
-				result = portage.db[portage.root][
-					'porttree'].dbapi.aux_get(
-					self.cpv, envvars)
-			except KeyError:
-				try:
-					if not fallback:
-						raise KeyError
-					result = portage.db[portage.root][
-						'vartree'].dbapi.aux_get(
-						self.cpv, envvars)
-				except KeyError:
-					raise errors.GentoolkitFatalError(
-						'aux_get returned unexpected '
-						'results')
-
-		if got_string:
-			return result[0]
-		return result
-
-	def exists(self):
-		"""Return True if package exists in the Portage tree, else False"""
-
-		return bool(portage.db[portage.root]["porttree"].dbapi.cpv_exists(self.cpv))
-
-	def settings(self, key):
-		"""Returns the value of the given key for this package (useful
-		for package.* files."""
-
-		if self._settings.locked:
-			self._settings.unlock()
-		try:
-			result = self._settings[key]
-		finally:
-			self._settings.lock()
-		return result
-
-	def mask_status(self):
-		"""Shortcut to L{portage.getmaskingstatus}.
-
-		@rtype: None or list
-		@return: a list containing none or some of:
-			'profile'
-			'package.mask'
-			license(s)
-			"kmask" keyword
-			'missing keyword'
-		"""
-
-		if self._settings.locked:
-			self._settings.unlock()
-		try:
-			result = portage.getmaskingstatus(self.cpv,
-				settings=self._settings,
-				portdb=portage.db[portage.root]["porttree"].dbapi)
-		except KeyError:
-			# getmaskingstatus doesn't support packages without ebuilds in the
-			# Portage tree.
-			result = None
-
-		return result
-
-	def mask_reason(self):
-		"""Shortcut to L{portage.getmaskingreason}.
-
-		@rtype: None or tuple
-		@return: empty tuple if pkg not masked OR
-			('mask reason', 'mask location')
-		"""
-
-		try:
-			result = portage.getmaskingreason(self.cpv,
-				settings=self._settings,
-				portdb=portage.db[portage.root]["porttree"].dbapi,
-				return_location=True)
-			if result is None:
-				result = tuple()
-		except KeyError:
-			# getmaskingstatus doesn't support packages without ebuilds in the
-			# Portage tree.
-			result = None
-
-		return result
-
-	def ebuild_path(self, in_vartree=False):
-		"""Returns the complete path to the .ebuild file.
-
-		Example usage:
-			>>> pkg = Package('sys-apps/portage-9999')
-			>>> pkg.ebuild_path()
-			'/usr/portage/sys-apps/portage/portage-9999.ebuild'
-			>>> pkg.ebuild_path(in_vartree=True)
-			'/var/db/pkg/sys-apps/portage-9999/portage-9999.ebuild'
-		"""
-
-		if in_vartree:
-			return portage.db[portage.root]["vartree"].dbapi.findname(self.cpv)
-		return portage.db[portage.root]["porttree"].dbapi.findname(self.cpv)
-
-	def package_path(self, in_vartree=False):
-		"""Return the path to where the ebuilds and other files reside."""
-
-		if in_vartree:
-			return self.dblink.getpath()
-		return os.sep.join(self.ebuild_path().split(os.sep)[:-1])
-
-	def repo_name(self, fallback=True):
-		"""Determine the repository name.
-
-		@type fallback: bool
-		@param fallback: if the repo_name file does not exist, return the
-			repository name from the path
-		@rtype: str
-		@return: output of the repository metadata file, which stores the
-			repo_name variable, or try to get the name of the repo from
-			the path.
-		@raise GentoolkitFatalError: if fallback is False and repo_name is
-			not specified by the repository.
-		"""
-
-		try:
-			return self.environment('repository')
-		except errors.GentoolkitFatalError:
-			if fallback:
-				return self.package_path().split(os.sep)[-3]
-			raise
-
-	def use(self):
-		"""Returns the USE flags active at time of installation."""
-
-		return self.dblink.getstring("USE")
-
-	def use_status(self):
-		"""Returns the USE flags active for installation."""
-
-		iuse, final_flags = get_flags(self.cpv, final_setting=True)
-		return final_flags
-
-	def parsed_contents(self, prefix_root=False):
-		"""Returns the parsed CONTENTS file.
-
-		@rtype: dict
-		@return: {'/full/path/to/obj': ['type', 'timestamp', 'md5sum'], ...}
-		"""
-
-		contents = self.dblink.getcontents()
-
-		# Portage will automatically prepend ROOT.  Undo that.
-		if not prefix_root:
-			myroot = self._settings["ROOT"]
-			if myroot != '/':
-				ret = {}
-				for key, val in self.dblink.getcontents().items():
-					ret['/' + os.path.relpath(key, myroot)] = val
-				contents = ret
-
-		return contents
-
-	def size(self):
-		"""Estimates the installed size of the contents of this package.
-
-		@rtype: tuple
-		@return: (size, number of files in total, number of uncounted files)
-		"""
-
-		seen = set()
-		size = n_files = n_uncounted = 0
-		for path in self.parsed_contents(prefix_root=True):
-			try:
-				st = os.lstat(_unicode_encode(path, encoding=_encodings['fs']))
-			except OSError:
-				continue
-
-			# Remove hardlinks by checking for duplicate inodes. Bug #301026.
-			file_inode = st.st_ino
-			if file_inode in seen:
-				continue
-			seen.add(file_inode)
-
-			try:
-				size += st.st_size
-				n_files += 1
-			except OSError:
-				n_uncounted += 1
-
-		return (size, n_files, n_uncounted)
 
-	def is_installed(self):
-		"""Returns True if this package is installed (merged)."""
-
-		return self.dblink.exists()
+class Package(CPV):
+    """Exposes the state of a given CPV."""
+
+    def __init__(self, cpv, validate=False, local_config=True):
+        if isinstance(cpv, CPV):
+            self.__dict__.update(cpv.__dict__)
+        else:
+            CPV.__init__(self, cpv, validate=validate)
+
+        if validate and not all(hasattr(self, x) for x in ("category", "version")):
+            # CPV allows some things that Package must not
+            raise errors.GentoolkitInvalidPackage(self.cpv)
+
+        if local_config:
+            self._settings = default_settings
+        else:
+            self._settings = nolocal_settings
+
+        # Set dynamically
+        self._package_path = None
+        self._dblink = None
+        self._metadata = None
+        self._deps = None
+        self._portdir_path = None
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, self.cpv)
+
+    def __hash__(self):
+        return hash(self.cpv)
+
+    def __contains__(self, key):
+        return key in self.cpv
+
+    def __str__(self):
+        return str(self.cpv)
+
+    @property
+    def metadata(self):
+        """Instantiate a L{gentoolkit.metadata.MetaData} object here."""
+
+        from gentoolkit.metadata import MetaData
+
+        if self._metadata is None:
+            metadata_path = os.path.join(self.package_path(), "metadata.xml")
+            try:
+                self._metadata = MetaData(metadata_path)
+            except IOError as error:
+                import errno
+
+                if error.errno != errno.ENOENT:
+                    raise
+                return None
+
+        return self._metadata
+
+    @property
+    def dblink(self):
+        """Instantiate a L{portage.dbapi.vartree.dblink} object here."""
+
+        if self._dblink is None:
+            self._dblink = portage.dblink(
+                self.category,
+                "%s-%s" % (self.name, self.fullversion),
+                self._settings["ROOT"],
+                self._settings,
+            )
+
+        return self._dblink
+
+    @property
+    def deps(self):
+        """Instantiate a L{gentoolkit.dependencies.Dependencies} object here."""
+
+        from gentoolkit.dependencies import Dependencies
+
+        if self._deps is None:
+            self._deps = Dependencies(self.cpv)
+
+        return self._deps
+
+    def environment(self, envvars, prefer_vdb=True, fallback=True):
+        """Returns one or more of the predefined environment variables.
+
+        Some available envvars are:
+        ----------------------
+                BINPKGMD5  COUNTER         FEATURES   LICENSE  SRC_URI
+                CATEGORY   CXXFLAGS        HOMEPAGE   PDEPEND  USE
+                CBUILD     DEFINED_PHASES  INHERITED  PF
+                CFLAGS     DEPEND          IUSE       PROVIDE
+                CHOST      DESCRIPTION     KEYWORDS   RDEPEND
+                CONTENTS   EAPI            LDFLAGS    SLOT
+
+        Example usage:
+                >>> pkg = Package('sys-apps/portage-9999')
+                >>> pkg.environment('USE')
+                ''
+                >>> pkg.environment(('USE', 'IUSE'))
+                ... # doctest: +NORMALIZE_WHITESPACE
+                ['', 'build doc epydoc +ipc pypy1_9 python2 python3
+                 selinux xattr']
+
+        @type envvars: str or array
+        @param envvars: one or more of (DEPEND, SRC_URI, etc.)
+        @type prefer_vdb: bool
+        @keyword prefer_vdb: if True, look in the vardb before portdb, else
+                reverse order. Specifically KEYWORDS will get more recent
+                information by preferring portdb.
+        @type fallback: bool
+        @keyword fallback: query only the preferred db if False
+        @rtype: str or list
+        @return: str if envvars is str, list if envvars is array
+        @raise KeyError: if key is not found in requested db(s)
+        """
+
+        got_string = False
+        if isinstance(envvars, str):
+            got_string = True
+            envvars = (envvars,)
+        if prefer_vdb:
+            try:
+                result = portage.db[portage.root]["vartree"].dbapi.aux_get(
+                    self.cpv, envvars
+                )
+            except KeyError:
+                try:
+                    if not fallback:
+                        raise KeyError
+                    result = portage.db[portage.root]["porttree"].dbapi.aux_get(
+                        self.cpv, envvars
+                    )
+                except KeyError:
+                    raise errors.GentoolkitFatalError(
+                        "aux_get returned unexpected " "results"
+                    )
+        else:
+            try:
+                result = portage.db[portage.root]["porttree"].dbapi.aux_get(
+                    self.cpv, envvars
+                )
+            except KeyError:
+                try:
+                    if not fallback:
+                        raise KeyError
+                    result = portage.db[portage.root]["vartree"].dbapi.aux_get(
+                        self.cpv, envvars
+                    )
+                except KeyError:
+                    raise errors.GentoolkitFatalError(
+                        "aux_get returned unexpected " "results"
+                    )
+
+        if got_string:
+            return result[0]
+        return result
+
+    def exists(self):
+        """Return True if package exists in the Portage tree, else False"""
+
+        return bool(portage.db[portage.root]["porttree"].dbapi.cpv_exists(self.cpv))
+
+    def settings(self, key):
+        """Returns the value of the given key for this package (useful
+        for package.* files)."""
+
+        if self._settings.locked:
+            self._settings.unlock()
+        try:
+            result = self._settings[key]
+        finally:
+            self._settings.lock()
+        return result
+
+    def mask_status(self):
+        """Shortcut to L{portage.getmaskingstatus}.
+
+        @rtype: None or list
+        @return: a list containing none or some of:
+                'profile'
+                'package.mask'
+                license(s)
+                "kmask" keyword
+                'missing keyword'
+        """
+
+        if self._settings.locked:
+            self._settings.unlock()
+        try:
+            result = portage.getmaskingstatus(
+                self.cpv,
+                settings=self._settings,
+                portdb=portage.db[portage.root]["porttree"].dbapi,
+            )
+        except KeyError:
+            # getmaskingstatus doesn't support packages without ebuilds in the
+            # Portage tree.
+            result = None
+
+        return result
+
+    def mask_reason(self):
+        """Shortcut to L{portage.getmaskingreason}.
+
+        @rtype: None or tuple
+        @return: empty tuple if pkg not masked OR
+                ('mask reason', 'mask location')
+        """
+
+        try:
+            result = portage.getmaskingreason(
+                self.cpv,
+                settings=self._settings,
+                portdb=portage.db[portage.root]["porttree"].dbapi,
+                return_location=True,
+            )
+            if result is None:
+                result = tuple()
+        except KeyError:
+            # getmaskingstatus doesn't support packages without ebuilds in the
+            # Portage tree.
+            result = None
+
+        return result
+
+    def ebuild_path(self, in_vartree=False):
+        """Returns the complete path to the .ebuild file.
+
+        Example usage:
+                >>> pkg = Package('sys-apps/portage-9999')
+                >>> pkg.ebuild_path()
+                '/usr/portage/sys-apps/portage/portage-9999.ebuild'
+                >>> pkg.ebuild_path(in_vartree=True)
+                '/var/db/pkg/sys-apps/portage-9999/portage-9999.ebuild'
+        """
+
+        if in_vartree:
+            return portage.db[portage.root]["vartree"].dbapi.findname(self.cpv)
+        return portage.db[portage.root]["porttree"].dbapi.findname(self.cpv)
+
+    def package_path(self, in_vartree=False):
+        """Return the path to where the ebuilds and other files reside."""
+
+        if in_vartree:
+            return self.dblink.getpath()
+        return os.sep.join(self.ebuild_path().split(os.sep)[:-1])
+
+    def repo_name(self, fallback=True):
+        """Determine the repository name.
+
+        @type fallback: bool
+        @param fallback: if the repo_name file does not exist, return the
+                repository name from the path
+        @rtype: str
+        @return: output of the repository metadata file, which stores the
+                repo_name variable, or try to get the name of the repo from
+                the path.
+        @raise GentoolkitFatalError: if fallback is False and repo_name is
+                not specified by the repository.
+        """
+
+        try:
+            return self.environment("repository")
+        except errors.GentoolkitFatalError:
+            if fallback:
+                return self.package_path().split(os.sep)[-3]
+            raise
+
+    def use(self):
+        """Returns the USE flags active at time of installation."""
+
+        return self.dblink.getstring("USE")
+
+    def use_status(self):
+        """Returns the USE flags active for installation."""
+
+        iuse, final_flags = get_flags(self.cpv, final_setting=True)
+        return final_flags
+
+    def parsed_contents(self, prefix_root=False):
+        """Returns the parsed CONTENTS file.
+
+        @rtype: dict
+        @return: {'/full/path/to/obj': ['type', 'timestamp', 'md5sum'], ...}
+        """
+
+        contents = self.dblink.getcontents()
+
+        # Portage will automatically prepend ROOT.  Undo that.
+        if not prefix_root:
+            myroot = self._settings["ROOT"]
+            if myroot != "/":
+                ret = {}
+                for key, val in self.dblink.getcontents().items():
+                    ret["/" + os.path.relpath(key, myroot)] = val
+                contents = ret
+
+        return contents
+
+    def size(self):
+        """Estimates the installed size of the contents of this package.
+
+        @rtype: tuple
+        @return: (size, number of files in total, number of uncounted files)
+        """
+
+        seen = set()
+        size = n_files = n_uncounted = 0
+        for path in self.parsed_contents(prefix_root=True):
+            try:
+                st = os.lstat(_unicode_encode(path, encoding=_encodings["fs"]))
+            except OSError:
+                continue
+
+            # Remove hardlinks by checking for duplicate inodes. Bug #301026.
+            file_inode = st.st_ino
+            if file_inode in seen:
+                continue
+            seen.add(file_inode)
+
+            try:
+                size += st.st_size
+                n_files += 1
+            except OSError:
+                n_uncounted += 1
+
+        return (size, n_files, n_uncounted)
+
+    def is_installed(self):
+        """Returns True if this package is installed (merged)."""
+
+        return self.dblink.exists()
 
-	def is_overlay(self):
-		"""Returns True if the package is in an overlay."""
+    def is_overlay(self):
+        """Returns True if the package is in an overlay."""
 
-		ebuild, tree = portage.db[portage.root]["porttree"].dbapi.findname2(self.cpv)
-		if not ebuild:
-			return None
-		if self._portdir_path is None:
-			self._portdir_path = os.path.realpath(self._settings["PORTDIR"])
-		return (tree and tree != self._portdir_path)
+        ebuild, tree = portage.db[portage.root]["porttree"].dbapi.findname2(self.cpv)
+        if not ebuild:
+            return None
+        if self._portdir_path is None:
+            self._portdir_path = os.path.realpath(self._settings["PORTDIR"])
+        return tree and tree != self._portdir_path
 
-	def is_masked(self):
-		"""Returns True if this package is masked against installation.
+    def is_masked(self):
+        """Returns True if this package is masked against installation.
 
-		@note: We blindly assume that the package actually exists on disk.
-		"""
+        @note: We blindly assume that the package actually exists on disk.
+        """
 
-		unmasked = portage.db[portage.root]['porttree'].dbapi.xmatch(
-			'match-visible', self.cpv)
-		return self.cpv not in unmasked
+        unmasked = portage.db[portage.root]["porttree"].dbapi.xmatch(
+            "match-visible", self.cpv
+        )
+        return self.cpv not in unmasked
 
 
 class PackageFormatter:
-	"""When applied to a L{gentoolkit.package.Package} object, determine the
-	location (Portage Tree vs. overlay), install status and masked status. That
-	information can then be easily formatted and displayed.
-
-	Example usage:
-		>>> from gentoolkit.query import Query
-		>>> from gentoolkit.package import PackageFormatter
-		>>> import portage.output
-		>>> q = Query('gcc')
-		>>> pkgs = [PackageFormatter(x) for x in q.find()]
-		>>> havecolor = portage.output.havecolor
-		>>> portage.output.havecolor = False
-		>>> for pkg in pkgs:
-		...     # Only print packages that are installed and from the Portage
-		...     # tree
-		...     if set('IP').issubset(pkg.location):
-		...             print(pkg)
-		...
-		[IP-] [  ] sys-devel/gcc-4.5.4:4.5
-		>>> portage.output.havecolor = havecolor
-
-	@type pkg: L{gentoolkit.package.Package}
-	@param pkg: package to format
-	@type do_format: bool
-	@param do_format: Whether to format the package name or not.
-		Essentially C{do_format} should be set to False when piping or when
-		quiet output is desired. If C{do_format} is False, only the location
-		attribute will be created to save time.
-	"""
-
-	_tmpl_verbose = "[$location] [$mask] $cpv:$slot"
-	_tmpl_quiet = "$cpv"
-
-	def __init__(self, pkg, do_format=True, custom_format=None):
-		self._pkg = None
-		self._do_format = do_format
-		self._str = None
-		self._location = None
-		if not custom_format:
-			if do_format:
-				custom_format = self._tmpl_verbose
-			else:
-				custom_format = self._tmpl_quiet
-		self.tmpl = Template(custom_format)
-		self.format_vars = LazyItemsDict()
-		self.pkg = pkg
-
-	def __repr__(self):
-		return "<%s %s @%#8x>" % (self.__class__.__name__, self.pkg, id(self))
-
-	def __str__(self):
-		if self._str is None:
-			self._str = self.tmpl.safe_substitute(self.format_vars)
-		return self._str
-
-	@property
-	def location(self):
-		if self._location is None:
-			self._location = self.format_package_location()
-		return self._location
-
-	@property
-	def pkg(self):
-		"""Package to format"""
-		return self._pkg
-
-	@pkg.setter
-	def pkg(self, value):
-		if self._pkg == value:
-			return
-		self._pkg = value
-		self._location = None
-
-		fmt_vars = self.format_vars
-		self.format_vars.clear()
-		fmt_vars.addLazySingleton("location",
-			lambda: getattr(self, "location"))
-		fmt_vars.addLazySingleton("mask", self.format_mask)
-		fmt_vars.addLazySingleton("mask2", self.format_mask_status2)
-		fmt_vars.addLazySingleton("cpv", self.format_cpv)
-		fmt_vars.addLazySingleton("cp", self.format_cpv, "cp")
-		fmt_vars.addLazySingleton("category", self.format_cpv, "category")
-		fmt_vars.addLazySingleton("name", self.format_cpv, "name")
-		fmt_vars.addLazySingleton("version", self.format_cpv, "version")
-		fmt_vars.addLazySingleton("revision", self.format_cpv, "revision")
-		fmt_vars.addLazySingleton("fullversion", self.format_cpv,
-			"fullversion")
-		fmt_vars.addLazySingleton("slot", self.format_slot)
-		fmt_vars.addLazySingleton("repo", self.pkg.repo_name)
-		fmt_vars.addLazySingleton("keywords", self.format_keywords)
-
-	def format_package_location(self):
-		"""Get the install status (in /var/db/?) and origin (from an overlay
-		and the Portage tree?).
-
-		@rtype: str
-		@return: one of:
-			'I--' : Installed but ebuild doesn't exist on system anymore
-			'-P-' : Not installed and from the Portage tree
-			'--O' : Not installed and from an overlay
-			'IP-' : Installed and from the Portage tree
-			'I-O' : Installed and from an overlay
-		"""
-
-		result = ['-', '-', '-']
-
-		if self.pkg.is_installed():
-			result[0] = 'I'
-
-		overlay = self.pkg.is_overlay()
-		if overlay is None:
-			pass
-		elif overlay:
-			result[2] = 'O'
-		else:
-			result[1] = 'P'
-
-		return ''.join(result)
-
-	def format_mask_status(self):
-		"""Get the mask status of a given package.
-
-		@rtype: tuple: (int, list)
-		@return: int = an index for this list:
-			["  ", " ~", " -", "M ", "M~", "M-", "??"]
-			0 = not masked
-			1 = keyword masked
-			2 = arch masked
-			3 = hard masked
-			4 = hard and keyword masked,
-			5 = hard and arch masked
-			6 = ebuild doesn't exist on system anymore
-
-			list = original output of portage.getmaskingstatus
-		"""
-
-		result = 0
-		masking_status = self.pkg.mask_status()
-		if masking_status is None:
-			return (6, [])
-
-		if ("~%s keyword" % self.pkg.settings("ARCH")) in masking_status:
-			result += 1
-		if "missing keyword" in masking_status:
-			result += 2
-		if set(('profile', 'package.mask')).intersection(masking_status):
-			result += 3
-
-		return (result, masking_status)
-
-	def format_mask_status2(self):
-		"""Get the mask status of a given package.
-		"""
-		mask = self.pkg.mask_status()
-		if mask:
-			return pp.masking(mask)
-		else:
-			arch = self.pkg.settings("ARCH")
-			keywords = self.pkg.environment('KEYWORDS')
-			mask =  [determine_keyword(arch,
-				portage.settings["ACCEPT_KEYWORDS"],
-				keywords)]
-		return pp.masking(mask)
-
-	def format_mask(self):
-		maskmodes = ['  ', ' ~', ' -', 'M ', 'M~', 'M-', '??']
-		maskmode = maskmodes[self.format_mask_status()[0]]
-		return pp.keyword(
-			maskmode,
-			stable=not maskmode.strip(),
-			hard_masked=set(('M', '?', '-')).intersection(maskmode)
-		)
-
-	def format_cpv(self, attr=None):
-		if attr is None:
-			value = self.pkg.cpv
-		else:
-			value = getattr(self.pkg, attr)
-		if self._do_format:
-			return pp.cpv(value)
-		else:
-			return value
-
-	def format_slot(self):
-		value = self.pkg.environment("SLOT")
-		if self._do_format:
-			return pp.slot(value)
-		else:
-			return value
-
-	def format_keywords(self):
-		value = self.pkg.environment("KEYWORDS")
-		if self._do_format:
-			return pp.keyword(value)
-		else:
-			return value
+    """When applied to a L{gentoolkit.package.Package} object, determine the
+    location (Portage Tree vs. overlay), install status and masked status. That
+    information can then be easily formatted and displayed.
+
+    Example usage:
+            >>> from gentoolkit.query import Query
+            >>> from gentoolkit.package import PackageFormatter
+            >>> import portage.output
+            >>> q = Query('gcc')
+            >>> pkgs = [PackageFormatter(x) for x in q.find()]
+            >>> havecolor = portage.output.havecolor
+            >>> portage.output.havecolor = False
+            >>> for pkg in pkgs:
+            ...     # Only print packages that are installed and from the Portage
+            ...     # tree
+            ...     if set('IP').issubset(pkg.location):
+            ...             print(pkg)
+            ...
+            [IP-] [  ] sys-devel/gcc-4.5.4:4.5
+            >>> portage.output.havecolor = havecolor
+
+    @type pkg: L{gentoolkit.package.Package}
+    @param pkg: package to format
+    @type do_format: bool
+    @param do_format: Whether to format the package name or not.
+            Essentially C{do_format} should be set to False when piping or when
+            quiet output is desired. If C{do_format} is False, only the location
+            attribute will be created to save time.
+    """
+
+    _tmpl_verbose = "[$location] [$mask] $cpv:$slot"
+    _tmpl_quiet = "$cpv"
+
+    def __init__(self, pkg, do_format=True, custom_format=None):
+        self._pkg = None
+        self._do_format = do_format
+        self._str = None
+        self._location = None
+        if not custom_format:
+            if do_format:
+                custom_format = self._tmpl_verbose
+            else:
+                custom_format = self._tmpl_quiet
+        self.tmpl = Template(custom_format)
+        self.format_vars = LazyItemsDict()
+        self.pkg = pkg
+
+    def __repr__(self):
+        return "<%s %s @%#8x>" % (self.__class__.__name__, self.pkg, id(self))
+
+    def __str__(self):
+        if self._str is None:
+            self._str = self.tmpl.safe_substitute(self.format_vars)
+        return self._str
+
+    @property
+    def location(self):
+        if self._location is None:
+            self._location = self.format_package_location()
+        return self._location
+
+    @property
+    def pkg(self):
+        """Package to format"""
+        return self._pkg
+
+    @pkg.setter
+    def pkg(self, value):
+        if self._pkg == value:
+            return
+        self._pkg = value
+        self._location = None
+
+        fmt_vars = self.format_vars
+        self.format_vars.clear()
+        fmt_vars.addLazySingleton("location", lambda: getattr(self, "location"))
+        fmt_vars.addLazySingleton("mask", self.format_mask)
+        fmt_vars.addLazySingleton("mask2", self.format_mask_status2)
+        fmt_vars.addLazySingleton("cpv", self.format_cpv)
+        fmt_vars.addLazySingleton("cp", self.format_cpv, "cp")
+        fmt_vars.addLazySingleton("category", self.format_cpv, "category")
+        fmt_vars.addLazySingleton("name", self.format_cpv, "name")
+        fmt_vars.addLazySingleton("version", self.format_cpv, "version")
+        fmt_vars.addLazySingleton("revision", self.format_cpv, "revision")
+        fmt_vars.addLazySingleton("fullversion", self.format_cpv, "fullversion")
+        fmt_vars.addLazySingleton("slot", self.format_slot)
+        fmt_vars.addLazySingleton("repo", self.pkg.repo_name)
+        fmt_vars.addLazySingleton("keywords", self.format_keywords)
+
+    def format_package_location(self):
+        """Get the install status (in /var/db/?) and origin (from an overlay
+        and the Portage tree?).
+
+        @rtype: str
+        @return: one of:
+                'I--' : Installed but ebuild doesn't exist on system anymore
+                '-P-' : Not installed and from the Portage tree
+                '--O' : Not installed and from an overlay
+                'IP-' : Installed and from the Portage tree
+                'I-O' : Installed and from an overlay
+        """
+
+        result = ["-", "-", "-"]
+
+        if self.pkg.is_installed():
+            result[0] = "I"
+
+        overlay = self.pkg.is_overlay()
+        if overlay is None:
+            pass
+        elif overlay:
+            result[2] = "O"
+        else:
+            result[1] = "P"
+
+        return "".join(result)
+
+    def format_mask_status(self):
+        """Get the mask status of a given package.
+
+        @rtype: tuple: (int, list)
+        @return: int = an index for this list:
+                ["  ", " ~", " -", "M ", "M~", "M-", "??"]
+                0 = not masked
+                1 = keyword masked
+                2 = arch masked
+                3 = hard masked
+                4 = hard and keyword masked,
+                5 = hard and arch masked
+                6 = ebuild doesn't exist on system anymore
+
+                list = original output of portage.getmaskingstatus
+        """
+
+        result = 0
+        masking_status = self.pkg.mask_status()
+        if masking_status is None:
+            return (6, [])
+
+        if ("~%s keyword" % self.pkg.settings("ARCH")) in masking_status:
+            result += 1
+        if "missing keyword" in masking_status:
+            result += 2
+        if set(("profile", "package.mask")).intersection(masking_status):
+            result += 3
+
+        return (result, masking_status)
+
+    def format_mask_status2(self):
+        """Get the mask status of a given package."""
+        mask = self.pkg.mask_status()
+        if mask:
+            return pp.masking(mask)
+        else:
+            arch = self.pkg.settings("ARCH")
+            keywords = self.pkg.environment("KEYWORDS")
+            mask = [
+                determine_keyword(arch, portage.settings["ACCEPT_KEYWORDS"], keywords)
+            ]
+        return pp.masking(mask)
+
+    def format_mask(self):
+        maskmodes = ["  ", " ~", " -", "M ", "M~", "M-", "??"]
+        maskmode = maskmodes[self.format_mask_status()[0]]
+        return pp.keyword(
+            maskmode,
+            stable=not maskmode.strip(),
+            hard_masked=set(("M", "?", "-")).intersection(maskmode),
+        )
+
+    def format_cpv(self, attr=None):
+        if attr is None:
+            value = self.pkg.cpv
+        else:
+            value = getattr(self.pkg, attr)
+        if self._do_format:
+            return pp.cpv(value)
+        else:
+            return value
+
+    def format_slot(self):
+        value = self.pkg.environment("SLOT")
+        if self._do_format:
+            return pp.slot(value)
+        else:
+            return value
+
+    def format_keywords(self):
+        value = self.pkg.environment("KEYWORDS")
+        if self._do_format:
+            return pp.keyword(value)
+        else:
+            return value
 
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/pprinter.py b/pym/gentoolkit/pprinter.py
index 25e145b..c42c354 100644
--- a/pym/gentoolkit/pprinter.py
+++ b/pym/gentoolkit/pprinter.py
@@ -7,24 +7,24 @@
 """Provides a consistent color scheme for Gentoolkit scripts."""
 
 __all__ = (
-	'command',
-	'cpv',
-	'die',
-	'emph',
-	'error',
-	'globaloption',
-	'localoption',
-	'number',
-	'path',
-	'path_symlink',
-	'pkgquery',
-	'productname',
-	'regexpquery',
-	'section',
-	'slot',
-	'subsection',
-	'useflag',
-	'warn'
+    "command",
+    "cpv",
+    "die",
+    "emph",
+    "error",
+    "globaloption",
+    "localoption",
+    "number",
+    "path",
+    "path_symlink",
+    "pkgquery",
+    "productname",
+    "regexpquery",
+    "section",
+    "slot",
+    "subsection",
+    "useflag",
+    "warn",
 )
 
 # =======
@@ -42,146 +42,169 @@ from portage import archlist
 # Functions
 # =========
 
+
 def command(string):
-	"""Returns a program command string."""
-	return output.green(string)
+    """Returns a program command string."""
+    return output.green(string)
+
 
 def cpv(string):
-	"""Returns a category/package-<version> string."""
-	return output.green(string)
+    """Returns a category/package-<version> string."""
+    return output.green(string)
+
 
 def die(err, string):
-	"""Returns an error string and die with an error code."""
-	sys.stderr.write(error(string))
-	sys.exit(err)
+    """Returns an error string and die with an error code."""
+    sys.stderr.write(error(string))
+    sys.exit(err)
+
 
 def emph(string):
-	"""Returns a string as emphasized."""
-	return output.bold(string)
+    """Returns a string as emphasized."""
+    return output.bold(string)
+
 
 def error(string):
-	"""Prints an error string."""
-	return output.red("!!! ") + string + "\n"
+    """Prints an error string."""
+    return output.red("!!! ") + string + "\n"
+
 
 def globaloption(string):
-	"""Returns a global option string, i.e. the program global options."""
-	return output.yellow(string)
+    """Returns a global option string, i.e. the program global options."""
+    return output.yellow(string)
+
 
 def localoption(string):
-	"""Returns a local option string, i.e. the program local options."""
-	return output.green(string)
+    """Returns a local option string, i.e. the program local options."""
+    return output.green(string)
+
 
 def number(string):
-	"""Returns a number string."""
-	return output.turquoise(string)
+    """Returns a number string."""
+    return output.turquoise(string)
+
 
 def path(string):
-	"""Returns a file or directory path string."""
-	return output.bold(string)
+    """Returns a file or directory path string."""
+    return output.bold(string)
+
 
 def path_symlink(string):
-	"""Returns a symlink string."""
-	return output.turquoise(string)
+    """Returns a symlink string."""
+    return output.turquoise(string)
+
 
 def pkgquery(string):
-	"""Returns a package query string."""
-	return output.bold(string)
+    """Returns a package query string."""
+    return output.bold(string)
+
 
 def productname(string):
-	"""Returns a product name string, i.e. the program name."""
-	return output.turquoise(string)
+    """Returns a product name string, i.e. the program name."""
+    return output.turquoise(string)
+
 
 def regexpquery(string):
-	"""Returns a regular expression string."""
-	return output.bold(string)
+    """Returns a regular expression string."""
+    return output.bold(string)
+
 
 def section(string):
-	"""Returns a string as a section header."""
-	return output.turquoise(string)
+    """Returns a string as a section header."""
+    return output.turquoise(string)
+
 
 def slot(string):
-	"""Returns a slot string"""
-	return output.bold(string)
+    """Returns a slot string"""
+    return output.bold(string)
+
 
 def subsection(string):
-	"""Returns a string as a subsection header."""
-	return output.turquoise(string)
+    """Returns a string as a subsection header."""
+    return output.turquoise(string)
+
 
 def useflag(string, enabled=True):
-	"""Returns a USE flag string."""
-	return output.red(string) if enabled else output.blue(string)
+    """Returns a USE flag string."""
+    return output.red(string) if enabled else output.blue(string)
+
 
 def keyword(string, stable=True, hard_masked=False):
-	"""Returns a keyword string."""
-	if stable:
-		return output.green(string)
-	if hard_masked:
-		return output.red(string)
-	# keyword masked:
-	return output.blue(string)
+    """Returns a keyword string."""
+    if stable:
+        return output.green(string)
+    if hard_masked:
+        return output.red(string)
+    # keyword masked:
+    return output.blue(string)
+
 
 def masking(mask):
-	"""Returns a 'masked by' string."""
-	if 'package.mask' in mask or 'profile' in mask:
-		# use porthole wrap style to help clarify meaning
-		return output.red("M["+mask[0]+"]")
-	if mask is not []:
-		for status in mask:
-			if 'keyword' in status:
-				# keyword masked | " [missing keyword] " <=looks better
-				return output.blue("["+status+"]")
-			if status in archlist:
-				return output.green(status)
-			if 'unknown' in status:
-				return output.yellow(status)
-		return output.red(status)
-	return ''
+    """Returns a 'masked by' string."""
+    if "package.mask" in mask or "profile" in mask:
+        # use porthole wrap style to help clarify meaning
+        return output.red("M[" + mask[0] + "]")
+    if mask is not []:
+        for status in mask:
+            if "keyword" in status:
+                # keyword masked | " [missing keyword] " <=looks better
+                return output.blue("[" + status + "]")
+            if status in archlist:
+                return output.green(status)
+            if "unknown" in status:
+                return output.yellow(status)
+        return output.red(status)
+    return ""
+
 
 def warn(string):
-	"""Returns a warning string."""
-	return "!!! " + string + "\n"
+    """Returns a warning string."""
+    return "!!! " + string + "\n"
+
 
 try:
-	unicode
+    unicode
 except NameError:
-	unicode = str
+    unicode = str
+
 
 def uprint(*args, **kw):
-	"""Replacement for the builtin print function.
-
-	This version gracefully handles characters not representable in the
-	user's current locale (through the errors='replace' handler).
-
-	@see: >>> help(print)
-	"""
-
-	sep = kw.pop('sep', ' ')
-	end = kw.pop('end', '\n')
-	file = kw.pop("file", sys.stdout)
-	if kw:
-		raise TypeError("got invalid keyword arguments: {0}".format(list(kw)))
-	file = getattr(file, 'buffer', file)
-
-	encoding = locale.getpreferredencoding()
-	# Make sure that python knows the encoding. Bug 350156
-	try:
-		# We don't care about what is returned, we just want to
-		# verify that we can find a codec.
-		codecs.lookup(encoding)
-	except LookupError:
-		# Python does not know the encoding, so use utf-8.
-		encoding = 'utf_8'
-
-	def encoded_args():
-		for arg in args:
-			if isinstance(arg, bytes):
-				yield arg
-			else:
-				yield unicode(arg).encode(encoding, 'replace')
-
-	sep = sep.encode(encoding, 'replace')
-	end = end.encode(encoding, 'replace')
-	text = sep.join(encoded_args())
-	file.write(text + end)
+    """Replacement for the builtin print function.
+
+    This version gracefully handles characters not representable in the
+    user's current locale (through the errors='replace' handler).
+
+    @see: >>> help(print)
+    """
+
+    sep = kw.pop("sep", " ")
+    end = kw.pop("end", "\n")
+    file = kw.pop("file", sys.stdout)
+    if kw:
+        raise TypeError("got invalid keyword arguments: {0}".format(list(kw)))
+    file = getattr(file, "buffer", file)
+
+    encoding = locale.getpreferredencoding()
+    # Make sure that python knows the encoding. Bug 350156
+    try:
+        # We don't care about what is returned, we just want to
+        # verify that we can find a codec.
+        codecs.lookup(encoding)
+    except LookupError:
+        # Python does not know the encoding, so use utf-8.
+        encoding = "utf_8"
+
+    def encoded_args():
+        for arg in args:
+            if isinstance(arg, bytes):
+                yield arg
+            else:
+                yield unicode(arg).encode(encoding, "replace")
+
+    sep = sep.encode(encoding, "replace")
+    end = end.encode(encoding, "replace")
+    text = sep.join(encoded_args())
+    file.write(text + end)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/profile.py b/pym/gentoolkit/profile.py
index dcd02cc..f6943be 100644
--- a/pym/gentoolkit/profile.py
+++ b/pym/gentoolkit/profile.py
@@ -6,9 +6,7 @@
 
 """Routines to load profile information for ekeyword/eshowkw"""
 
-__all__ = (
-	'load_profile_data',
-)
+__all__ = ("load_profile_data",)
 
 
 import os.path
@@ -19,93 +17,101 @@ from portage import _encodings, _unicode_encode
 
 
 def warning(msg):
-	"""Write |msg| as a warning to stderr"""
-	print('warning: %s' % msg, file=sys.stderr)
-
-
-def load_profile_data(portdir=None, repo='gentoo'):
-	"""Load the list of known arches from the tree
-
-	Args:
-	  portdir: The repository to load all data from (and ignore |repo|)
-	  repo: Look up this repository by name to locate profile data
-
-	Returns:
-	  A dict mapping the keyword to its preferred state:
-	  {'x86': ('stable', 'arch'), 'mips': ('dev', '~arch'), ...}
-	"""
-	if portdir is None:
-		portdir = portage.db[portage.root]['vartree'].settings.repositories[repo].location
-
-	arch_status = {}
-
-	try:
-		arch_list = os.path.join(portdir, 'profiles', 'arch.list')
-		with open(_unicode_encode(arch_list, encoding=_encodings['fs']),
-				encoding=_encodings['content']) as f:
-			for line in f:
-				line = line.split('#', 1)[0].strip()
-				if line:
-					arch_status[line] = None
-	except IOError:
-		pass
-
-	try:
-		profile_status = {
-			'stable': 0,
-			'dev': 1,
-			'exp': 2,
-			None: 3,
-		}
-		profiles_list = os.path.join(portdir, 'profiles', 'profiles.desc')
-		with open(_unicode_encode(profiles_list, encoding=_encodings['fs']),
-				encoding=_encodings['content']) as f:
-			for line in f:
-				line = line.split('#', 1)[0].split()
-				if line:
-					arch, _profile, status = line
-					arch_status.setdefault(arch, status)
-					curr_status = profile_status[arch_status[arch]]
-					new_status = profile_status[status]
-					if new_status < curr_status:
-						arch_status[arch] = status
-	except IOError:
-		pass
-
-	if arch_status:
-		arch_status['all'] = None
-	else:
-		warning('could not read profile files: %s' % arch_list)
-		warning('will not be able to verify args are correct')
-
-	arches_desc = {}
-	try:
-		arches_list = os.path.join(portdir, 'profiles', 'arches.desc')
-		with open(_unicode_encode(arches_list, encoding=_encodings['fs']),
-				encoding=_encodings['content']) as f:
-			for line in f:
-				line = line.split('#', 1)[0].split()
-				if line:
-					arch, status = line
-					arches_desc[arch] = status
-	except IOError:
-		# backwards compatibility
-		arches_desc = {
-			'alpha': 'testing',
-			'ia64': 'testing',
-			'm68k': 'testing',
-			'mips': 'testing',
-			'riscv': 'testing',
-		}
-		for k in arch_status:
-			if '-' in k:
-				arches_desc[k] = 'testing'
-
-	for k, v in arch_status.items():
-		if arches_desc.get(k) == 'testing':
-			arch_status[k] = (v, '~arch')
-		else:
-			# TODO: explicit distinction of transitional, bad values?
-			arch_status[k] = (v, 'arch')
-
-	return arch_status
+    """Write |msg| as a warning to stderr"""
+    print("warning: %s" % msg, file=sys.stderr)
+
+
+def load_profile_data(portdir=None, repo="gentoo"):
+    """Load the list of known arches from the tree
+
+    Args:
+      portdir: The repository to load all data from (and ignore |repo|)
+      repo: Look up this repository by name to locate profile data
+
+    Returns:
+      A dict mapping the keyword to its preferred state:
+      {'x86': ('stable', 'arch'), 'mips': ('dev', '~arch'), ...}
+    """
+    if portdir is None:
+        portdir = (
+            portage.db[portage.root]["vartree"].settings.repositories[repo].location
+        )
+
+    arch_status = {}
+
+    try:
+        arch_list = os.path.join(portdir, "profiles", "arch.list")
+        with open(
+            _unicode_encode(arch_list, encoding=_encodings["fs"]),
+            encoding=_encodings["content"],
+        ) as f:
+            for line in f:
+                line = line.split("#", 1)[0].strip()
+                if line:
+                    arch_status[line] = None
+    except IOError:
+        pass
+
+    try:
+        profile_status = {
+            "stable": 0,
+            "dev": 1,
+            "exp": 2,
+            None: 3,
+        }
+        profiles_list = os.path.join(portdir, "profiles", "profiles.desc")
+        with open(
+            _unicode_encode(profiles_list, encoding=_encodings["fs"]),
+            encoding=_encodings["content"],
+        ) as f:
+            for line in f:
+                line = line.split("#", 1)[0].split()
+                if line:
+                    arch, _profile, status = line
+                    arch_status.setdefault(arch, status)
+                    curr_status = profile_status[arch_status[arch]]
+                    new_status = profile_status[status]
+                    if new_status < curr_status:
+                        arch_status[arch] = status
+    except IOError:
+        pass
+
+    if arch_status:
+        arch_status["all"] = None
+    else:
+        warning("could not read profile files: %s" % arch_list)
+        warning("will not be able to verify args are correct")
+
+    arches_desc = {}
+    try:
+        arches_list = os.path.join(portdir, "profiles", "arches.desc")
+        with open(
+            _unicode_encode(arches_list, encoding=_encodings["fs"]),
+            encoding=_encodings["content"],
+        ) as f:
+            for line in f:
+                line = line.split("#", 1)[0].split()
+                if line:
+                    arch, status = line
+                    arches_desc[arch] = status
+    except IOError:
+        # backwards compatibility
+        arches_desc = {
+            "alpha": "testing",
+            "ia64": "testing",
+            "m68k": "testing",
+            "mips": "testing",
+            "riscv": "testing",
+        }
+        for k in arch_status:
+            if "-" in k:
+                arches_desc[k] = "testing"
+
+    for k, v in arch_status.items():
+        if arches_desc.get(k) == "testing":
+            arch_status[k] = (v, "~arch")
+        else:
+            # TODO: explicit distinction of transitional, bad values?
+            arch_status[k] = (v, "arch")
+
+    return arch_status

diff --git a/pym/gentoolkit/query.py b/pym/gentoolkit/query.py
index c2b8d56..4304670 100644
--- a/pym/gentoolkit/query.py
+++ b/pym/gentoolkit/query.py
@@ -6,9 +6,7 @@
 
 """Provides common methods on a package query."""
 
-__all__ = (
-	'Query',
-)
+__all__ = ("Query",)
 
 # =======
 # Imports
@@ -34,357 +32,367 @@ from gentoolkit.sets import get_set_atoms, SETPREFIX
 # Classes
 # =======
 
+
 class Query(CPV):
-	"""Provides common methods on a package query."""
-
-	def __init__(self, query, is_regex=False):
-		"""Create query object.
-
-		@type is_regex: bool
-		@param is_regex: query is a regular expression
-		"""
-
-		# We need at least one of these chars for a valid query
-		needed_chars = ascii_letters + digits + '*'
-		if not set(query).intersection(needed_chars):
-			raise errors.GentoolkitInvalidPackage(query)
-
-		# Separate repository
-		repository = None
-		if query.count(':') == 2:
-			query, repository = query.rsplit(':', 1)
-		self.query = query.rstrip(':') # Don't leave dangling colon
-		self.repo_filter = repository
-		self.is_regex = is_regex
-		self.query_type = self._get_query_type()
-
-		# Name the rest of the chunks, if possible
-		if self.query_type != "set":
-			try:
-				atom = Atom(self.query)
-				self.__dict__.update(atom.__dict__)
-			except errors.GentoolkitInvalidAtom:
-				CPV.__init__(self, self.query)
-				self.operator = ''
-				self.atom = self.cpv
-
-	def __repr__(self):
-		rx = ''
-		if self.is_regex:
-			rx = ' regex'
-		repo = ''
-		if self.repo_filter:
-			repo = ' in %s' % self.repo_filter
-		return "<%s%s %r%s>" % (self.__class__.__name__, rx, self.query, repo)
-
-	def __str__(self):
-		return self.query
-
-	def print_summary(self):
-		"""Print a summary of the query."""
-
-		if self.query_type == "set":
-			cat_str = ""
-			pkg_str = pp.emph(self.query)
-		else:
-			try:
-				cat, pkg = self.category, self.name + self.fullversion
-			except errors.GentoolkitInvalidCPV:
-				cat = ''
-				pkg = self.atom
-			if cat and not self.is_regex:
-				cat_str = "in %s " % pp.emph(cat.lstrip('><=~!'))
-			else:
-				cat_str = ""
-
-			if self.is_regex:
-				pkg_str = pp.emph(self.query)
-			else:
-				pkg_str = pp.emph(pkg)
-
-		repo = ''
-		if self.repo_filter is not None:
-			repo = ' %s' % pp.section(self.repo_filter)
-
-		pp.uprint(" * Searching%s for %s %s..." % (repo, pkg_str, cat_str))
-
-	def smart_find(
-		self,
-		in_installed=True,
-		in_porttree=True,
-		in_overlay=True,
-		include_masked=True,
-		show_progress=True,
-		no_matches_fatal=True,
-		**kwargs
-	):
-		"""A high-level wrapper around gentoolkit package-finder functions.
-
-		@type in_installed: bool
-		@param in_installed: search for query in VARDB
-		@type in_porttree: bool
-		@param in_porttree: search for query in PORTDB
-		@type in_overlay: bool
-		@param in_overlay: search for query in overlays
-		@type show_progress: bool
-		@param show_progress: output search progress
-		@type no_matches_fatal: bool
-		@param no_matches_fatal: raise errors.GentoolkitNoMatches
-		@rtype: list
-		@return: Package objects matching query
-		"""
-
-		if in_installed:
-			if in_porttree or in_overlay:
-				simple_package_finder = partial(
-					self.find,
-					include_masked=include_masked
-				)
-				complex_package_finder = helpers.get_cpvs
-			else:
-				simple_package_finder = self.find_installed
-				complex_package_finder = helpers.get_installed_cpvs
-		elif in_porttree or in_overlay:
-			simple_package_finder = partial(
-				self.find,
-				include_masked=include_masked,
-				in_installed=False
-			)
-			complex_package_finder = helpers.get_uninstalled_cpvs
-		else:
-			raise errors.GentoolkitFatalError(
-				"Not searching in installed, Portage tree, or overlay. "
-				"Nothing to do."
-			)
-
-		if self.query_type == "set":
-			self.package_finder = simple_package_finder
-			matches = self._do_set_lookup(show_progress=show_progress)
-		elif self.query_type == "simple":
-			self.package_finder = simple_package_finder
-			matches = self._do_simple_lookup(
-				in_installed=in_installed,
-				show_progress=show_progress
-			)
-		else:
-			self.package_finder = complex_package_finder
-			matches = self._do_complex_lookup(show_progress=show_progress)
-
-		if self.repo_filter is not None:
-			matches = self._filter_by_repository(matches)
-
-		if no_matches_fatal and not matches:
-			ii = in_installed and not (in_porttree or in_overlay)
-			raise errors.GentoolkitNoMatches(self.query, in_installed=ii)
-		return matches
-
-	def find(self, in_installed=True, include_masked=True):
-		"""Returns a list of Package objects that matched the query.
-
-		@rtype: list
-		@return: matching Package objects
-		"""
-
-		if not self.query:
-			return []
-
-		try:
-			if include_masked:
-				matches = portage.db[portage.root]["porttree"].dbapi.xmatch("match-all", self.query)
-			else:
-				matches = portage.db[portage.root]["porttree"].dbapi.match(self.query)
-			if in_installed:
-				matches.extend(portage.db[portage.root]["vartree"].dbapi.match(self.query))
-		except portage.exception.InvalidAtom as err:
-			message = "query.py: find(), query=%s, InvalidAtom=%s" %(
-				self.query, str(err))
-			raise errors.GentoolkitInvalidAtom(message)
-
-		return [Package(x) for x in set(matches)]
-
-	def find_installed(self):
-		"""Return a list of Package objects that matched the search key."""
-
-		try:
-			matches = portage.db[portage.root]["vartree"].dbapi.match(self.query)
-		# catch the ambiguous package Exception
-		except portage.exception.AmbiguousPackageName as err:
-			matches = []
-			for pkgkey in err.args[0]:
-				matches.extend(portage.db[portage.root]["vartree"].dbapi.match(pkgkey))
-		except portage.exception.InvalidAtom as err:
-			raise errors.GentoolkitInvalidAtom(err)
-
-		return [Package(x) for x in set(matches)]
-
-	def find_best(self, include_keyworded=True, include_masked=True):
-		"""Returns the "best" version available.
-
-		Order of preference:
-			highest available stable =>
-			highest available keyworded =>
-			highest available masked
-
-		@rtype: Package object or None
-		@return: best of up to three options
-		@raise errors.GentoolkitInvalidAtom: if query is not valid input
-		"""
-
-		best = keyworded = masked = None
-		try:
-			best = portage.db[portage.root]["porttree"].dbapi.xmatch("bestmatch-visible", self.query)
-		except portage.exception.InvalidAtom as err:
-			message = "query.py: find_best(), bestmatch-visible, " + \
-				"query=%s, InvalidAtom=%s" %(self.query, str(err))
-			raise errors.GentoolkitInvalidAtom(message)
-		# xmatch can return an empty string, so checking for None is not enough
-		if not best:
-			if not (include_keyworded or include_masked):
-				return None
-			try:
-				matches = portage.db[portage.root]["porttree"].dbapi.xmatch("match-all", self.query)
-			except portage.exception.InvalidAtom as err:
-				message = "query.py: find_best(), match-all, query=%s, InvalidAtom=%s" %(
-					self.query, str(err))
-				raise errors.GentoolkitInvalidAtom(message)
-			masked = portage.best(matches)
-			keywordable = []
-			for m in matches:
-				status = portage.getmaskingstatus(m)
-				if 'package.mask' not in status or 'profile' not in status:
-					keywordable.append(m)
-				if matches:
-					keyworded = portage.best(keywordable)
-		else:
-			return Package(best)
-		if include_keyworded and keyworded:
-			return Package(keyworded)
-		if include_masked and masked:
-			return Package(masked)
-		return None
-
-	def uses_globbing(self):
-		"""Check the query to see if it is using globbing.
-
-		@rtype: bool
-		@return: True if query uses globbing, else False
-		"""
-
-		if set('!*?[]').intersection(self.query):
-			# Is query an atom such as '=sys-apps/portage-2.2*'?
-			if self.query[0] != '=':
-				return True
-
-		return False
-
-	def is_ranged(self):
-		"""Return True if the query appears to be ranged, else False."""
-
-		q = self.query
-		return q.startswith(('~', '<', '>')) or q.endswith('*')
-
-	def _do_simple_lookup(self, in_installed=True, show_progress=True):
-		"""Find matches for a query which is an atom or cpv."""
-
-		result = []
-
-		if show_progress and CONFIG['verbose']:
-			self.print_summary()
-
-		result = self.package_finder()
-		if not in_installed:
-			result = [x for x in result if not x.is_installed()]
-
-		return result
-
-	def _do_complex_lookup(self, show_progress=True):
-		"""Find matches for a query which is a regex or includes globbing."""
-
-		result = []
-
-		if show_progress and not CONFIG["piping"]:
-			self.print_summary()
-
-		try:
-			cat = CPV(self.query).category
-		except errors.GentoolkitInvalidCPV:
-			cat = ''
-
-		pre_filter = []
-		# The "get_" functions can pre-filter against the whole package key,
-		# but since we allow globbing now, we run into issues like:
-		# >>> portage.dep.dep_getkey("sys-apps/portage-*")
-		# 'sys-apps/portage-'
-		# So the only way to guarantee we don't overrun the key is to
-		# prefilter by cat only.
-		if cat:
-			if self.is_regex:
-				cat_re = cat
-			else:
-				cat_re = fnmatch.translate(cat)
-			predicate = lambda x: re.match(cat_re, x.split("/", 1)[0])
-			pre_filter = self.package_finder(predicate=predicate)
-
-		# Post-filter
-		if self.is_regex:
-			try:
-				re.compile(self.query)
-			except re.error:
-				raise errors.GentoolkitInvalidRegex(self.query)
-			predicate = lambda x: re.search(self.query, x)
-		else:
-			if cat:
-				query_re = fnmatch.translate(self.query)
-			else:
-				query_re = fnmatch.translate("*/%s" % self.query)
-			predicate = lambda x: re.search(query_re, x)
-		if pre_filter:
-			result = [x for x in pre_filter if predicate(x)]
-		else:
-			result = self.package_finder(predicate=predicate)
-
-		return [Package(x) for x in result]
-
-	def _do_set_lookup(self, show_progress=True):
-		"""Find matches for a query that is a package set."""
-
-		if show_progress and not CONFIG["piping"]:
-			self.print_summary()
-
-		setname = self.query[len(SETPREFIX):]
-		result = []
-		try:
-			atoms = get_set_atoms(setname)
-		except errors.GentoolkitSetNotFound:
-			return result
-
-		q = self.query
-		for atom in atoms:
-			self.query = str(atom)
-			result.extend(self._do_simple_lookup(show_progress=False))
-		self.query = q
-
-		return result
-
-	def _filter_by_repository(self, matches):
-		"""Filter out packages which do not belong to self.repo_filter."""
-
-		result = []
-		for match in matches:
-			repo_name = match.repo_name()
-			if repo_name == self.repo_filter:
-				result.append(match)
-			elif (not repo_name and
-				self.repo_filter in ('unknown', 'null')):
-				result.append(match)
-
-		return result
-
-	def _get_query_type(self):
-		"""Determine of what type the query is."""
-
-		if self.query.startswith(SETPREFIX):
-			return "set"
-		elif self.is_regex or self.uses_globbing():
-			return "complex"
-		return "simple"
+    """Provides common methods on a package query."""
+
+    def __init__(self, query, is_regex=False):
+        """Create query object.
+
+        @type is_regex: bool
+        @param is_regex: query is a regular expression
+        """
+
+        # We need at least one of these chars for a valid query
+        needed_chars = ascii_letters + digits + "*"
+        if not set(query).intersection(needed_chars):
+            raise errors.GentoolkitInvalidPackage(query)
+
+        # Separate repository
+        repository = None
+        if query.count(":") == 2:
+            query, repository = query.rsplit(":", 1)
+        self.query = query.rstrip(":")  # Don't leave dangling colon
+        self.repo_filter = repository
+        self.is_regex = is_regex
+        self.query_type = self._get_query_type()
+
+        # Name the rest of the chunks, if possible
+        if self.query_type != "set":
+            try:
+                atom = Atom(self.query)
+                self.__dict__.update(atom.__dict__)
+            except errors.GentoolkitInvalidAtom:
+                CPV.__init__(self, self.query)
+                self.operator = ""
+                self.atom = self.cpv
+
+    def __repr__(self):
+        rx = ""
+        if self.is_regex:
+            rx = " regex"
+        repo = ""
+        if self.repo_filter:
+            repo = " in %s" % self.repo_filter
+        return "<%s%s %r%s>" % (self.__class__.__name__, rx, self.query, repo)
+
+    def __str__(self):
+        return self.query
+
+    def print_summary(self):
+        """Print a summary of the query."""
+
+        if self.query_type == "set":
+            cat_str = ""
+            pkg_str = pp.emph(self.query)
+        else:
+            try:
+                cat, pkg = self.category, self.name + self.fullversion
+            except errors.GentoolkitInvalidCPV:
+                cat = ""
+                pkg = self.atom
+            if cat and not self.is_regex:
+                cat_str = "in %s " % pp.emph(cat.lstrip("><=~!"))
+            else:
+                cat_str = ""
+
+            if self.is_regex:
+                pkg_str = pp.emph(self.query)
+            else:
+                pkg_str = pp.emph(pkg)
+
+        repo = ""
+        if self.repo_filter is not None:
+            repo = " %s" % pp.section(self.repo_filter)
+
+        pp.uprint(" * Searching%s for %s %s..." % (repo, pkg_str, cat_str))
+
+    def smart_find(
+        self,
+        in_installed=True,
+        in_porttree=True,
+        in_overlay=True,
+        include_masked=True,
+        show_progress=True,
+        no_matches_fatal=True,
+        **kwargs
+    ):
+        """A high-level wrapper around gentoolkit package-finder functions.
+
+        @type in_installed: bool
+        @param in_installed: search for query in VARDB
+        @type in_porttree: bool
+        @param in_porttree: search for query in PORTDB
+        @type in_overlay: bool
+        @param in_overlay: search for query in overlays
+        @type show_progress: bool
+        @param show_progress: output search progress
+        @type no_matches_fatal: bool
+        @param no_matches_fatal: raise errors.GentoolkitNoMatches
+        @rtype: list
+        @return: Package objects matching query
+        """
+
+        if in_installed:
+            if in_porttree or in_overlay:
+                simple_package_finder = partial(
+                    self.find, include_masked=include_masked
+                )
+                complex_package_finder = helpers.get_cpvs
+            else:
+                simple_package_finder = self.find_installed
+                complex_package_finder = helpers.get_installed_cpvs
+        elif in_porttree or in_overlay:
+            simple_package_finder = partial(
+                self.find, include_masked=include_masked, in_installed=False
+            )
+            complex_package_finder = helpers.get_uninstalled_cpvs
+        else:
+            raise errors.GentoolkitFatalError(
+                "Not searching in installed, Portage tree, or overlay. "
+                "Nothing to do."
+            )
+
+        if self.query_type == "set":
+            self.package_finder = simple_package_finder
+            matches = self._do_set_lookup(show_progress=show_progress)
+        elif self.query_type == "simple":
+            self.package_finder = simple_package_finder
+            matches = self._do_simple_lookup(
+                in_installed=in_installed, show_progress=show_progress
+            )
+        else:
+            self.package_finder = complex_package_finder
+            matches = self._do_complex_lookup(show_progress=show_progress)
+
+        if self.repo_filter is not None:
+            matches = self._filter_by_repository(matches)
+
+        if no_matches_fatal and not matches:
+            ii = in_installed and not (in_porttree or in_overlay)
+            raise errors.GentoolkitNoMatches(self.query, in_installed=ii)
+        return matches
+
+    def find(self, in_installed=True, include_masked=True):
+        """Returns a list of Package objects that matched the query.
+
+        @rtype: list
+        @return: matching Package objects
+        """
+
+        if not self.query:
+            return []
+
+        try:
+            if include_masked:
+                matches = portage.db[portage.root]["porttree"].dbapi.xmatch(
+                    "match-all", self.query
+                )
+            else:
+                matches = portage.db[portage.root]["porttree"].dbapi.match(self.query)
+            if in_installed:
+                matches.extend(
+                    portage.db[portage.root]["vartree"].dbapi.match(self.query)
+                )
+        except portage.exception.InvalidAtom as err:
+            message = "query.py: find(), query=%s, InvalidAtom=%s" % (
+                self.query,
+                str(err),
+            )
+            raise errors.GentoolkitInvalidAtom(message)
+
+        return [Package(x) for x in set(matches)]
+
+    def find_installed(self):
+        """Return a list of Package objects that matched the search key."""
+
+        try:
+            matches = portage.db[portage.root]["vartree"].dbapi.match(self.query)
+        # catch the ambiguous package Exception
+        except portage.exception.AmbiguousPackageName as err:
+            matches = []
+            for pkgkey in err.args[0]:
+                matches.extend(portage.db[portage.root]["vartree"].dbapi.match(pkgkey))
+        except portage.exception.InvalidAtom as err:
+            raise errors.GentoolkitInvalidAtom(err)
+
+        return [Package(x) for x in set(matches)]
+
+    def find_best(self, include_keyworded=True, include_masked=True):
+        """Returns the "best" version available.
+
+        Order of preference:
+                highest available stable =>
+                highest available keyworded =>
+                highest available masked
+
+        @rtype: Package object or None
+        @return: best of up to three options
+        @raise errors.GentoolkitInvalidAtom: if query is not valid input
+        """
+
+        best = keyworded = masked = None
+        try:
+            best = portage.db[portage.root]["porttree"].dbapi.xmatch(
+                "bestmatch-visible", self.query
+            )
+        except portage.exception.InvalidAtom as err:
+            message = (
+                "query.py: find_best(), bestmatch-visible, "
+                + "query=%s, InvalidAtom=%s" % (self.query, str(err))
+            )
+            raise errors.GentoolkitInvalidAtom(message)
+        # xmatch can return an empty string, so checking for None is not enough
+        if not best:
+            if not (include_keyworded or include_masked):
+                return None
+            try:
+                matches = portage.db[portage.root]["porttree"].dbapi.xmatch(
+                    "match-all", self.query
+                )
+            except portage.exception.InvalidAtom as err:
+                message = (
+                    "query.py: find_best(), match-all, query=%s, InvalidAtom=%s"
+                    % (self.query, str(err))
+                )
+                raise errors.GentoolkitInvalidAtom(message)
+            masked = portage.best(matches)
+            keywordable = []
+            for m in matches:
+                status = portage.getmaskingstatus(m)
+                if "package.mask" not in status or "profile" not in status:
+                    keywordable.append(m)
+                if matches:
+                    keyworded = portage.best(keywordable)
+        else:
+            return Package(best)
+        if include_keyworded and keyworded:
+            return Package(keyworded)
+        if include_masked and masked:
+            return Package(masked)
+        return None
+
+    def uses_globbing(self):
+        """Check the query to see if it is using globbing.
+
+        @rtype: bool
+        @return: True if query uses globbing, else False
+        """
+
+        if set("!*?[]").intersection(self.query):
+            # Is query an atom such as '=sys-apps/portage-2.2*'?
+            if self.query[0] != "=":
+                return True
+
+        return False
+
+    def is_ranged(self):
+        """Return True if the query appears to be ranged, else False."""
+
+        q = self.query
+        return q.startswith(("~", "<", ">")) or q.endswith("*")
+
+    def _do_simple_lookup(self, in_installed=True, show_progress=True):
+        """Find matches for a query which is an atom or cpv."""
+
+        result = []
+
+        if show_progress and CONFIG["verbose"]:
+            self.print_summary()
+
+        result = self.package_finder()
+        if not in_installed:
+            result = [x for x in result if not x.is_installed()]
+
+        return result
+
+    def _do_complex_lookup(self, show_progress=True):
+        """Find matches for a query which is a regex or includes globbing."""
+
+        result = []
+
+        if show_progress and not CONFIG["piping"]:
+            self.print_summary()
+
+        try:
+            cat = CPV(self.query).category
+        except errors.GentoolkitInvalidCPV:
+            cat = ""
+
+        pre_filter = []
+        # The "get_" functions can pre-filter against the whole package key,
+        # but since we allow globbing now, we run into issues like:
+        # >>> portage.dep.dep_getkey("sys-apps/portage-*")
+        # 'sys-apps/portage-'
+        # So the only way to guarantee we don't overrun the key is to
+        # prefilter by cat only.
+        if cat:
+            if self.is_regex:
+                cat_re = cat
+            else:
+                cat_re = fnmatch.translate(cat)
+            predicate = lambda x: re.match(cat_re, x.split("/", 1)[0])
+            pre_filter = self.package_finder(predicate=predicate)
+
+        # Post-filter
+        if self.is_regex:
+            try:
+                re.compile(self.query)
+            except re.error:
+                raise errors.GentoolkitInvalidRegex(self.query)
+            predicate = lambda x: re.search(self.query, x)
+        else:
+            if cat:
+                query_re = fnmatch.translate(self.query)
+            else:
+                query_re = fnmatch.translate("*/%s" % self.query)
+            predicate = lambda x: re.search(query_re, x)
+        if pre_filter:
+            result = [x for x in pre_filter if predicate(x)]
+        else:
+            result = self.package_finder(predicate=predicate)
+
+        return [Package(x) for x in result]
+
+    def _do_set_lookup(self, show_progress=True):
+        """Find matches for a query that is a package set."""
+
+        if show_progress and not CONFIG["piping"]:
+            self.print_summary()
+
+        setname = self.query[len(SETPREFIX) :]
+        result = []
+        try:
+            atoms = get_set_atoms(setname)
+        except errors.GentoolkitSetNotFound:
+            return result
+
+        q = self.query
+        for atom in atoms:
+            self.query = str(atom)
+            result.extend(self._do_simple_lookup(show_progress=False))
+        self.query = q
+
+        return result
+
+    def _filter_by_repository(self, matches):
+        """Filter out packages which do not belong to self.repo_filter."""
+
+        result = []
+        for match in matches:
+            repo_name = match.repo_name()
+            if repo_name == self.repo_filter:
+                result.append(match)
+            elif not repo_name and self.repo_filter in ("unknown", "null"):
+                result.append(match)
+
+        return result
+
+    def _get_query_type(self):
+        """Determine what type the query is."""
+
+        if self.query.startswith(SETPREFIX):
+            return "set"
+        elif self.is_regex or self.uses_globbing():
+            return "complex"
+        return "simple"

diff --git a/pym/gentoolkit/revdep_rebuild/analyse.py b/pym/gentoolkit/revdep_rebuild/analyse.py
index bdd8306..c9c4a77 100644
--- a/pym/gentoolkit/revdep_rebuild/analyse.py
+++ b/pym/gentoolkit/revdep_rebuild/analyse.py
@@ -10,8 +10,12 @@ from portage import _encodings, _unicode_encode
 from portage.output import bold, blue, yellow, green
 
 from .stuff import scan
-from .collect import (prepare_search_dirs, parse_revdep_config,
-	collect_libraries_from_dir, collect_binaries_from_dir)
+from .collect import (
+    prepare_search_dirs,
+    parse_revdep_config,
+    collect_libraries_from_dir,
+    collect_binaries_from_dir,
+)
 from .assign import assign_packages
 from .cache import save_cache
 
@@ -19,381 +23,422 @@ current_milli_time = lambda: int(round(time.time() * 1000))
 
 
 def scan_files(libs_and_bins, cmd_max_args, logger, searchbits):
-	'''Calls stuff.scan() and processes the data into a dictionary
-	of scanned files information.
-
-	@param libs_and_bins: set of libraries and binaries to scan for lib links.
-	@param cmd_max_args: maximum number of files to pass into scanelf calls.
-	@param logger: python style Logging function to use for output.
-	@returns dict: {bit_length: {soname: {filename: set(needed)}}}
-	'''
-	stime = current_milli_time()
-	scanned_files = {} # {bits: {soname: (filename, needed), ...}, ...}
-	lines = scan(['-BF', '%F;%f;%S;%n;%M'],
-				 libs_and_bins, cmd_max_args, logger)
-	ftime = current_milli_time()
-	logger.debug("\tscan_files(); total time to get scanelf data is "
-		"%d milliseconds" % (ftime-stime))
-	stime = current_milli_time()
-	count = 0
-	for line in lines:
-		parts = line.split(';')
-		if len(parts) != 5:
-			logger.error("\tscan_files(); error processing lib: %s" % line)
-			logger.error("\tscan_files(); parts = %s" % str(parts))
-			continue
-		filename, sfilename, soname, needed, bits = parts
-		filename = os.path.realpath(filename)
-		needed = needed.split(',')
-		bits = bits[8:] # 8: -> strlen('ELFCLASS')
-		if bits not in searchbits:
-			continue
-		if not soname:
-			soname = sfilename
-
-		if bits not in scanned_files:
-			scanned_files[bits] = {}
-		if soname not in scanned_files[bits]:
-			scanned_files[bits][soname] = {}
-		if filename not in scanned_files[bits][soname]:
-			scanned_files[bits][soname][filename] = set(needed)
-			count += 1
-		else:
-			scanned_files[bits][soname][filename].update(needed)
-	ftime = current_milli_time()
-	logger.debug("\tscan_files(); total filenames found: %d in %d milliseconds"
-		% (count, ftime-stime))
-	return scanned_files
+    """Calls stuff.scan() and processes the data into a dictionary
+    of scanned files information.
+
+    @param libs_and_bins: set of libraries and binaries to scan for lib links.
+    @param cmd_max_args: maximum number of files to pass into scanelf calls.
+    @param logger: python style Logging function to use for output.
+    @returns dict: {bit_length: {soname: {filename: set(needed)}}}
+    """
+    stime = current_milli_time()
+    scanned_files = {}  # {bits: {soname: (filename, needed), ...}, ...}
+    lines = scan(["-BF", "%F;%f;%S;%n;%M"], libs_and_bins, cmd_max_args, logger)
+    ftime = current_milli_time()
+    logger.debug(
+        "\tscan_files(); total time to get scanelf data is "
+        "%d milliseconds" % (ftime - stime)
+    )
+    stime = current_milli_time()
+    count = 0
+    for line in lines:
+        parts = line.split(";")
+        if len(parts) != 5:
+            logger.error("\tscan_files(); error processing lib: %s" % line)
+            logger.error("\tscan_files(); parts = %s" % str(parts))
+            continue
+        filename, sfilename, soname, needed, bits = parts
+        filename = os.path.realpath(filename)
+        needed = needed.split(",")
+        bits = bits[8:]  # 8: -> strlen('ELFCLASS')
+        if bits not in searchbits:
+            continue
+        if not soname:
+            soname = sfilename
+
+        if bits not in scanned_files:
+            scanned_files[bits] = {}
+        if soname not in scanned_files[bits]:
+            scanned_files[bits][soname] = {}
+        if filename not in scanned_files[bits][soname]:
+            scanned_files[bits][soname][filename] = set(needed)
+            count += 1
+        else:
+            scanned_files[bits][soname][filename].update(needed)
+    ftime = current_milli_time()
+    logger.debug(
+        "\tscan_files(); total filenames found: %d in %d milliseconds"
+        % (count, ftime - stime)
+    )
+    return scanned_files
 
 
 def extract_dependencies_from_la(la, libraries, to_check, logger):
-	broken = []
-
-	libnames = []
-	for lib in libraries:
-		match = re.match(r'.+\/(.+)\.(so|la|a)(\..+)?', lib)
-		if match is not None:
-			libname = match.group(1)
-			if libname not in libnames:
-				libnames += [libname, ]
-
-	for _file in la:
-		if not os.path.exists(_file):
-			continue
-
-		for line in open(_unicode_encode(_file, encoding=_encodings['fs']), mode='r',
-			encoding=_encodings['content']).readlines():
-			line = line.strip()
-			if line.startswith('dependency_libs='):
-				match = re.match(r"dependency_libs='([^']+)'", line)
-				if match is not None:
-					for el in match.group(1).split(' '):
-						el = el.strip()
-						if (len(el) < 1 or el.startswith('-L')
-							or el.startswith('-R')
-							):
-							continue
-
-						if el.startswith('-l') and 'lib'+el[2:] in libnames:
-							pass
-						elif el in la or el in libraries:
-							pass
-						else:
-							if to_check:
-								_break = False
-								for tc in to_check:
-									if tc in el:
-										_break = True
-										break
-								if not _break:
-									continue
-
-							logger.info('\t' + yellow(' * ') + _file +
-								' is broken (requires: ' + bold(el)+')')
-							broken.append(_file)
-	return broken
+    broken = []
+
+    libnames = []
+    for lib in libraries:
+        match = re.match(r".+\/(.+)\.(so|la|a)(\..+)?", lib)
+        if match is not None:
+            libname = match.group(1)
+            if libname not in libnames:
+                libnames += [
+                    libname,
+                ]
+
+    for _file in la:
+        if not os.path.exists(_file):
+            continue
+
+        for line in open(
+            _unicode_encode(_file, encoding=_encodings["fs"]),
+            mode="r",
+            encoding=_encodings["content"],
+        ).readlines():
+            line = line.strip()
+            if line.startswith("dependency_libs="):
+                match = re.match(r"dependency_libs='([^']+)'", line)
+                if match is not None:
+                    for el in match.group(1).split(" "):
+                        el = el.strip()
+                        if len(el) < 1 or el.startswith("-L") or el.startswith("-R"):
+                            continue
+
+                        if el.startswith("-l") and "lib" + el[2:] in libnames:
+                            pass
+                        elif el in la or el in libraries:
+                            pass
+                        else:
+                            if to_check:
+                                _break = False
+                                for tc in to_check:
+                                    if tc in el:
+                                        _break = True
+                                        break
+                                if not _break:
+                                    continue
+
+                            logger.info(
+                                "\t"
+                                + yellow(" * ")
+                                + _file
+                                + " is broken (requires: "
+                                + bold(el)
+                                + ")"
+                            )
+                            broken.append(_file)
+    return broken
 
 
 class LibCheck:
-	def __init__(self, scanned_files, logger, searchlibs=None, searchbits=None,
-				all_masks=None, masked_dirs=None):
-		'''LibCheck init function.
-
-		@param scanned_files: optional dictionary if the type created by
-				scan_files().  Defaults to the class instance of scanned_files
-		@param logger: python style Logging function to use for output.
-		@param searchlibs: optional set() of libraries to search for. If defined
-				it toggles several settings to configure this class for
-				a target search rather than a broken libs search.
-		'''
-		self.scanned_files = scanned_files
-		self.logger = logger
-		self.searchlibs = searchlibs
-		self.searchbits = sorted(searchbits) or ['32', '64']
-		self.all_masks = all_masks
-		self.masked_dirs = masked_dirs
-		self.logger.debug("\tLibCheck.__init__(), new searchlibs: %s" %(self.searchbits))
-		if searchlibs:
-			self.smsg = '\tLibCheck.search(), Checking for %s bit dependants'
-			self.pmsg = yellow(" * ") + 'Files that depend on: %s (%s bits)'
-			self.setlibs = self._setslibs
-			self.check = self._checkforlib
-		else:
-			self.smsg = '\tLibCheck.search(), Checking for broken %s bit libs'
-			self.pmsg = green(' * ') + bold('Broken files that require:') + ' %s (%s bits)'
-			self.setlibs = self._setlibs
-			self.check = self._checkbroken
-		self.sfmsg = "\tLibCheck.search(); Total found: %(count)d libs, %(deps)d files in %(time)d milliseconds"
-		self.alllibs = None
-
-
-	def _setslibs(self, l, b):
-		'''Internal function.  Use the class's setlibs variable'''
-		sonames = []
-		for s in self.searchlibs:
-			if s in self.scanned_files[b].keys():
-				sonames.append(s)
-				continue
-
-			found_partial = [a for a in self.scanned_files[b].keys() if s in a]
-			if found_partial:
-				sonames += found_partial
-				continue
-
-			for k, v in self.scanned_files[b].items():
-				for vv in v.keys():
-					if s in vv:
-						sonames.append(k)
-						break
-
-		self.alllibs = '|'.join(sonames) + '|'
-		self.logger.debug("\tLibCheck._setslibs(), new alllibs: %s" %(self.alllibs))
-
-
-	def _setlibs(self, l, b):
-		'''Internal function.  Use the class's setlibs variable'''
-		self.alllibs = '|'.join(l) + '|'
-
-
-	def _checkforlib(self, l):
-		'''Internal function.  Use the class's check variable'''
-		if l:
-			return l+'|' in self.alllibs
-		return False
-
-
-	def _checkbroken(self, l):
-		'''Internal function.  Use the class's check variable'''
-		if l:
-			return l+'|' not in self.alllibs
-		return False
-
-
-	def search(self, scanned_files=None):
-		'''Searches the scanned files for broken lib links
-		or for libs to search for
-
-		@param scanned_files: optional dictionary of the type created by
-				scan_files(). Defaults to the class instance of scanned_files
-		@ returns: dict: {bit_length: {found_lib: set(file_paths)}}.
-		'''
-		stime = current_milli_time()
-		count = 0
-		fcount = 0
-		if not scanned_files:
-			scanned_files = self.scanned_files
-		found_libs = {}
-		for bits in self.searchbits:
-			try:
-				scanned = scanned_files[bits]
-			except KeyError:
-				self.logger.debug('There are no %s-bit libraries'%bits)
-				continue
-			self.logger.debug(self.smsg % bits)
-			self.setlibs(sorted(scanned), bits)
-			for soname, filepaths in scanned.items():
-				for filename, needed in filepaths.items():
-					for l in needed:
-						if self.check(l):
-							if l in self.all_masks:
-								self.logger.debug('\tLibrary %s ignored as it is masked' % l)
-								continue
-							if (filename in self.all_masks or
-								os.path.realpath(filename) in self.all_masks or
-								self.is_masked(os.path.realpath(filename))
-								):
-								self.logger.debug('\tFile %s ignored as it is masked' % filename)
-								continue
-							if not bits in found_libs:
-								found_libs[bits] = {}
-							try:
-								found_libs[bits][l].add(filename)
-							except KeyError:
-								found_libs[bits][l] = set([filename])
-								count += 1
-							fcount += 1
-							self.logger.debug("\tLibCheck.search(); FOUND:"
-									" %sbit, %s, %s" % (bits, l, filename))
-		ftime = current_milli_time()
-		self.logger.debug(self.sfmsg % {'count': count, 'deps': fcount,
-			'time': ftime-stime})
-		return found_libs
-
-
-	def is_masked(self, filename):
-		for m in self.masked_dirs:
-			t = os.path.realpath(m).split(os.sep)
-			f = filename.split(os.sep)
-			# self.logger.debug("\tis_masked(); %s, %s" % (t, f))
-			if t == f[:min(len(t), len(f))]:
-				return True
-		return False
-
-
-	def process_results(self, found_libs, scanned_files=None):
-		'''Processes the search results, logs the files found
-
-		@param found_libs: dictionary of the type returned by search()
-		@param scanned_files: optional dictionary if the type created by
-				scan_files().  Defaults to the class instance of scanned_files
-		@ returns: list: of filepaths from teh search results.
-		'''
-		stime = current_milli_time()
-		if not scanned_files:
-			scanned_files = self.scanned_files
-		found_pathes = []
-		for bits, found in found_libs.items():
-			for lib, files in found.items():
-				self.logger.info(self.pmsg  % (bold(lib), bits))
-				for fp in sorted(files):
-					self.logger.info('\t' +yellow('* ') + fp)
-					found_pathes.append(fp)
-		ftime = current_milli_time()
-		self.logger.debug("\tLibCheck.process_results(); total filepaths found: "
-			"%d in %d milliseconds" % (len(found_pathes), ftime-stime))
-		return found_pathes
-
-
-def analyse(settings, logger, libraries=None, la_libraries=None,
-		libraries_links=None, binaries=None, _libs_to_check=None):
-	"""Main program body.  It will collect all info and determine the
-	pkgs needing rebuilding.
-
-	@param logger: logger used for logging messages, instance of logging.Logger
-				   class. Can be logging (RootLogger).
-	@param _libs_to_check Libraries that need to be checked only
-	@rtype list: list of pkgs that need rebuilding
-	"""
-
-	searchbits = set()
-	'''if _libs_to_check:
+    def __init__(
+        self,
+        scanned_files,
+        logger,
+        searchlibs=None,
+        searchbits=None,
+        all_masks=None,
+        masked_dirs=None,
+    ):
+        """LibCheck init function.
+
+        @param scanned_files: optional dictionary of the type created by
+                        scan_files().  Defaults to the class instance of scanned_files
+        @param logger: python style Logging function to use for output.
+        @param searchlibs: optional set() of libraries to search for. If defined
+                        it toggles several settings to configure this class for
+                        a target search rather than a broken libs search.
+        """
+        self.scanned_files = scanned_files
+        self.logger = logger
+        self.searchlibs = searchlibs
+        self.searchbits = sorted(searchbits) or ["32", "64"]
+        self.all_masks = all_masks
+        self.masked_dirs = masked_dirs
+        self.logger.debug(
+            "\tLibCheck.__init__(), new searchlibs: %s" % (self.searchbits)
+        )
+        if searchlibs:
+            self.smsg = "\tLibCheck.search(), Checking for %s bit dependants"
+            self.pmsg = yellow(" * ") + "Files that depend on: %s (%s bits)"
+            self.setlibs = self._setslibs
+            self.check = self._checkforlib
+        else:
+            self.smsg = "\tLibCheck.search(), Checking for broken %s bit libs"
+            self.pmsg = (
+                green(" * ") + bold("Broken files that require:") + " %s (%s bits)"
+            )
+            self.setlibs = self._setlibs
+            self.check = self._checkbroken
+        self.sfmsg = "\tLibCheck.search(); Total found: %(count)d libs, %(deps)d files in %(time)d milliseconds"
+        self.alllibs = None
+
+    def _setslibs(self, l, b):
+        """Internal function.  Use the class's setlibs variable"""
+        sonames = []
+        for s in self.searchlibs:
+            if s in self.scanned_files[b].keys():
+                sonames.append(s)
+                continue
+
+            found_partial = [a for a in self.scanned_files[b].keys() if s in a]
+            if found_partial:
+                sonames += found_partial
+                continue
+
+            for k, v in self.scanned_files[b].items():
+                for vv in v.keys():
+                    if s in vv:
+                        sonames.append(k)
+                        break
+
+        self.alllibs = "|".join(sonames) + "|"
+        self.logger.debug("\tLibCheck._setslibs(), new alllibs: %s" % (self.alllibs))
+
+    def _setlibs(self, l, b):
+        """Internal function.  Use the class's setlibs variable"""
+        self.alllibs = "|".join(l) + "|"
+
+    def _checkforlib(self, l):
+        """Internal function.  Use the class's check variable"""
+        if l:
+            return l + "|" in self.alllibs
+        return False
+
+    def _checkbroken(self, l):
+        """Internal function.  Use the class's check variable"""
+        if l:
+            return l + "|" not in self.alllibs
+        return False
+
+    def search(self, scanned_files=None):
+        """Searches the scanned files for broken lib links
+        or for libs to search for
+
+        @param scanned_files: optional dictionary of the type created by
+                        scan_files(). Defaults to the class instance of scanned_files
+        @ returns: dict: {bit_length: {found_lib: set(file_paths)}}.
+        """
+        stime = current_milli_time()
+        count = 0
+        fcount = 0
+        if not scanned_files:
+            scanned_files = self.scanned_files
+        found_libs = {}
+        for bits in self.searchbits:
+            try:
+                scanned = scanned_files[bits]
+            except KeyError:
+                self.logger.debug("There are no %s-bit libraries" % bits)
+                continue
+            self.logger.debug(self.smsg % bits)
+            self.setlibs(sorted(scanned), bits)
+            for soname, filepaths in scanned.items():
+                for filename, needed in filepaths.items():
+                    for l in needed:
+                        if self.check(l):
+                            if l in self.all_masks:
+                                self.logger.debug(
+                                    "\tLibrary %s ignored as it is masked" % l
+                                )
+                                continue
+                            if (
+                                filename in self.all_masks
+                                or os.path.realpath(filename) in self.all_masks
+                                or self.is_masked(os.path.realpath(filename))
+                            ):
+                                self.logger.debug(
+                                    "\tFile %s ignored as it is masked" % filename
+                                )
+                                continue
+                            if not bits in found_libs:
+                                found_libs[bits] = {}
+                            try:
+                                found_libs[bits][l].add(filename)
+                            except KeyError:
+                                found_libs[bits][l] = set([filename])
+                                count += 1
+                            fcount += 1
+                            self.logger.debug(
+                                "\tLibCheck.search(); FOUND:"
+                                " %sbit, %s, %s" % (bits, l, filename)
+                            )
+        ftime = current_milli_time()
+        self.logger.debug(
+            self.sfmsg % {"count": count, "deps": fcount, "time": ftime - stime}
+        )
+        return found_libs
+
+    def is_masked(self, filename):
+        for m in self.masked_dirs:
+            t = os.path.realpath(m).split(os.sep)
+            f = filename.split(os.sep)
+            # self.logger.debug("\tis_masked(); %s, %s" % (t, f))
+            if t == f[: min(len(t), len(f))]:
+                return True
+        return False
+
+    def process_results(self, found_libs, scanned_files=None):
+        """Processes the search results, logs the files found
+
+        @param found_libs: dictionary of the type returned by search()
+        @param scanned_files: optional dictionary of the type created by
+                        scan_files().  Defaults to the class instance of scanned_files
+        @ returns: list: of filepaths from the search results.
+        """
+        stime = current_milli_time()
+        if not scanned_files:
+            scanned_files = self.scanned_files
+        found_pathes = []
+        for bits, found in found_libs.items():
+            for lib, files in found.items():
+                self.logger.info(self.pmsg % (bold(lib), bits))
+                for fp in sorted(files):
+                    self.logger.info("\t" + yellow("* ") + fp)
+                    found_pathes.append(fp)
+        ftime = current_milli_time()
+        self.logger.debug(
+            "\tLibCheck.process_results(); total filepaths found: "
+            "%d in %d milliseconds" % (len(found_pathes), ftime - stime)
+        )
+        return found_pathes
+
+
+def analyse(
+    settings,
+    logger,
+    libraries=None,
+    la_libraries=None,
+    libraries_links=None,
+    binaries=None,
+    _libs_to_check=None,
+):
+    """Main program body.  It will collect all info and determine the
+    pkgs needing rebuilding.
+
+    @param logger: logger used for logging messages, instance of logging.Logger
+                               class. Can be logging (RootLogger).
+    @param _libs_to_check: Libraries that need to be checked only
+    @rtype list: list of pkgs that need rebuilding
+    """
+
+    searchbits = set()
+    """if _libs_to_check:
 		for lib in _libs_to_check:
 			if "lib64" in lib:
 				searchbits.add('64')
 			elif "lib32" in lib:
 				searchbits.add('32')
 	else:
-		_libs_to_check = set()'''
-	searchbits.update(['64', '32'])
-
-	masked_dirs, masked_files, ld = parse_revdep_config(settings['REVDEP_CONFDIR'])
-	masked_dirs.update([
-		'/lib/modules',
-		'/lib32/modules',
-		'/lib64/modules',
-		]
-	)
-
-	if '64' not in searchbits:
-		masked_dirs.update(['/lib64', '/usr/lib64'])
-	elif '32' not in searchbits:
-		masked_dirs.update(['/lib32', '/usr/lib32'])
-
-	all_masks = masked_dirs.copy()
-	all_masks.update(masked_files)
-	logger.debug("\tall_masks:")
-	for x in sorted(all_masks):
-		logger.debug('\t\t%s' % (x))
-
-	if libraries and la_libraries and libraries_links and binaries:
-		logger.info(blue(' * ') +
-			bold('Found a valid cache, skipping collecting phase'))
-	else:
-		#TODO: add partial cache (for ex. only libraries)
-		# when found for some reason
-
-		stime = current_milli_time()
-		logger.warning(green(' * ') +
-			bold('Collecting system binaries and libraries'))
-		bin_dirs, lib_dirs = prepare_search_dirs(logger, settings)
-
-		lib_dirs.update(ld)
-		bin_dirs.update(ld)
-
-		logger.debug('\tanalyse(), bin directories:')
-		for x in sorted(bin_dirs):
-			logger.debug('\t\t%s' % (x))
-		logger.debug('\tanalyse(), lib directories:')
-		for x in sorted(lib_dirs):
-			logger.debug('\t\t%s' % (x))
-		logger.debug('\tanalyse(), masked directories:')
-		for x in sorted(masked_dirs):
-			logger.debug('\t\t%s' % (x))
-		logger.debug('\tanalyse(), masked files:')
-		for x in sorted(masked_files):
-			logger.debug('\t\t%s' % (x))
-
-		ftime = current_milli_time()
-		logger.debug('\ttime to complete task: %d milliseconds' % (ftime-stime))
-		stime = current_milli_time()
-		logger.info(green(' * ') +
-			bold('Collecting dynamic linking informations'))
-
-		libraries, la_libraries, libraries_links = \
-			collect_libraries_from_dir(lib_dirs, all_masks, logger)
-		binaries = collect_binaries_from_dir(bin_dirs, all_masks, logger)
-		ftime = current_milli_time()
-		logger.debug('\ttime to complete task: %d milliseconds' % (ftime-stime))
-
-		if settings['USE_TMP_FILES']:
-			save_cache(logger=logger,
-				to_save={'libraries':libraries, 'la_libraries':la_libraries,
-					'libraries_links':libraries_links, 'binaries':binaries
-				},
-			temp_path=settings['DEFAULT_TMP_DIR']
-			)
-
-
-	logger.debug('\tanalyse(), Found %i libraries (+%i symlinks) and %i binaries' %
-		(len(libraries), len(libraries_links), len(binaries))
-	)
-	logger.info(green(' * ') + bold('Scanning files'))
-
-	libs_and_bins = libraries.union(binaries)
-
-	scanned_files = scan_files(libs_and_bins, settings['CMD_MAX_ARGS'],
-		logger, searchbits)
-
-	logger.warning(green(' * ') + bold('Checking dynamic linking consistency'))
-	logger.debug(
-		'\tanalyse(), Searching for %i libs, bins within %i libraries and links'
-		% (len(libs_and_bins), len(libraries)+len(libraries_links))
-	)
-
-	libcheck = LibCheck(scanned_files, logger, _libs_to_check, searchbits,
-						all_masks, masked_dirs)
-
-	broken_pathes = libcheck.process_results(libcheck.search())
-
-	broken_la = extract_dependencies_from_la(la_libraries,
-		libraries.union(libraries_links), _libs_to_check, logger)
-	broken_pathes += broken_la
-
-	if broken_pathes:
-		logger.warning(green(' * ') + bold('Assign files to packages'))
-		return assign_packages(broken_pathes, logger, settings)
-	return None, None # no need to assign anything
-
-
-if __name__ == '__main__':
-	print("This script shouldn't be called directly")
+		_libs_to_check = set()"""
+    searchbits.update(["64", "32"])
+
+    masked_dirs, masked_files, ld = parse_revdep_config(settings["REVDEP_CONFDIR"])
+    masked_dirs.update(
+        [
+            "/lib/modules",
+            "/lib32/modules",
+            "/lib64/modules",
+        ]
+    )
+
+    if "64" not in searchbits:
+        masked_dirs.update(["/lib64", "/usr/lib64"])
+    elif "32" not in searchbits:
+        masked_dirs.update(["/lib32", "/usr/lib32"])
+
+    all_masks = masked_dirs.copy()
+    all_masks.update(masked_files)
+    logger.debug("\tall_masks:")
+    for x in sorted(all_masks):
+        logger.debug("\t\t%s" % (x))
+
+    if libraries and la_libraries and libraries_links and binaries:
+        logger.info(
+            blue(" * ") + bold("Found a valid cache, skipping collecting phase")
+        )
+    else:
+        # TODO: add partial cache (for ex. only libraries)
+        # when found for some reason
+
+        stime = current_milli_time()
+        logger.warning(green(" * ") + bold("Collecting system binaries and libraries"))
+        bin_dirs, lib_dirs = prepare_search_dirs(logger, settings)
+
+        lib_dirs.update(ld)
+        bin_dirs.update(ld)
+
+        logger.debug("\tanalyse(), bin directories:")
+        for x in sorted(bin_dirs):
+            logger.debug("\t\t%s" % (x))
+        logger.debug("\tanalyse(), lib directories:")
+        for x in sorted(lib_dirs):
+            logger.debug("\t\t%s" % (x))
+        logger.debug("\tanalyse(), masked directories:")
+        for x in sorted(masked_dirs):
+            logger.debug("\t\t%s" % (x))
+        logger.debug("\tanalyse(), masked files:")
+        for x in sorted(masked_files):
+            logger.debug("\t\t%s" % (x))
+
+        ftime = current_milli_time()
+        logger.debug("\ttime to complete task: %d milliseconds" % (ftime - stime))
+        stime = current_milli_time()
+        logger.info(green(" * ") + bold("Collecting dynamic linking informations"))
+
+        libraries, la_libraries, libraries_links = collect_libraries_from_dir(
+            lib_dirs, all_masks, logger
+        )
+        binaries = collect_binaries_from_dir(bin_dirs, all_masks, logger)
+        ftime = current_milli_time()
+        logger.debug("\ttime to complete task: %d milliseconds" % (ftime - stime))
+
+        if settings["USE_TMP_FILES"]:
+            save_cache(
+                logger=logger,
+                to_save={
+                    "libraries": libraries,
+                    "la_libraries": la_libraries,
+                    "libraries_links": libraries_links,
+                    "binaries": binaries,
+                },
+                temp_path=settings["DEFAULT_TMP_DIR"],
+            )
+
+    logger.debug(
+        "\tanalyse(), Found %i libraries (+%i symlinks) and %i binaries"
+        % (len(libraries), len(libraries_links), len(binaries))
+    )
+    logger.info(green(" * ") + bold("Scanning files"))
+
+    libs_and_bins = libraries.union(binaries)
+
+    scanned_files = scan_files(
+        libs_and_bins, settings["CMD_MAX_ARGS"], logger, searchbits
+    )
+
+    logger.warning(green(" * ") + bold("Checking dynamic linking consistency"))
+    logger.debug(
+        "\tanalyse(), Searching for %i libs, bins within %i libraries and links"
+        % (len(libs_and_bins), len(libraries) + len(libraries_links))
+    )
+
+    libcheck = LibCheck(
+        scanned_files, logger, _libs_to_check, searchbits, all_masks, masked_dirs
+    )
+
+    broken_pathes = libcheck.process_results(libcheck.search())
+
+    broken_la = extract_dependencies_from_la(
+        la_libraries, libraries.union(libraries_links), _libs_to_check, logger
+    )
+    broken_pathes += broken_la
+
+    if broken_pathes:
+        logger.warning(green(" * ") + bold("Assign files to packages"))
+        return assign_packages(broken_pathes, logger, settings)
+    return None, None  # no need to assign anything
+
+
+if __name__ == "__main__":
+    print("This script shouldn't be called directly")

diff --git a/pym/gentoolkit/revdep_rebuild/assign.py b/pym/gentoolkit/revdep_rebuild/assign.py
index 570c114..bfc56eb 100644
--- a/pym/gentoolkit/revdep_rebuild/assign.py
+++ b/pym/gentoolkit/revdep_rebuild/assign.py
@@ -9,6 +9,7 @@ import os
 import io
 import re
 import time
+
 current_milli_time = lambda: int(round(time.time() * 1000))
 
 import portage
@@ -17,160 +18,175 @@ from portage.output import bold, red, yellow, green
 
 
 class _file_matcher:
-	"""
-	Compares files by basename and parent directory (device, inode),
-	so comparisons work regardless of directory symlinks. If a
-	parent directory does not exist, the realpath of the parent
-	directory is used instead of the (device, inode). When multiple
-	files share the same parent directory, stat is only called
-	once per directory, and the result is cached internally.
-	"""
-	def __init__(self):
-		self._file_ids = {}
-		self._added = {}
-
-	def _file_id(self, filename):
-		try:
-			return self._file_ids[filename]
-		except KeyError:
-			try:
-				st = os.stat(filename)
-			except OSError as e:
-				if e.errno != errno.ENOENT:
-					raise
-				file_id = (os.path.realpath(filename),)
-			else:
-				file_id = (st.st_dev, st.st_ino)
-
-			self._file_ids[filename] = file_id
-			return file_id
-
-	def _file_key(self, filename):
-		head, tail = os.path.split(filename)
-		key = self._file_id(head) + (tail,)
-		return key
-
-	def add(self, filename):
-		self._added[self._file_key(filename)] = filename
-
-	def intersection(self, other):
-		for file_key in self._added:
-			match = other._added.get(file_key)
-			if match is not None:
-				yield match
+    """
+    Compares files by basename and parent directory (device, inode),
+    so comparisons work regardless of directory symlinks. If a
+    parent directory does not exist, the realpath of the parent
+    directory is used instead of the (device, inode). When multiple
+    files share the same parent directory, stat is only called
+    once per directory, and the result is cached internally.
+    """
+
+    def __init__(self):
+        self._file_ids = {}
+        self._added = {}
+
+    def _file_id(self, filename):
+        try:
+            return self._file_ids[filename]
+        except KeyError:
+            try:
+                st = os.stat(filename)
+            except OSError as e:
+                if e.errno != errno.ENOENT:
+                    raise
+                file_id = (os.path.realpath(filename),)
+            else:
+                file_id = (st.st_dev, st.st_ino)
+
+            self._file_ids[filename] = file_id
+            return file_id
+
+    def _file_key(self, filename):
+        head, tail = os.path.split(filename)
+        key = self._file_id(head) + (tail,)
+        return key
+
+    def add(self, filename):
+        self._added[self._file_key(filename)] = filename
+
+    def intersection(self, other):
+        for file_key in self._added:
+            match = other._added.get(file_key)
+            if match is not None:
+                yield match
 
 
 def assign_packages(broken, logger, settings):
-	''' Finds and returns packages that owns files placed in broken.
-		Broken is list of files
-	'''
-	stime = current_milli_time()
-
-	broken_matcher = _file_matcher()
-	for filename in broken:
-		broken_matcher.add(filename)
-
-	assigned_pkgs = set()
-	assigned_filenames = set()
-	for group in os.listdir(settings['PKG_DIR']):
-		grppath = settings['PKG_DIR'] + group
-		if not os.path.isdir(grppath):
-			continue
-		for pkg in os.listdir(grppath):
-			pkgpath = settings['PKG_DIR'] + group + '/' + pkg
-			if not os.path.isdir(pkgpath):
-				continue
-			f = pkgpath + '/CONTENTS'
-			if os.path.exists(f):
-				contents_matcher = _file_matcher()
-				try:
-					with io.open(f, 'r', encoding='utf_8') as cnt:
-						for line in cnt.readlines():
-							m = re.match(r'^obj (/[^ ]+)', line)
-							if m is not None:
-								contents_matcher.add(m.group(1))
-				except Exception as e:
-					logger.warning(red(' !! Failed to read ' + f))
-					logger.warning(red(' !! Error was:' + str(e)))
-				else:
-					for m in contents_matcher.intersection(broken_matcher):
-						found = group+'/'+pkg
-						assigned_pkgs.add(found)
-						assigned_filenames.add(m)
-						logger.info('\t' + green('* ') + m +
-									' -> ' + bold(found))
-
-	broken_filenames = set(broken)
-	orphaned = broken_filenames.difference(assigned_filenames)
-	ftime = current_milli_time()
-	logger.debug("\tassign_packages(); assigned "
-		"%d packages, %d orphans in %d milliseconds"
-		% (len(assigned_pkgs), len(orphaned), ftime-stime))
-
-	return (assigned_pkgs, orphaned)
+    """Finds and returns packages that own files placed in broken.
+    Broken is list of files
+    """
+    stime = current_milli_time()
+
+    broken_matcher = _file_matcher()
+    for filename in broken:
+        broken_matcher.add(filename)
+
+    assigned_pkgs = set()
+    assigned_filenames = set()
+    for group in os.listdir(settings["PKG_DIR"]):
+        grppath = settings["PKG_DIR"] + group
+        if not os.path.isdir(grppath):
+            continue
+        for pkg in os.listdir(grppath):
+            pkgpath = settings["PKG_DIR"] + group + "/" + pkg
+            if not os.path.isdir(pkgpath):
+                continue
+            f = pkgpath + "/CONTENTS"
+            if os.path.exists(f):
+                contents_matcher = _file_matcher()
+                try:
+                    with io.open(f, "r", encoding="utf_8") as cnt:
+                        for line in cnt.readlines():
+                            m = re.match(r"^obj (/[^ ]+)", line)
+                            if m is not None:
+                                contents_matcher.add(m.group(1))
+                except Exception as e:
+                    logger.warning(red(" !! Failed to read " + f))
+                    logger.warning(red(" !! Error was:" + str(e)))
+                else:
+                    for m in contents_matcher.intersection(broken_matcher):
+                        found = group + "/" + pkg
+                        assigned_pkgs.add(found)
+                        assigned_filenames.add(m)
+                        logger.info("\t" + green("* ") + m + " -> " + bold(found))
+
+    broken_filenames = set(broken)
+    orphaned = broken_filenames.difference(assigned_filenames)
+    ftime = current_milli_time()
+    logger.debug(
+        "\tassign_packages(); assigned "
+        "%d packages, %d orphans in %d milliseconds"
+        % (len(assigned_pkgs), len(orphaned), ftime - stime)
+    )
+
+    return (assigned_pkgs, orphaned)
 
 
 def get_best_match(cpv, cp, logger):
-	"""Tries to find another version of the pkg with the same slot
-	as the deprecated installed version.  Failing that attempt to get any version
-	of the same app
-
-	@param cpv: string
-	@param cp: string
-	@rtype tuple: ([cpv,...], SLOT)
-	"""
-
-	slot = portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, ["SLOT"])[0]
-	logger.warning('\t%s "%s" %s.' % (yellow('* Warning:'), cpv,bold('ebuild not found.')))
-	logger.debug('\tget_best_match(); Looking for %s:%s' %(cp, slot))
-	try:
-		match = portdb.match('%s:%s' %(cp, slot))
-	except portage.exception.InvalidAtom:
-		match = None
-
-	if not match:
-		logger.warning('\t' + red('!!') + ' ' + yellow(
-			'Could not find ebuild for %s:%s' %(cp, slot)))
-		slot = ['']
-		match = portdb.match(cp)
-		if not match:
-			logger.warning('\t' + red('!!') + ' ' +
-				yellow('Could not find ebuild for ' + cp))
-	return match, slot
+    """Tries to find another version of the pkg with the same slot
+    as the deprecated installed version.  Failing that, attempt to get any version
+    of the same app
+
+    @param cpv: string
+    @param cp: string
+    @rtype tuple: ([cpv,...], SLOT)
+    """
+
+    slot = portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, ["SLOT"])[0]
+    logger.warning(
+        '\t%s "%s" %s.' % (yellow("* Warning:"), cpv, bold("ebuild not found."))
+    )
+    logger.debug("\tget_best_match(); Looking for %s:%s" % (cp, slot))
+    try:
+        match = portdb.match("%s:%s" % (cp, slot))
+    except portage.exception.InvalidAtom:
+        match = None
+
+    if not match:
+        logger.warning(
+            "\t"
+            + red("!!")
+            + " "
+            + yellow("Could not find ebuild for %s:%s" % (cp, slot))
+        )
+        slot = [""]
+        match = portdb.match(cp)
+        if not match:
+            logger.warning(
+                "\t" + red("!!") + " " + yellow("Could not find ebuild for " + cp)
+            )
+    return match, slot
 
 
 def get_slotted_cps(cpvs, logger):
-	"""Uses portage to reduce the cpv list into a cp:slot list and returns it
-	"""
-	from portage.versions import catpkgsplit
-	from portage import portdb
-
-	cps = []
-	for cpv in cpvs:
-		parts = catpkgsplit(cpv)
-		if not parts:
-			logger.warning(('\t' + red("Failed to split the following pkg: "
-				"%s, not a valid cat/pkg-ver" %cpv)))
-			continue
-
-		cp = parts[0] + '/' + parts[1]
-		try:
-			slot = portdb.aux_get(cpv, ["SLOT"])
-		except KeyError:
-			match, slot = get_best_match(cpv, cp, logger)
-			if not match:
-				logger.warning('\t' + red("Installed package: "
-					"%s is no longer available" %cp))
-				continue
-
-		if slot[0]:
-			cps.append(cp + ":" + slot[0])
-		else:
-			cps.append(cp)
-
-	return cps
-
-
-
-if __name__ == '__main__':
-	print('Nothing to call here')
+    """Uses portage to reduce the cpv list into a cp:slot list and returns it"""
+    from portage.versions import catpkgsplit
+    from portage import portdb
+
+    cps = []
+    for cpv in cpvs:
+        parts = catpkgsplit(cpv)
+        if not parts:
+            logger.warning(
+                (
+                    "\t"
+                    + red(
+                        "Failed to split the following pkg: "
+                        "%s, not a valid cat/pkg-ver" % cpv
+                    )
+                )
+            )
+            continue
+
+        cp = parts[0] + "/" + parts[1]
+        try:
+            slot = portdb.aux_get(cpv, ["SLOT"])
+        except KeyError:
+            match, slot = get_best_match(cpv, cp, logger)
+            if not match:
+                logger.warning(
+                    "\t" + red("Installed package: " "%s is no longer available" % cp)
+                )
+                continue
+
+        if slot[0]:
+            cps.append(cp + ":" + slot[0])
+        else:
+            cps.append(cp)
+
+    return cps
+
+
+if __name__ == "__main__":
+    print("Nothing to call here")

diff --git a/pym/gentoolkit/revdep_rebuild/cache.py b/pym/gentoolkit/revdep_rebuild/cache.py
index ab0b7d7..3815d72 100644
--- a/pym/gentoolkit/revdep_rebuild/cache.py
+++ b/pym/gentoolkit/revdep_rebuild/cache.py
@@ -1,4 +1,3 @@
-
 """Caching module
 Functions for reading, saving and verifying the data caches
 """
@@ -10,125 +9,161 @@ from portage import _encodings, _unicode_encode
 from portage.output import red
 from .settings import DEFAULTS
 
-def read_cache(temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
-	''' Reads cache information needed by analyse function.
-		This function does not checks if files exists nor timestamps,
-		check_temp_files should be called first
-		@param temp_path: directory where all temp files should reside
-		@return tuple with values of:
-			libraries, la_libraries, libraries_links, symlink_pairs, binaries
-	'''
-
-	ret = {
-		'libraries': set(),
-		'la_libraries': set(),
-		'libraries_links': set(),
-		'binaries': set()
-		}
-	try:
-		for key,val in ret.items():
-			_file = open(_unicode_encode(os.path.join(temp_path, key),
-				encoding=_encodings['fs']), encoding=_encodings['content'])
-			for line in _file.readlines():
-				val.add(line.strip())
-			#libraries.remove('\n')
-			_file .close()
-	except EnvironmentError:
-		pass
-
-	return (ret['libraries'], ret['la_libraries'],
-		ret['libraries_links'], ret['binaries'])
-
-
-def save_cache(logger, to_save={}, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
-	''' Tries to store caching information.
-		@param logger
-		@param to_save have to be dict with keys:
-			libraries, la_libraries, libraries_links and binaries
-	'''
-
-	if not os.path.exists(temp_path):
-		os.makedirs(temp_path)
-
-	try:
-		_file = open(_unicode_encode(os.path.join(temp_path, 'timestamp'),
-			encoding=_encodings['fs']), mode='w', encoding=_encodings['content'])
-		_file.write(str(int(time.time())))
-		_file.close()
-
-		for key,val in to_save.items():
-			_file = open(_unicode_encode(os.path.join(temp_path, key),
-				encoding=_encodings['fs']), mode='w',
-				encoding=_encodings['content'])
-			for line in val:
-				_file.write(line + '\n')
-			_file.close()
-	except Exception as ex:
-		logger.warning('\t' + red('Could not save cache: %s' %str(ex)))
-
-
-
-def check_temp_files(temp_path=DEFAULTS['DEFAULT_TMP_DIR'], max_delay=3600,
-		logger=None):
-	''' Checks if temporary files from previous run are still available
-		and if they aren't too old
-		@param temp_path is directory, where temporary files should be found
-		@param max_delay is maximum time difference (in seconds)
-			when those files are still considered fresh and useful
-		returns True, when files can be used, or False, when they don't
-		exists or they are too old
-	'''
-
-	if not os.path.exists(temp_path) or not os.path.isdir(temp_path):
-		return False
-
-	timestamp_path = os.path.join(temp_path, 'timestamp')
-	if not os.path.exists(timestamp_path) or not os.path.isfile(timestamp_path):
-		return False
-
-	try:
-		_file = open(_unicode_encode(timestamp_path, encoding=_encodings['fs']),
-			encoding=_encodings['content'])
-		timestamp = int(_file.readline())
-		_file .close()
-	except Exception as ex:
-		if logger:
-			logger.debug("\tcheck_temp_files(); error retrieving"
-				" timestamp_path:\n" + str(ex))
-		timestamp = 0
-		return False
-
-	diff = int(time.time()) - timestamp
-	return max_delay > diff
-
-
-
-if __name__ == '__main__':
-	print('Preparing cache ... ')
-
-	from .collect import (prepare_search_dirs, parse_revdep_config,
-		collect_libraries_from_dir, collect_binaries_from_dir)
-	import logging
-
-	bin_dirs, lib_dirs = prepare_search_dirs()
-
-	masked_dirs, masked_files, ld = parse_revdep_config()
-	lib_dirs.update(ld)
-	bin_dirs.update(ld)
-	masked_dirs.update(
-		set([
-			'/lib/modules',
-			'/lib32/modules',
-			'/lib64/modules',
-		])
-	)
-
-	libraries, la_libraries, libraries_links, symlink_pairs = collect_libraries_from_dir(lib_dirs, masked_dirs, logging)
-	binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logging)
-
-	save_cache(logger=logging,
-		to_save={'libraries':libraries, 'la_libraries':la_libraries,
-			'libraries_links':libraries_links, 'binaries':binaries}
-		)
-
-	print('Done.')
+
+def read_cache(temp_path=DEFAULTS["DEFAULT_TMP_DIR"]):
+    """Reads cache information needed by analyse function.
+    This function checks neither whether files exist nor their timestamps;
+    check_temp_files should be called first
+    @param temp_path: directory where all temp files should reside
+    @return tuple with values of:
+            libraries, la_libraries, libraries_links, symlink_pairs, binaries
+    """
+
+    ret = {
+        "libraries": set(),
+        "la_libraries": set(),
+        "libraries_links": set(),
+        "binaries": set(),
+    }
+    try:
+        for key, val in ret.items():
+            _file = open(
+                _unicode_encode(
+                    os.path.join(temp_path, key), encoding=_encodings["fs"]
+                ),
+                encoding=_encodings["content"],
+            )
+            for line in _file.readlines():
+                val.add(line.strip())
+            # libraries.remove('\n')
+            _file.close()
+    except EnvironmentError:
+        pass
+
+    return (
+        ret["libraries"],
+        ret["la_libraries"],
+        ret["libraries_links"],
+        ret["binaries"],
+    )
+
+
+def save_cache(logger, to_save={}, temp_path=DEFAULTS["DEFAULT_TMP_DIR"]):
+    """Tries to store caching information.
+    @param logger
+    @param to_save have to be dict with keys:
+            libraries, la_libraries, libraries_links and binaries
+    """
+
+    if not os.path.exists(temp_path):
+        os.makedirs(temp_path)
+
+    try:
+        _file = open(
+            _unicode_encode(
+                os.path.join(temp_path, "timestamp"), encoding=_encodings["fs"]
+            ),
+            mode="w",
+            encoding=_encodings["content"],
+        )
+        _file.write(str(int(time.time())))
+        _file.close()
+
+        for key, val in to_save.items():
+            _file = open(
+                _unicode_encode(
+                    os.path.join(temp_path, key), encoding=_encodings["fs"]
+                ),
+                mode="w",
+                encoding=_encodings["content"],
+            )
+            for line in val:
+                _file.write(line + "\n")
+            _file.close()
+    except Exception as ex:
+        logger.warning("\t" + red("Could not save cache: %s" % str(ex)))
+
+
+def check_temp_files(
+    temp_path=DEFAULTS["DEFAULT_TMP_DIR"], max_delay=3600, logger=None
+):
+    """Checks if temporary files from previous run are still available
+    and if they aren't too old
+    @param temp_path is directory, where temporary files should be found
+    @param max_delay is maximum time difference (in seconds)
+            when those files are still considered fresh and useful
+    returns True, when files can be used, or False, when they don't
+    exist or they are too old
+    """
+
+    if not os.path.exists(temp_path) or not os.path.isdir(temp_path):
+        return False
+
+    timestamp_path = os.path.join(temp_path, "timestamp")
+    if not os.path.exists(timestamp_path) or not os.path.isfile(timestamp_path):
+        return False
+
+    try:
+        _file = open(
+            _unicode_encode(timestamp_path, encoding=_encodings["fs"]),
+            encoding=_encodings["content"],
+        )
+        timestamp = int(_file.readline())
+        _file.close()
+    except Exception as ex:
+        if logger:
+            logger.debug(
+                "\tcheck_temp_files(); error retrieving" " timestamp_path:\n" + str(ex)
+            )
+        timestamp = 0
+        return False
+
+    diff = int(time.time()) - timestamp
+    return max_delay > diff
+
+
+if __name__ == "__main__":
+    print("Preparing cache ... ")
+
+    from .collect import (
+        prepare_search_dirs,
+        parse_revdep_config,
+        collect_libraries_from_dir,
+        collect_binaries_from_dir,
+    )
+    import logging
+
+    bin_dirs, lib_dirs = prepare_search_dirs()
+
+    masked_dirs, masked_files, ld = parse_revdep_config()
+    lib_dirs.update(ld)
+    bin_dirs.update(ld)
+    masked_dirs.update(
+        set(
+            [
+                "/lib/modules",
+                "/lib32/modules",
+                "/lib64/modules",
+            ]
+        )
+    )
+
+    (
+        libraries,
+        la_libraries,
+        libraries_links,
+        symlink_pairs,
+    ) = collect_libraries_from_dir(lib_dirs, masked_dirs, logging)
+    binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logging)
+
+    save_cache(
+        logger=logging,
+        to_save={
+            "libraries": libraries,
+            "la_libraries": la_libraries,
+            "libraries_links": libraries_links,
+            "binaries": binaries,
+        },
+    )
+
+    print("Done.")

diff --git a/pym/gentoolkit/revdep_rebuild/collect.py b/pym/gentoolkit/revdep_rebuild/collect.py
index 38ff48e..0e5d274 100644
--- a/pym/gentoolkit/revdep_rebuild/collect.py
+++ b/pym/gentoolkit/revdep_rebuild/collect.py
@@ -14,219 +14,242 @@ from .settings import parse_revdep_config
 
 
 def parse_conf(conf_file, visited=None, logger=None):
-	''' Parses supplied conf_file for libraries pathes.
-		conf_file is file or files to parse
-		visited is set of files already parsed
-	'''
-	lib_dirs = set()
-	to_parse = set()
-
-	if isinstance(conf_file, str):
-		conf_file = [conf_file]
-
-	for conf in conf_file:
-		try:
-			with open(_unicode_encode(conf, encoding=_encodings['fs']),
-					encoding=_encodings['content']) as _file:
-				for line in _file.readlines():
-					line = line.strip()
-					if line.startswith('#'):
-						continue
-					elif line.startswith('include'):
-						include_line = line.split()[1:]
-						for included in include_line:
-							if not included.startswith('/'):
-								path = os.path.join(os.path.dirname(conf), \
-													included)
-							else:
-								path = included
-
-							to_parse.update(glob.glob(path))
-					else:
-						lib_dirs.add(line)
-		except EnvironmentError:
-			logger.warn('\t' + yellow('Error when parsing file %s' %conf))
-
-	if visited is None:
-		visited = set()
-
-	visited.update(conf_file)
-	to_parse = to_parse.difference(visited)
-	if to_parse:
-		lib_dirs.update(parse_conf(to_parse, visited, logger=logger))
-
-	return lib_dirs
+    """Parses supplied conf_file for library paths.
+    conf_file is file or files to parse
+    visited is set of files already parsed
+    """
+    lib_dirs = set()
+    to_parse = set()
+
+    if isinstance(conf_file, str):
+        conf_file = [conf_file]
+
+    for conf in conf_file:
+        try:
+            with open(
+                _unicode_encode(conf, encoding=_encodings["fs"]),
+                encoding=_encodings["content"],
+            ) as _file:
+                for line in _file.readlines():
+                    line = line.strip()
+                    if line.startswith("#"):
+                        continue
+                    elif line.startswith("include"):
+                        include_line = line.split()[1:]
+                        for included in include_line:
+                            if not included.startswith("/"):
+                                path = os.path.join(os.path.dirname(conf), included)
+                            else:
+                                path = included
+
+                            to_parse.update(glob.glob(path))
+                    else:
+                        lib_dirs.add(line)
+        except EnvironmentError:
+            logger.warn("\t" + yellow("Error when parsing file %s" % conf))
+
+    if visited is None:
+        visited = set()
+
+    visited.update(conf_file)
+    to_parse = to_parse.difference(visited)
+    if to_parse:
+        lib_dirs.update(parse_conf(to_parse, visited, logger=logger))
+
+    return lib_dirs
 
 
 def prepare_search_dirs(logger, settings):
-	''' Lookup for search dirs. Returns tuple with two lists,
-		(list_of_bin_dirs, list_of_lib_dirs)
-	'''
-
-	bin_dirs = set(['/bin', '/usr/bin', ])
-	lib_dirs = set(['/lib', '/usr/lib', ])
-
-	#try:
-	with open(_unicode_encode(os.path.join(
-		portage.root, settings['DEFAULT_ENV_FILE']),
-		encoding=_encodings['fs']), mode='r',
-		encoding=_encodings['content']) as _file:
-		for line in _file.readlines():
-			line = line.strip()
-			match = re.match(r"^export (ROOT)?PATH='([^']+)'", line)
-			if match is not None:
-				bin_dirs.update(set(match.group(2).split(':')))
-	#except EnvironmentError:
-		#logger.debug('\t' + yellow('Could not open file %s' % f))
-
-	lib_dirs = parse_conf(settings['DEFAULT_LD_FILE'], logger=logger)
-	return (bin_dirs, lib_dirs)
-
+    """Look up search dirs. Returns tuple with two lists,
+    (list_of_bin_dirs, list_of_lib_dirs)
+    """
+
+    bin_dirs = set(
+        [
+            "/bin",
+            "/usr/bin",
+        ]
+    )
+    lib_dirs = set(
+        [
+            "/lib",
+            "/usr/lib",
+        ]
+    )
+
+    # try:
+    with open(
+        _unicode_encode(
+            os.path.join(portage.root, settings["DEFAULT_ENV_FILE"]),
+            encoding=_encodings["fs"],
+        ),
+        mode="r",
+        encoding=_encodings["content"],
+    ) as _file:
+        for line in _file.readlines():
+            line = line.strip()
+            match = re.match(r"^export (ROOT)?PATH='([^']+)'", line)
+            if match is not None:
+                bin_dirs.update(set(match.group(2).split(":")))
+    # except EnvironmentError:
+    # logger.debug('\t' + yellow('Could not open file %s' % f))
+
+    lib_dirs = parse_conf(settings["DEFAULT_LD_FILE"], logger=logger)
+    return (bin_dirs, lib_dirs)
 
 
 def collect_libraries_from_dir(dirs, mask, logger):
-	''' Collects all libraries from specified list of directories.
-		mask is list of pathes, that are ommited in scanning, can be eighter single file or entire directory
-		Returns tuple composed of: list of libraries, list of symlinks, and toupe with pair
-		(symlink_id, library_id) for resolving dependencies
-	'''
-
-	# contains list of directories found
-	# allows us to reduce number of fnc calls
-	found_directories = set()
-	found_files = set()
-	found_symlinks = set()
-	found_la_files = set() # la libraries
-
-	for _dir in dirs:
-		if _dir in mask:
-			continue
-
-		try:
-			for _listing in os.listdir(_dir):
-				listing = os.path.join(_dir, _listing)
-				if listing in mask or _listing in mask:
-					continue
-
-				if os.path.isdir(listing):
-					if os.path.islink(listing):
-						#we do not want scan symlink-directories
-						pass
-					else:
-						found_directories.add(listing)
-				elif os.path.isfile(listing):
-					if (listing.endswith('.so') or
-						listing.endswith('.a') or
-						'.so.' in listing
-						):
-
-						if os.path.islink(listing):
-							found_symlinks.add(listing)
-						else:
-							found_files.add(listing)
-						continue
-					elif listing.endswith('.la'):
-						if listing in found_la_files:
-							continue
-
-						found_la_files.add(listing)
-					else:
-						# sometimes there are binaries in libs' subdir,
-						# for example in nagios
-						if not os.path.islink(listing):
-							#if listing in found_files or listing in found_symlinks:
-								#continue
-							prv = os.stat(listing)[stat.ST_MODE]
-							if prv & stat.S_IXUSR == stat.S_IXUSR or \
-									prv & stat.S_IXGRP == stat.S_IXGRP or \
-									prv & stat.S_IXOTH == stat.S_IXOTH:
-								found_files.add(listing)
-		except Exception as ex:
-			logger.debug('\t' +
-				yellow('Exception collecting libraries: ' +
-				blue('%s')  %str(ex)))
-
-	if found_directories:
-		_file, la_file, link = \
-			collect_libraries_from_dir(found_directories, mask, logger)
-		found_files.update(_file)
-		found_la_files.update(la_file)
-		found_symlinks.update(link)
-	return (found_files, found_la_files, found_symlinks)
+    """Collects all libraries from specified list of directories.
+    mask is a list of paths that are omitted in scanning; can be either a single file or an entire directory
+    Returns tuple composed of: list of libraries, list of symlinks, and tuple with pair
+    (symlink_id, library_id) for resolving dependencies
+    """
+
+    # contains list of directories found
+    # allows us to reduce number of fnc calls
+    found_directories = set()
+    found_files = set()
+    found_symlinks = set()
+    found_la_files = set()  # la libraries
+
+    for _dir in dirs:
+        if _dir in mask:
+            continue
+
+        try:
+            for _listing in os.listdir(_dir):
+                listing = os.path.join(_dir, _listing)
+                if listing in mask or _listing in mask:
+                    continue
+
+                if os.path.isdir(listing):
+                    if os.path.islink(listing):
+                        # we do not want scan symlink-directories
+                        pass
+                    else:
+                        found_directories.add(listing)
+                elif os.path.isfile(listing):
+                    if (
+                        listing.endswith(".so")
+                        or listing.endswith(".a")
+                        or ".so." in listing
+                    ):
+
+                        if os.path.islink(listing):
+                            found_symlinks.add(listing)
+                        else:
+                            found_files.add(listing)
+                        continue
+                    elif listing.endswith(".la"):
+                        if listing in found_la_files:
+                            continue
+
+                        found_la_files.add(listing)
+                    else:
+                        # sometimes there are binaries in libs' subdir,
+                        # for example in nagios
+                        if not os.path.islink(listing):
+                            # if listing in found_files or listing in found_symlinks:
+                            # continue
+                            prv = os.stat(listing)[stat.ST_MODE]
+                            if (
+                                prv & stat.S_IXUSR == stat.S_IXUSR
+                                or prv & stat.S_IXGRP == stat.S_IXGRP
+                                or prv & stat.S_IXOTH == stat.S_IXOTH
+                            ):
+                                found_files.add(listing)
+        except Exception as ex:
+            logger.debug(
+                "\t" + yellow("Exception collecting libraries: " + blue("%s") % str(ex))
+            )
+
+    if found_directories:
+        _file, la_file, link = collect_libraries_from_dir(
+            found_directories, mask, logger
+        )
+        found_files.update(_file)
+        found_la_files.update(la_file)
+        found_symlinks.update(link)
+    return (found_files, found_la_files, found_symlinks)
 
 
 def collect_binaries_from_dir(dirs, mask, logger):
-	''' Collects all binaries from specified list of directories.
-		mask is list of pathes, that are ommited in scanning,
-		can be eighter single file or entire directory
-		Returns list of binaries
-	'''
-
-	# contains list of directories found
-	# allows us to reduce number of fnc calls
-	found_directories = set()
-	found_files = set()
-
-	for _dir in dirs:
-		if _dir in mask:
-			continue
-
-		try:
-			for _listing in os.listdir(_dir):
-				listing = os.path.join(_dir, _listing)
-				if listing in mask or _listing in mask:
-					continue
-
-				if os.path.isdir(listing):
-					if os.path.islink(listing):
-						#we do not want scan symlink-directories
-						pass
-					else:
-						found_directories.add(listing)
-				elif os.path.isfile(listing):
-					# we're looking for binaries
-					# and with binaries we do not need links
-					# thus we can optimize a bit
-					if not os.path.islink(listing):
-						prv = os.stat(listing)[stat.ST_MODE]
-						if prv & stat.S_IXUSR == stat.S_IXUSR or \
-								prv & stat.S_IXGRP == stat.S_IXGRP or \
-								prv & stat.S_IXOTH == stat.S_IXOTH:
-							found_files.add(listing)
-		except Exception as ex:
-			logger.debug('\t' +
-				yellow('Exception during binaries collecting: '+
-				blue('%s') %str(ex)))
-
-	if found_directories:
-		found_files.update(collect_binaries_from_dir(found_directories, mask, logger))
-
-	return found_files
-
-
-
-if __name__ == '__main__':
-	import logging
-	bin_dirs, lib_dirs = prepare_search_dirs(logging)
-
-	masked_dirs, masked_files, ld = parse_revdep_config()
-	lib_dirs.update(ld)
-	bin_dirs.update(ld)
-	masked_dirs.update(
-		set([
-			'/lib/modules',
-			'/lib32/modules',
-			'/lib64/modules',
-		])
-	)
-
-	libraries, la_libraries, libraries_links = \
-		collect_libraries_from_dir(lib_dirs, masked_dirs, logging)
-	binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logging)
-
-	logging.debug(
-		'Found: %i binaries and %i libraries.' %(
-		len(binaries), len(libraries)))
-
-
-
+    """Collects all binaries from specified list of directories.
+    mask is a list of paths that are omitted in scanning;
+    can be either a single file or an entire directory
+    Returns list of binaries
+    """
+
+    # contains list of directories found
+    # allows us to reduce number of fnc calls
+    found_directories = set()
+    found_files = set()
+
+    for _dir in dirs:
+        if _dir in mask:
+            continue
+
+        try:
+            for _listing in os.listdir(_dir):
+                listing = os.path.join(_dir, _listing)
+                if listing in mask or _listing in mask:
+                    continue
+
+                if os.path.isdir(listing):
+                    if os.path.islink(listing):
+                        # we do not want scan symlink-directories
+                        pass
+                    else:
+                        found_directories.add(listing)
+                elif os.path.isfile(listing):
+                    # we're looking for binaries
+                    # and with binaries we do not need links
+                    # thus we can optimize a bit
+                    if not os.path.islink(listing):
+                        prv = os.stat(listing)[stat.ST_MODE]
+                        if (
+                            prv & stat.S_IXUSR == stat.S_IXUSR
+                            or prv & stat.S_IXGRP == stat.S_IXGRP
+                            or prv & stat.S_IXOTH == stat.S_IXOTH
+                        ):
+                            found_files.add(listing)
+        except Exception as ex:
+            logger.debug(
+                "\t"
+                + yellow(
+                    "Exception during binaries collecting: " + blue("%s") % str(ex)
+                )
+            )
+
+    if found_directories:
+        found_files.update(collect_binaries_from_dir(found_directories, mask, logger))
+
+    return found_files
+
+
+if __name__ == "__main__":
+    import logging
+
+    bin_dirs, lib_dirs = prepare_search_dirs(logging)
+
+    masked_dirs, masked_files, ld = parse_revdep_config()
+    lib_dirs.update(ld)
+    bin_dirs.update(ld)
+    masked_dirs.update(
+        set(
+            [
+                "/lib/modules",
+                "/lib32/modules",
+                "/lib64/modules",
+            ]
+        )
+    )
+
+    libraries, la_libraries, libraries_links = collect_libraries_from_dir(
+        lib_dirs, masked_dirs, logging
+    )
+    binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logging)
+
+    logging.debug(
+        "Found: %i binaries and %i libraries." % (len(binaries), len(libraries))
+    )

diff --git a/pym/gentoolkit/revdep_rebuild/rebuild.py b/pym/gentoolkit/revdep_rebuild/rebuild.py
index 4109c4f..75e209d 100644
--- a/pym/gentoolkit/revdep_rebuild/rebuild.py
+++ b/pym/gentoolkit/revdep_rebuild/rebuild.py
@@ -19,6 +19,7 @@ import sys
 import logging
 import subprocess
 import time
+
 current_milli_time = lambda: int(round(time.time() * 1000))
 
 
@@ -40,136 +41,149 @@ __productname__ = "revdep-ng"
 
 # functions
 
-def init_logger(settings):
-	"""Creates and iitializes our logger according to the settings"""
-	logger = logging.getLogger()
-	log_handler = logging.StreamHandler(sys.stdout)
-	log_fmt = logging.Formatter('%(msg)s')
-	log_handler.setFormatter(log_fmt)
-	logger.addHandler(log_handler)
-	if settings['quiet']:
-		logger.setLevel(logging.ERROR)
-	elif settings['VERBOSITY'] == 2:
-		logger.setLevel(logging.INFO)
-	elif settings['VERBOSITY'] == 3 or settings['debug']:
-		logger.setLevel(logging.DEBUG)
-	else:
-		logger.setLevel(logging.WARNING)
-	return logger
-
 
+def init_logger(settings):
+    """Creates and iitializes our logger according to the settings"""
+    logger = logging.getLogger()
+    log_handler = logging.StreamHandler(sys.stdout)
+    log_fmt = logging.Formatter("%(msg)s")
+    log_handler.setFormatter(log_fmt)
+    logger.addHandler(log_handler)
+    if settings["quiet"]:
+        logger.setLevel(logging.ERROR)
+    elif settings["VERBOSITY"] == 2:
+        logger.setLevel(logging.INFO)
+    elif settings["VERBOSITY"] == 3 or settings["debug"]:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.WARNING)
+    return logger
 
 
 def rebuild(logger, assigned, settings):
-	"""rebuilds the assigned pkgs"""
-
-	args = list(settings['pass_through_options'])
-	if settings['EXACT']:
-		_assigned = filter_masked(assigned, logger)
-		emerge_command = ['='+a for a in _assigned]
-	else:
-		_assigned = get_slotted_cps(assigned, logger)
-		emerge_command = [a for a in _assigned]
-	if settings['PRETEND']:
-		args.append('--pretend')
-	if settings['VERBOSITY'] >= 2:
-		args.append('--verbose')
-	elif settings['VERBOSITY'] < 1:
-		args.append('--quiet')
-	if settings['nocolor']:
-		args.extend(['--color', 'n'])
-
-	if len(emerge_command) == 0:
-		logger.warning(bold('\nThere is nothing to emerge. Exiting.'))
-		return 0
-
-	logger.warning(yellow(
-		'\nemerge') +  ' ' + ' '.join(args) +
-		' --oneshot --complete-graph=y ' +
-		bold(' '.join(emerge_command)))
-
-	stime = current_milli_time()
-	_args = ['emerge'] + args + ['--oneshot', '--complete-graph=y'] + emerge_command
-	success = subprocess.call(_args)
-	ftime = current_milli_time()
-	logger.debug("\trebuild(); emerge call for %d ebuilds took: %s seconds"
-		% (len(_assigned), str((ftime-stime)/1000.0)))
-	return success
+    """rebuilds the assigned pkgs"""
+
+    args = list(settings["pass_through_options"])
+    if settings["EXACT"]:
+        _assigned = filter_masked(assigned, logger)
+        emerge_command = ["=" + a for a in _assigned]
+    else:
+        _assigned = get_slotted_cps(assigned, logger)
+        emerge_command = [a for a in _assigned]
+    if settings["PRETEND"]:
+        args.append("--pretend")
+    if settings["VERBOSITY"] >= 2:
+        args.append("--verbose")
+    elif settings["VERBOSITY"] < 1:
+        args.append("--quiet")
+    if settings["nocolor"]:
+        args.extend(["--color", "n"])
+
+    if len(emerge_command) == 0:
+        logger.warning(bold("\nThere is nothing to emerge. Exiting."))
+        return 0
+
+    logger.warning(
+        yellow("\nemerge")
+        + " "
+        + " ".join(args)
+        + " --oneshot --complete-graph=y "
+        + bold(" ".join(emerge_command))
+    )
+
+    stime = current_milli_time()
+    _args = ["emerge"] + args + ["--oneshot", "--complete-graph=y"] + emerge_command
+    success = subprocess.call(_args)
+    ftime = current_milli_time()
+    logger.debug(
+        "\trebuild(); emerge call for %d ebuilds took: %s seconds"
+        % (len(_assigned), str((ftime - stime) / 1000.0))
+    )
+    return success
 
 
 def main(settings=None, logger=None):
-	"""Main program operation method....
-
-	@param settings: dict.  defaults to settings.DEFAULTS
-	@param logger: python logging module defaults to init_logger(settings)
-	@return boolean  success/failure
-	"""
-	if settings is None:
-		print("NO Input settings, using defaults...")
-		settings = DEFAULTS.copy()
-
-	if logger is None:
-		logger = init_logger(settings)
-
-	_libs_to_check = settings['library']
-
-	if not settings['stdout'].isatty() or settings['nocolor']:
-		nocolor()
-
-	logger.warning(blue(' * ') +
-		yellow('This is the new python coded version'))
-	logger.warning(blue(' * ') +
-		yellow('Please report any bugs found using it.'))
-	logger.warning(blue(' * ') +
-		yellow('The original revdep-rebuild script is '
-			'installed as revdep-rebuild.sh'))
-	logger.warning(blue(' * ') +
-		yellow('Please file bugs at: '
-			'https://bugs.gentoo.org/'))
-
-	if os.getuid() != 0 and not settings['PRETEND']:
-		logger.warning(blue(' * ') +
-			yellow('You are not root, adding --pretend to portage options'))
-		settings['PRETEND'] = True
-
-	logger.debug("\tmain(), _libs_to_check = %s" % str(_libs_to_check))
-
-	if settings['USE_TMP_FILES'] \
-			and check_temp_files(settings['DEFAULT_TMP_DIR'], logger=logger):
-		libraries, la_libraries, libraries_links, binaries = read_cache(
-			settings['DEFAULT_TMP_DIR'])
-		assigned, orphaned = analyse(
-			settings=settings,
-			logger=logger,
-			libraries=libraries,
-			la_libraries=la_libraries,
-			libraries_links=libraries_links,
-			binaries=binaries,
-			_libs_to_check=_libs_to_check)
-	else:
-		assigned, orphaned = analyse(settings, logger, _libs_to_check=_libs_to_check)
-
-	if not assigned and not orphaned:
-		logger.warning('\n' + bold('Your system is consistent'))
-		# return the correct exit code
-		return 0
-	elif orphaned:
-		# blank line for beter visibility of the following lines
-		logger.warning('')
-		if settings['library']:
-			logger.warning(red(' !!! Dependant orphaned files: ') +
-				bold('No installed package was found for the following:'))
-		else:
-			logger.warning(red(' !!! Broken orphaned files: ') +
-				bold('No installed package was found for the following:'))
-		for filename in orphaned:
-			logger.warning(red('\t* ') + filename)
-
-	success = rebuild(logger, assigned, settings)
-	logger.debug("rebuild return code = %i" %success)
-	return success
-
-
-if __name__ == '__main__':
-	main(parse_options())
-
+    """Main program operation method....
+
+    @param settings: dict.  defaults to settings.DEFAULTS
+    @param logger: python logging module defaults to init_logger(settings)
+    @return boolean  success/failure
+    """
+    if settings is None:
+        print("NO Input settings, using defaults...")
+        settings = DEFAULTS.copy()
+
+    if logger is None:
+        logger = init_logger(settings)
+
+    _libs_to_check = settings["library"]
+
+    if not settings["stdout"].isatty() or settings["nocolor"]:
+        nocolor()
+
+    logger.warning(blue(" * ") + yellow("This is the new python coded version"))
+    logger.warning(blue(" * ") + yellow("Please report any bugs found using it."))
+    logger.warning(
+        blue(" * ")
+        + yellow(
+            "The original revdep-rebuild script is " "installed as revdep-rebuild.sh"
+        )
+    )
+    logger.warning(
+        blue(" * ") + yellow("Please file bugs at: " "https://bugs.gentoo.org/")
+    )
+
+    if os.getuid() != 0 and not settings["PRETEND"]:
+        logger.warning(
+            blue(" * ")
+            + yellow("You are not root, adding --pretend to portage options")
+        )
+        settings["PRETEND"] = True
+
+    logger.debug("\tmain(), _libs_to_check = %s" % str(_libs_to_check))
+
+    if settings["USE_TMP_FILES"] and check_temp_files(
+        settings["DEFAULT_TMP_DIR"], logger=logger
+    ):
+        libraries, la_libraries, libraries_links, binaries = read_cache(
+            settings["DEFAULT_TMP_DIR"]
+        )
+        assigned, orphaned = analyse(
+            settings=settings,
+            logger=logger,
+            libraries=libraries,
+            la_libraries=la_libraries,
+            libraries_links=libraries_links,
+            binaries=binaries,
+            _libs_to_check=_libs_to_check,
+        )
+    else:
+        assigned, orphaned = analyse(settings, logger, _libs_to_check=_libs_to_check)
+
+    if not assigned and not orphaned:
+        logger.warning("\n" + bold("Your system is consistent"))
+        # return the correct exit code
+        return 0
+    elif orphaned:
+        # blank line for better visibility of the following lines
+        logger.warning("")
+        if settings["library"]:
+            logger.warning(
+                red(" !!! Dependant orphaned files: ")
+                + bold("No installed package was found for the following:")
+            )
+        else:
+            logger.warning(
+                red(" !!! Broken orphaned files: ")
+                + bold("No installed package was found for the following:")
+            )
+        for filename in orphaned:
+            logger.warning(red("\t* ") + filename)
+
+    success = rebuild(logger, assigned, settings)
+    logger.debug("rebuild return code = %i" % success)
+    return success
+
+
+if __name__ == "__main__":
+    main(parse_options())

diff --git a/pym/gentoolkit/revdep_rebuild/runner.py b/pym/gentoolkit/revdep_rebuild/runner.py
index 24411a5..5dd5c33 100644
--- a/pym/gentoolkit/revdep_rebuild/runner.py
+++ b/pym/gentoolkit/revdep_rebuild/runner.py
@@ -5,65 +5,60 @@ import subprocess
 
 
 class ProcessRunner(threading.Thread):
-    '''
+    """
     ProcessRunner is class designed to run arbitrary command
-    in background (separate thread). It's replacement for old 
+    in background (separate thread). It's replacement for old
     stuff.call_program function.
-    
+
     When called program is finished, its output can be accessed
     through .stdout and .stderr fields
-    '''
-    
+    """
+
     def __init__(self, args, autorun=True):
-        '''
+        """
         @param args - program name and its arguments
         @param autorun - if True, then automatically starts new thread
-        ''' 
+        """
 
         threading.Thread.__init__(self)
         self.args = args
         self.lock = threading.Lock()
-        self.stdout = ''
-        self.stderr = ''
-        
+        self.stdout = ""
+        self.stderr = ""
+
         if autorun:
             self.start()
-            
-        
-        
+
     def run(self):
         self.lock.acquire()
-        
-        subp = subprocess.Popen(self.args, stdout=subprocess.PIPE, \
-                                stderr=subprocess.PIPE)
+
+        subp = subprocess.Popen(
+            self.args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
         self.stdout, self.stderr = subp.communicate()
         self.lock.release()
-        
-        
+
     def is_ready(self):
-        ''' Checks whether current command is finished '''
+        """Checks whether current command is finished"""
         return not self.lock.locked()
-    
-    
+
     def wait(self):
-        ''' Waits until called program finishes '''
+        """Waits until called program finishes"""
         self.lock.acquire()
         self.lock.release()
 
 
-
-
 class ScanRunner(threading.Thread):
-    '''
-    ScanRunner is a class for calling scanelf in separate 
+    """
+    ScanRunner is a class for calling scanelf in separate
     thread, so several instances could be called at a time,
     and then all results could be consolidated.
-    
+
     Consolidated output is available through .out
-    '''   
-    
+    """
+
     def __init__(self, params, files, max_args, autorun=True):
-        '''
+        """
         @param params is list of parameters that should be passed into scanelf app.
         @param files list of files to scan.
         @param max_args number of files to process at once
@@ -71,44 +66,42 @@ class ScanRunner(threading.Thread):
 
         When files count is greater CMD_MAX_ARGS, then scanelf will be called
         several times.
-        '''
-        
+        """
+
         threading.Thread.__init__(self)
         self.params = params
         self.files = files
         self.max_args = max_args
-        
+
         self.out = []
         self.lock = threading.Lock()
-        
+
         if autorun:
             self.start()
-            
-            
+
     def run(self):
         self.lock.acquire()
-        
+
         process_pool = []
         for i in range(0, len(self.files), self.max_args):
-            process_pool.append(ProcessRunner(['scanelf'] + self.params + self.files[i:i+self.max_args]))
-                
+            process_pool.append(
+                ProcessRunner(
+                    ["scanelf"] + self.params + self.files[i : i + self.max_args]
+                )
+            )
+
         while process_pool:
             p = process_pool.pop()
             p.wait()
-            self.out += p.stdout.strip().split('\n')
-            
+            self.out += p.stdout.strip().split("\n")
+
         self.lock.release()
-        
-        
+
     def is_ready(self):
-        ''' Checks whether scanning is finished '''
+        """Checks whether scanning is finished"""
         return not self.lock.locked()
-    
-    
+
     def wait(self):
-        ''' Waits until all scanning instances are finished '''
+        """Waits until all scanning instances are finished"""
         self.lock.acquire()
         self.lock.release()
-        
-        
-        
\ No newline at end of file

diff --git a/pym/gentoolkit/revdep_rebuild/settings.py b/pym/gentoolkit/revdep_rebuild/settings.py
index c8d77bc..5551855 100644
--- a/pym/gentoolkit/revdep_rebuild/settings.py
+++ b/pym/gentoolkit/revdep_rebuild/settings.py
@@ -14,150 +14,173 @@ from portage import _encodings, _unicode_encode
 portage_root = str(portage.root)
 
 DEFAULTS = {
-		'DEFAULT_LD_FILE': os.path.join(portage_root, 'etc/ld.so.conf'),
-		'DEFAULT_ENV_FILE': os.path.join(portage_root, 'etc/profile.env'),
-		'REVDEP_CONFDIR': os.path.join(portage_root, 'etc/revdep-rebuild/'),
-		'PKG_DIR': os.path.join(portage_root, 'var/db/pkg/'),
-		'DEFAULT_TMP_DIR': os.path.join(portage_root, '/tmp/revdep-rebuild' if os.getgid() else '/var/cache/revdep-rebuild'), #cache default location
-
-		# number of maximum allowed files to be parsed at once
-		'CMD_MAX_ARGS': 1000,
-
-		'PRETEND': False,     #pretend only
-		'EXACT': False,      #exact package version
-		#if program should use temporary files from previous run
-		'USE_TMP_FILES': True,
-
-		'VERBOSITY': 1,
-
-		'quiet': False,
-		'nocolor': False,
-		'library': set(),
-		'no-progress': False,
-		'debug': False,
-		'no-ld-path': False,
-		'no-order': False,
-		'pass_through_options': [],
-		'stdout': sys.stdout,
-		'stdin': sys.stdin,
-		'stderr': sys.stderr
-		}
+    "DEFAULT_LD_FILE": os.path.join(portage_root, "etc/ld.so.conf"),
+    "DEFAULT_ENV_FILE": os.path.join(portage_root, "etc/profile.env"),
+    "REVDEP_CONFDIR": os.path.join(portage_root, "etc/revdep-rebuild/"),
+    "PKG_DIR": os.path.join(portage_root, "var/db/pkg/"),
+    "DEFAULT_TMP_DIR": os.path.join(
+        portage_root,
+        "/tmp/revdep-rebuild" if os.getgid() else "/var/cache/revdep-rebuild",
+    ),  # cache default location
+    # number of maximum allowed files to be parsed at once
+    "CMD_MAX_ARGS": 1000,
+    "PRETEND": False,  # pretend only
+    "EXACT": False,  # exact package version
+    # if program should use temporary files from previous run
+    "USE_TMP_FILES": True,
+    "VERBOSITY": 1,
+    "quiet": False,
+    "nocolor": False,
+    "library": set(),
+    "no-progress": False,
+    "debug": False,
+    "no-ld-path": False,
+    "no-order": False,
+    "pass_through_options": [],
+    "stdout": sys.stdout,
+    "stdin": sys.stdin,
+    "stderr": sys.stderr,
+}
 
 
 def parse_options():
-	"""Parses the command line options an sets settings accordingly"""
-
-	# TODO: Verify: options: no-ld-path, no-order, no-progress
-	#are not appliable
-	from .rebuild import VERSION, APP_NAME
-	settings = DEFAULTS.copy()
-
-	parser = argparse.ArgumentParser(
-		description='Broken reverse dependency rebuilder, python implementation.',
-		epilog='Calls emerge, options after -- are ignored by %s '
-				'and passed directly to emerge.' % APP_NAME,
-		add_help=False
-		)
-
-	parser.add_argument('-h', '--help',
-					 action='help',
-					 help='Print this usage and exit')
-	parser.add_argument('-V', '--version',
-					 action='version',
-					 help='Show version informations',
-					 version='%(prog)s ' + VERSION)
-
-	parser.add_argument('-i', '--ignore',
-					 action='store_true',
-					 help='Ignore temporary files from previous runs '
-						'(also won\'t create any)')
-
-	parser.add_argument('-L', '--library',
-					 action='append',
-					 help='Unconditionally emerge existing packages that use '
-						'the library with NAME. NAME can be a full path, full '
-						'or partial name')
-	parser.add_argument('-l', '--no-ld-path',
-					 action='store_true',
-					 help='Do not set LD_LIBRARY_PATH')
-	parser.add_argument('-o', '--no-order',
-					 action='store_true',
-					 help='Do not check the build order '
-						'(Saves time, but may cause breakage.)')
-	parser.add_argument('-p', '--pretend',
-					 action='store_true',
-					 help='Do a trial run without actually emerging anything '
-						'(also passed to emerge command)')
-
-	parser.add_argument('-C', '--nocolor',
-					 action='store_true',
-					 help='Turn off colored output')
-	parser.add_argument('-q', '--quiet',
-					 action='store_true',
-					 help='Be less verbose (also passed to emerge command)')
-	parser.add_argument('-v', '--verbose',
-					 action='store_true',
-					 help='Be more verbose (also passed to emerge command)')
-	parser.add_argument('-d', '--debug',
-					 action='store_true',
-					 help='Print debug informations')
-
-	parser.add_argument('portage_options', nargs='*')
-
-	args = parser.parse_args()
-	settings['VERBOSITY'] = 3 if args.debug else 2 if args.verbose else 0 if args.quiet else 1
-	settings['quiet'] = args.quiet
-	settings['PRETEND'] = args.pretend
-	settings['nocolor'] = args.nocolor
-	if args.library:
-		settings['library'] = set(settings['library']) | set(args.library)
-	settings['USE_TMP_FILES'] = not args.ignore
-	settings['pass_through_options'] = list(settings['pass_through_options']) + args.portage_options
-
-	return settings
+    """Parses the command line options an sets settings accordingly"""
+
+    # TODO: Verify: options: no-ld-path, no-order, no-progress
+    # are not applicable
+    from .rebuild import VERSION, APP_NAME
+
+    settings = DEFAULTS.copy()
+
+    parser = argparse.ArgumentParser(
+        description="Broken reverse dependency rebuilder, python implementation.",
+        epilog="Calls emerge, options after -- are ignored by %s "
+        "and passed directly to emerge." % APP_NAME,
+        add_help=False,
+    )
+
+    parser.add_argument("-h", "--help", action="help", help="Print this usage and exit")
+    parser.add_argument(
+        "-V",
+        "--version",
+        action="version",
+        help="Show version informations",
+        version="%(prog)s " + VERSION,
+    )
+
+    parser.add_argument(
+        "-i",
+        "--ignore",
+        action="store_true",
+        help="Ignore temporary files from previous runs " "(also won't create any)",
+    )
+
+    parser.add_argument(
+        "-L",
+        "--library",
+        action="append",
+        help="Unconditionally emerge existing packages that use "
+        "the library with NAME. NAME can be a full path, full "
+        "or partial name",
+    )
+    parser.add_argument(
+        "-l", "--no-ld-path", action="store_true", help="Do not set LD_LIBRARY_PATH"
+    )
+    parser.add_argument(
+        "-o",
+        "--no-order",
+        action="store_true",
+        help="Do not check the build order " "(Saves time, but may cause breakage.)",
+    )
+    parser.add_argument(
+        "-p",
+        "--pretend",
+        action="store_true",
+        help="Do a trial run without actually emerging anything "
+        "(also passed to emerge command)",
+    )
+
+    parser.add_argument(
+        "-C", "--nocolor", action="store_true", help="Turn off colored output"
+    )
+    parser.add_argument(
+        "-q",
+        "--quiet",
+        action="store_true",
+        help="Be less verbose (also passed to emerge command)",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="store_true",
+        help="Be more verbose (also passed to emerge command)",
+    )
+    parser.add_argument(
+        "-d", "--debug", action="store_true", help="Print debug informations"
+    )
+
+    parser.add_argument("portage_options", nargs="*")
+
+    args = parser.parse_args()
+    settings["VERBOSITY"] = (
+        3 if args.debug else 2 if args.verbose else 0 if args.quiet else 1
+    )
+    settings["quiet"] = args.quiet
+    settings["PRETEND"] = args.pretend
+    settings["nocolor"] = args.nocolor
+    if args.library:
+        settings["library"] = set(settings["library"]) | set(args.library)
+    settings["USE_TMP_FILES"] = not args.ignore
+    settings["pass_through_options"] = (
+        list(settings["pass_through_options"]) + args.portage_options
+    )
+
+    return settings
 
 
 def _parse_dirs_to_set(dir_str):
-	'''Changes space-delimited directory list into set with them
-	'''
-	_ret = set()
-	for search in dir_str.split():
-		if search == '-*':
-			break
-		_ret.update(glob.glob(search))
-	return _ret
+    """Changes space-delimited directory list into set with them"""
+    _ret = set()
+    for search in dir_str.split():
+        if search == "-*":
+            break
+        _ret.update(glob.glob(search))
+    return _ret
 
 
 def parse_revdep_config(revdep_confdir):
-	''' Parses all files under and returns
-		tuple of: (masked_dirs, masked_files, search_dirs)'''
-
-	search_dirs = os.environ.get('SEARCH_DIRS', '')
-	masked_dirs = os.environ.get('SEARCH_DIRS_MASK', '')
-	masked_files = os.environ.get('LD_LIBRARY_MASK', '')
-
-	for _file in os.listdir(revdep_confdir):
-		for line in open(_unicode_encode(os.path.join(revdep_confdir, _file),
-				encoding=_encodings['fs']), encoding=_encodings['content']):
-			line = line.strip()
-			#first check for comment, we do not want to regex all lines
-			if not line.startswith('#'):
-				match = re.match(r'LD_LIBRARY_MASK=\"([^"]+)"', line)
-				if match is not None:
-					masked_files += ' ' + match.group(1)
-					continue
-				match = re.match(r'SEARCH_DIRS_MASK=\"([^"]+)"', line)
-				if match is not None:
-					masked_dirs += ' ' + match.group(1)
-					continue
-				match = re.match(r'SEARCH_DIRS="([^"]+)"', line)
-				if match is not None:
-					search_dirs += ' ' + match.group(1)
-					continue
-
-	masked_files = set(masked_files.split(' '))
-	masked_dirs = _parse_dirs_to_set(masked_dirs)
-	search_dirs = _parse_dirs_to_set(search_dirs)
-
-	return (masked_dirs, masked_files, search_dirs)
-
+    """Parses all files under and returns
+    tuple of: (masked_dirs, masked_files, search_dirs)"""
+
+    search_dirs = os.environ.get("SEARCH_DIRS", "")
+    masked_dirs = os.environ.get("SEARCH_DIRS_MASK", "")
+    masked_files = os.environ.get("LD_LIBRARY_MASK", "")
+
+    for _file in os.listdir(revdep_confdir):
+        for line in open(
+            _unicode_encode(
+                os.path.join(revdep_confdir, _file), encoding=_encodings["fs"]
+            ),
+            encoding=_encodings["content"],
+        ):
+            line = line.strip()
+            # first check for comment, we do not want to regex all lines
+            if not line.startswith("#"):
+                match = re.match(r'LD_LIBRARY_MASK=\"([^"]+)"', line)
+                if match is not None:
+                    masked_files += " " + match.group(1)
+                    continue
+                match = re.match(r'SEARCH_DIRS_MASK=\"([^"]+)"', line)
+                if match is not None:
+                    masked_dirs += " " + match.group(1)
+                    continue
+                match = re.match(r'SEARCH_DIRS="([^"]+)"', line)
+                if match is not None:
+                    search_dirs += " " + match.group(1)
+                    continue
+
+    masked_files = set(masked_files.split(" "))
+    masked_dirs = _parse_dirs_to_set(masked_dirs)
+    search_dirs = _parse_dirs_to_set(search_dirs)
+
+    return (masked_dirs, masked_files, search_dirs)

diff --git a/pym/gentoolkit/revdep_rebuild/stuff.py b/pym/gentoolkit/revdep_rebuild/stuff.py
index 432dc8f..eee90c8 100644
--- a/pym/gentoolkit/revdep_rebuild/stuff.py
+++ b/pym/gentoolkit/revdep_rebuild/stuff.py
@@ -11,95 +11,103 @@ from portage.output import green, red
 
 # util. functions
 def call_program(args):
-	''' Calls program with specified parameters
-	and returns the stdout as a str object.
+    """Calls program with specified parameters
+    and returns the stdout as a str object.
 
-	@param, args: arument list to pass to subprocess
-	@return str
-	'''
-	args = [arg if isinstance(arg, bytes) else arg.encode('utf-8') for arg in args]
-	subp = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-	stdout, stderr = subp.communicate()
-	stdout = stdout.decode('utf-8')
-	return stdout
+    @param args: argument list to pass to subprocess
+    @return str
+    """
+    args = [arg if isinstance(arg, bytes) else arg.encode("utf-8") for arg in args]
+    subp = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    stdout, stderr = subp.communicate()
+    stdout = stdout.decode("utf-8")
+    return stdout
 
 
 def scan(params, files, max_args, logger):
-	''' Calls scanelf with given params and files to scan.
-		@param params is list of parameters that should
-			be passed into scanelf app.
-		@param files list of files to scan.
-		@param max_args number of files to process at once
-
-		When files count is greater CMD_MAX_ARGS, it'll be divided
-		into several parts
-
-		@return scanelf output (joined if was called several times)
-	'''
-	logger.debug("\tscan(), scanelf params = %s, # files: %d" % (params, len(files)))
-	# change it to a sorted list for group processing
-	_files = sorted(files)
-	out = []
-	for i in range(0, len(_files), max_args):
-		output = call_program(
-			['scanelf'] + params + _files[i:i+max_args]).strip().split('\n')
-		output = [x for x in output if x != '']
-		if output:
-			out.extend(output)
-	logger.debug("\tscan(), final output length: %d" % len(out))
-	return out
+    """Calls scanelf with given params and files to scan.
+    @param params is list of parameters that should
+            be passed into scanelf app.
+    @param files list of files to scan.
+    @param max_args number of files to process at once
+
+    When files count is greater than CMD_MAX_ARGS, it'll be divided
+    into several parts
+
+    @return scanelf output (joined if was called several times)
+    """
+    logger.debug("\tscan(), scanelf params = %s, # files: %d" % (params, len(files)))
+    # change it to a sorted list for group processing
+    _files = sorted(files)
+    out = []
+    for i in range(0, len(_files), max_args):
+        output = (
+            call_program(["scanelf"] + params + _files[i : i + max_args])
+            .strip()
+            .split("\n")
+        )
+        output = [x for x in output if x != ""]
+        if output:
+            out.extend(output)
+    logger.debug("\tscan(), final output length: %d" % len(out))
+    return out
 
 
 def get_masking_status(ebuild):
-	"""returns the masking status of an ebuild
+    """returns the masking status of an ebuild
 
-	@param ebuild: str
-	@return list
-	"""
-	try:
-		status = portage.getmaskingstatus(ebuild)
-	except KeyError:
-		status = ['unavailable']
-	return status
+    @param ebuild: str
+    @return list
+    """
+    try:
+        status = portage.getmaskingstatus(ebuild)
+    except KeyError:
+        status = ["unavailable"]
+    return status
 
 
 def _match_str_in_list(lst, stri):
-	"""
-	@param lst: list
-	@param stri: string
-	@return boolean or list menber that matches stri.endswith(member)
-	"""
-	for item in lst:
-		if stri.endswith(item):
-			return item
-	return False
+    """
+    @param lst: list
+    @param stri: string
+    @return boolean or list member that matches stri.endswith(member)
+    """
+    for item in lst:
+        if stri.endswith(item):
+            return item
+    return False
 
 
 def filter_masked(assigned, logger):
-	'''Filter out masked pkgs/ebuilds'''
-
-	def is_masked(ebuild):
-		if get_masking_status(ebuild):
-			logger.warning(' !!! ' + red('All ebuilds that could satisfy: ') +
-				green(ebuild) + red(' have been masked'))
-			return True
-		return False
-
-	has_masked = False
-	tmp = []
-	for ebuild in assigned:
-		if not is_masked(ebuild):
-			tmp.append(ebuild)
-		else:
-			has_masked = True
-	if has_masked:
-		logger.info('\t' + red('* ') +
-			'Unmask all ebuild(s) listed above and call revdep-rebuild '
-			'again or manually emerge given packages.')
-	return tmp
-
-
-
-
-if __name__ == '__main__':
-	print("There is nothing to run here.")
+    """Filter out masked pkgs/ebuilds"""
+
+    def is_masked(ebuild):
+        if get_masking_status(ebuild):
+            logger.warning(
+                " !!! "
+                + red("All ebuilds that could satisfy: ")
+                + green(ebuild)
+                + red(" have been masked")
+            )
+            return True
+        return False
+
+    has_masked = False
+    tmp = []
+    for ebuild in assigned:
+        if not is_masked(ebuild):
+            tmp.append(ebuild)
+        else:
+            has_masked = True
+    if has_masked:
+        logger.info(
+            "\t"
+            + red("* ")
+            + "Unmask all ebuild(s) listed above and call revdep-rebuild "
+            "again or manually emerge given packages."
+        )
+    return tmp
+
+
+if __name__ == "__main__":
+    print("There is nothing to run here.")

diff --git a/pym/gentoolkit/sets.py b/pym/gentoolkit/sets.py
index d130603..4fd5ff4 100644
--- a/pym/gentoolkit/sets.py
+++ b/pym/gentoolkit/sets.py
@@ -4,54 +4,61 @@
 
 """Provides access to Portage sets api"""
 
-__docformat__ = 'epytext'
+__docformat__ = "epytext"
 
 import portage
+
 try:
-	# Per commit 25d8427b3b29cbcee97279186983dae818495f8f in portage,
-	# portage.sets is renamed to portage._sets.
-	import portage._sets
-	_sets_available = True
-	SETPREFIX = portage._sets.SETPREFIX
+    # Per commit 25d8427b3b29cbcee97279186983dae818495f8f in portage,
+    # portage.sets is renamed to portage._sets.
+    import portage._sets
+
+    _sets_available = True
+    SETPREFIX = portage._sets.SETPREFIX
 except ImportError:
-	_sets_available = False
-	SETPREFIX = "@"
+    _sets_available = False
+    SETPREFIX = "@"
 
 from gentoolkit import errors
 from gentoolkit.atom import Atom
 
 
 _set_config = None
+
+
 def _init_set_config():
-	global _set_config
-	if _set_config is None:
-		_set_config = portage._sets.load_default_config(
-			portage.settings, portage.db[portage.root])
+    global _set_config
+    if _set_config is None:
+        _set_config = portage._sets.load_default_config(
+            portage.settings, portage.db[portage.root]
+        )
+
 
 def get_available_sets():
-	"""Returns all available sets."""
+    """Returns all available sets."""
+
+    if _sets_available:
+        _init_set_config()
+        return _set_config.getSets()
+    return {}
 
-	if _sets_available:
-		_init_set_config()
-		return _set_config.getSets()
-	return {}
 
 def get_set_atoms(setname):
-	"""Return atoms belonging to the given set
-
-	@type setname: string
-	@param setname: Name of the set
-	@rtype list
-	@return: List of atoms in the given set
-	"""
-
-	if _sets_available:
-		_init_set_config()
-		try:
-			return set([Atom(str(x))
-				for x in _set_config.getSetAtoms(setname)])
-		except portage._sets.PackageSetNotFound:
-			raise errors.GentoolkitSetNotFound(setname)
-	raise errors.GentoolkitSetNotFound(setname)
+    """Return atoms belonging to the given set
+
+    @type setname: string
+    @param setname: Name of the set
+    @rtype list
+    @return: List of atoms in the given set
+    """
+
+    if _sets_available:
+        _init_set_config()
+        try:
+            return set([Atom(str(x)) for x in _set_config.getSetAtoms(setname)])
+        except portage._sets.PackageSetNotFound:
+            raise errors.GentoolkitSetNotFound(setname)
+    raise errors.GentoolkitSetNotFound(setname)
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/test/eclean/creator.py b/pym/gentoolkit/test/eclean/creator.py
index da5123e..7c47cf2 100644
--- a/pym/gentoolkit/test/eclean/creator.py
+++ b/pym/gentoolkit/test/eclean/creator.py
@@ -12,211 +12,242 @@ import random
 import gentoolkit.pprinter as pp
 from portage import _encodings, _unicode_encode
 
-__version__= "0.0.1"
+__version__ = "0.0.1"
 __author__ = "Brian Dolbec"
 __email__ = "brian.dolbec@gmail.com"
 
 
-dir_mode = int('0774', 8)
-file_mode = int('0644', 8)
+dir_mode = int("0774", 8)
+file_mode = int("0644", 8)
 
 
 def make_dir(path):
-	"""create the directory at path
+    """create the directory at path
 
-	@param path: full pathname to create
-	capable of multiple intermediate directory creations.
-	Will Error and exit if the target dir already exits"""
-	try:
-		os.makedirs(path, dir_mode)
-	except EnvironmentError as er:
-		print( pp.error("Error creating path:%s" %path), file=sys.stderr)
-		print( pp.error("Error: %s" %str(er), file=sys.stderr))
-		sys.exit(1)
+    @param path: full pathname to create
+    capable of multiple intermediate directory creations.
+    Will error and exit if the target dir already exists"""
+    try:
+        os.makedirs(path, dir_mode)
+    except EnvironmentError as er:
+        print(pp.error("Error creating path:%s" % path), file=sys.stderr)
+        print(pp.error("Error: %s" % str(er)), file=sys.stderr)
+        sys.exit(1)
 
 
 def make_dist(path, files, clean_dict=None):
-	"""Creates a small fake distfiles/binpkg directory @path populated
-	with generated files of small random sizes using real names from
-	the files list. udates the clean_dict with fullpathname.
-
-	@param path: the path to create the distfiles directory
-	@param files: list of file names to populate "path" with
-	@param clean_dict: dict of {file-key:[path/file-key,],}
-			that will be updated with full file-path-names
-	"""
-	make_dir(path)
-	for file_ in files:
-		size = random.randint(1000,5000)
-		data = "0" * size
-		filepath = os.path.join(path, file_)
-		with open(_unicode_encode(filepath, encoding=_encodings['fs']), 'w', file_mode,
-				encoding=_encodings['content']) as new_file:
-			new_file.write(data)
-		if file_ not in clean_dict:
-			# it is included in a multifile target
-			continue
-		elif clean_dict[file_] == []:
-			clean_dict[file_] = filepath
-		else:
-			file_list = clean_dict[file_]
-			for key in range(len(file_list)):
-				file_list[key] = os.path.join(path, file_list[key])
+    """Creates a small fake distfiles/binpkg directory @path populated
+    with generated files of small random sizes using real names from
+    the files list. Updates the clean_dict with fullpathname.
+
+    @param path: the path to create the distfiles directory
+    @param files: list of file names to populate "path" with
+    @param clean_dict: dict of {file-key:[path/file-key,],}
+                    that will be updated with full file-path-names
+    """
+    make_dir(path)
+    for file_ in files:
+        size = random.randint(1000, 5000)
+        data = "0" * size
+        filepath = os.path.join(path, file_)
+        with open(
+            _unicode_encode(filepath, encoding=_encodings["fs"]),
+            "w",
+            file_mode,
+            encoding=_encodings["content"],
+        ) as new_file:
+            new_file.write(data)
+        if file_ not in clean_dict:
+            # it is included in a multifile target
+            continue
+        elif clean_dict[file_] == []:
+            clean_dict[file_] = filepath
+        else:
+            file_list = clean_dict[file_]
+            for key in range(len(file_list)):
+                file_list[key] = os.path.join(path, file_list[key])
 
 
 def make_pkgs(path, files_dict, clean_dict):
-	"""Create a small fake packages directory and call make_dist() to
-	create and populate the category dir & package files
-
-	@param path: the path to create the packages directory
-	@param files_dict: dictionary of {cat: [pkg1, pkg2,...]}
-	"""
-	make_dir(path)
-	for cat in files_dict.keys():
-		make_dist(os.path.join(path,cat),
-			files_dict[cat],
-			clean_dict)
-	# cp the Packages index file to path
-	source = os.path.join(os.path.dirname(__file__), 'Packages')
-	shutil.copy2(source, path)
+    """Create a small fake packages directory and call make_dist() to
+    create and populate the category dir & package files
+
+    @param path: the path to create the packages directory
+    @param files_dict: dictionary of {cat: [pkg1, pkg2,...]}
+    """
+    make_dir(path)
+    for cat in files_dict.keys():
+        make_dist(os.path.join(path, cat), files_dict[cat], clean_dict)
+    # cp the Packages index file to path
+    source = os.path.join(os.path.dirname(__file__), "Packages")
+    shutil.copy2(source, path)
 
 
 def make_symlinks(path, links, targets):
-	"""Create some symlinks at path
+    """Create some symlinks at path
 
-	@param path: the location to create the symlinks at
-	@param links: list of links to create
-	@param targets: list of targets to create links for,
-			and need to be in the same index order as links
-	"""
-	for i in range(len(links)):
-		os.symlink(os.path.join(path,targets[i]),
-			os.path.join(path, links[i]))
+    @param path: the location to create the symlinks at
+    @param links: list of links to create
+    @param targets: list of targets to create links for,
+                    and need to be in the same index order as links
+    """
+    for i in range(len(links)):
+        os.symlink(os.path.join(path, targets[i]), os.path.join(path, links[i]))
 
 
 class TestDirCreation:
     """"""
 
-    distfile_list = ['ExtUtils-ParseXS-2.22.tar.gz',
-        'xorg-server-1.5.3.tar.bz2',
-        'portage-utils-0.2.1.tar.bz2',
-        'sysvinit_2.87dsf.orig.tar.gz',
-        'sysvinit-2.86.tar.gz',
-        'ExtUtils-ParseXS-2.20.tar.gz',
-        'libisofs-0.6.22.tar.gz',
-        'pixman-0.16.0.tar.bz2',
-        'libburn-0.7.2.pl01.tar.gz',
-        'libisofs-0.6.24.tar.gz',
-        'xorg-server-1.5.3-gentoo-patches-08.tar.bz2',
-        'ExtUtils-ParseXS-2.200401.tar.gz',
-        'sysvinit-2.87-patches-2.tar.bz2',
-        'sysvinit-2.86-kexec.patch',
-        'Module-Build-0.3601.tar.gz',
-        'libisofs-0.6.20.tar.gz',
-        'xine-lib-1.1.17.tar.bz2',
-        'pixman-0.14.0.tar.bz2',
-        'Archive-Tar-1.52.tar.gz',
-        'libburn-0.6.8.pl00.tar.gz',
-        'libexif-0.6.17.tar.bz2',
-        'portage-utils-0.3.tar.bz2',
-        'xine-lib-1.1.15-textrel-fix.patch',
-        'Module-Build-0.34.tar.gz',
-        'Archive-Tar-1.54.tar.gz',
-        'pixman-0.16.2.tar.bz2',
-        'libburn-0.7.4.pl00.tar.gz ',
-        'Module-Build-0.340201.tar.gz',
-        'pixman-0.17.2.tar.bz2',
-        'util-macros-1.3.0.tar.bz2',
-        'Module-Build-0.35.tar.gz',
-        'libburn-0.7.2.pl00.tar.gz',
-        'util-macros-1.4.1.tar.bz2',
-        'xine-lib-1.1.16.3.tar.bz2',
-        'sysvinit-2.86-extra.patch',
-        'libburn-0.7.0.pl00.tar.gz',
-        'ExtUtils-ParseXS-2.21.tar.gz',
-        'libexif-0.6.19.tar.bz2',
-        'sysvinit-2.87-patches-1.tar.bz2',
+    distfile_list = [
+        "ExtUtils-ParseXS-2.22.tar.gz",
+        "xorg-server-1.5.3.tar.bz2",
+        "portage-utils-0.2.1.tar.bz2",
+        "sysvinit_2.87dsf.orig.tar.gz",
+        "sysvinit-2.86.tar.gz",
+        "ExtUtils-ParseXS-2.20.tar.gz",
+        "libisofs-0.6.22.tar.gz",
+        "pixman-0.16.0.tar.bz2",
+        "libburn-0.7.2.pl01.tar.gz",
+        "libisofs-0.6.24.tar.gz",
+        "xorg-server-1.5.3-gentoo-patches-08.tar.bz2",
+        "ExtUtils-ParseXS-2.200401.tar.gz",
+        "sysvinit-2.87-patches-2.tar.bz2",
+        "sysvinit-2.86-kexec.patch",
+        "Module-Build-0.3601.tar.gz",
+        "libisofs-0.6.20.tar.gz",
+        "xine-lib-1.1.17.tar.bz2",
+        "pixman-0.14.0.tar.bz2",
+        "Archive-Tar-1.52.tar.gz",
+        "libburn-0.6.8.pl00.tar.gz",
+        "libexif-0.6.17.tar.bz2",
+        "portage-utils-0.3.tar.bz2",
+        "xine-lib-1.1.15-textrel-fix.patch",
+        "Module-Build-0.34.tar.gz",
+        "Archive-Tar-1.54.tar.gz",
+        "pixman-0.16.2.tar.bz2",
+        "libburn-0.7.4.pl00.tar.gz ",
+        "Module-Build-0.340201.tar.gz",
+        "pixman-0.17.2.tar.bz2",
+        "util-macros-1.3.0.tar.bz2",
+        "Module-Build-0.35.tar.gz",
+        "libburn-0.7.2.pl00.tar.gz",
+        "util-macros-1.4.1.tar.bz2",
+        "xine-lib-1.1.16.3.tar.bz2",
+        "sysvinit-2.86-extra.patch",
+        "libburn-0.7.0.pl00.tar.gz",
+        "ExtUtils-ParseXS-2.21.tar.gz",
+        "libexif-0.6.19.tar.bz2",
+        "sysvinit-2.87-patches-1.tar.bz2",
         # now a base pkg with 2 additional symlink targets
-        'symlink-test-1.2.3.tar.bz2',
-        'target-1',
-        'target-2'
-        ]
+        "symlink-test-1.2.3.tar.bz2",
+        "target-1",
+        "target-2",
+    ]
 
-    distfile_symlink = ['symlink-test-1.2.3-symlink1',
-        'symlink-test-1.2.3-symlink2']
+    distfile_symlink = ["symlink-test-1.2.3-symlink1", "symlink-test-1.2.3-symlink2"]
 
     dist_clean = {
-        'Archive-Tar-1.52.tar.gz': [],
-        'ExtUtils-ParseXS-2.20.tar.gz': [],
-        'ExtUtils-ParseXS-2.200401.tar.gz': [],
-        'ExtUtils-ParseXS-2.21.tar.gz': [],
-        'Module-Build-0.34.tar.gz': [],
-        'Module-Build-0.340201.tar.gz': [],
-        'Module-Build-0.35.tar.gz': [],
-        'libburn-0.6.8.pl00.tar.gz': [],
-        'libburn-0.7.0.pl00.tar.gz': [],
-        'libburn-0.7.2.pl00.tar.gz': [],
-        'libburn-0.7.2.pl01.tar.gz': [],
-        'libexif-0.6.17.tar.bz2': [],
-        'libisofs-0.6.20.tar.gz': [],
-        'libisofs-0.6.22.tar.gz': [],
-        'pixman-0.14.0.tar.bz2': [],
-        'pixman-0.16.0.tar.bz2': [],
-        'pixman-0.16.2.tar.bz2': [],
-        'portage-utils-0.2.1.tar.bz2': [],
-        'sysvinit-2.86.tar.gz': ['sysvinit-2.86.tar.gz',
-            'sysvinit-2.86-kexec.patch', 'sysvinit-2.86-extra.patch'],
-        'util-macros-1.3.0.tar.bz2': [],
-        'xine-lib-1.1.15-textrel-fix.patch': [],
-        'xine-lib-1.1.16.3.tar.bz2': [],
-        'xorg-server-1.5.3.tar.bz2': ['xorg-server-1.5.3.tar.bz2',
-            'xorg-server-1.5.3-gentoo-patches-08.tar.bz2'],
-        'symlink-test-1.2.3.tar.bz2': distfile_symlink
+        "Archive-Tar-1.52.tar.gz": [],
+        "ExtUtils-ParseXS-2.20.tar.gz": [],
+        "ExtUtils-ParseXS-2.200401.tar.gz": [],
+        "ExtUtils-ParseXS-2.21.tar.gz": [],
+        "Module-Build-0.34.tar.gz": [],
+        "Module-Build-0.340201.tar.gz": [],
+        "Module-Build-0.35.tar.gz": [],
+        "libburn-0.6.8.pl00.tar.gz": [],
+        "libburn-0.7.0.pl00.tar.gz": [],
+        "libburn-0.7.2.pl00.tar.gz": [],
+        "libburn-0.7.2.pl01.tar.gz": [],
+        "libexif-0.6.17.tar.bz2": [],
+        "libisofs-0.6.20.tar.gz": [],
+        "libisofs-0.6.22.tar.gz": [],
+        "pixman-0.14.0.tar.bz2": [],
+        "pixman-0.16.0.tar.bz2": [],
+        "pixman-0.16.2.tar.bz2": [],
+        "portage-utils-0.2.1.tar.bz2": [],
+        "sysvinit-2.86.tar.gz": [
+            "sysvinit-2.86.tar.gz",
+            "sysvinit-2.86-kexec.patch",
+            "sysvinit-2.86-extra.patch",
+        ],
+        "util-macros-1.3.0.tar.bz2": [],
+        "xine-lib-1.1.15-textrel-fix.patch": [],
+        "xine-lib-1.1.16.3.tar.bz2": [],
+        "xorg-server-1.5.3.tar.bz2": [
+            "xorg-server-1.5.3.tar.bz2",
+            "xorg-server-1.5.3-gentoo-patches-08.tar.bz2",
+        ],
+        "symlink-test-1.2.3.tar.bz2": distfile_symlink,
     }
 
     package_dict = {
-        'app-arch': ['p7zip-4.65.tbz2', 'p7zip-4.57.tbz2',
-            'file-roller-2.26.3.tbz2', 'tar-1.20.tbz2',
-            'p7zip-4.58.tbz2', 'file-roller-2.28.2.tbz2',
-            'file-roller-2.24.3.tbz2', 'gzip-1.4.tbz2', 'rar-3.9.0.tbz2',
-            'bzip2-1.0.5-r1.tbz2', 'cpio-2.10.tbz2', 'tar-1.21-r1.tbz2',
-            'cpio-2.10-r1.tbz2', 'file-roller-2.28.1.tbz2', 'cpio-2.9-r2.tbz2',
-            'tar-1.22.tbz2', 'cpio-2.9-r3.tbz2'],
-        'app-editors': ['nano-2.2.0.tbz2', 'nano-2.1.10.tbz2',
-            'nano-2.0.9.tbz2', 'nano-2.2.2.tbz2'],
-        'app-portage': ['layman-1.3.0_rc1-r3.tbz2', 'layman-1.2.6.tbz2',
-            'portage-utils-0.3.1.tbz2', 'layman-1.3.0.tbz2',
-            'layman-1.2.4-r3.tbz2', 'layman-1.2.3.tbz2',
-            'layman-1.3.0_rc1.tbz2'],
-        'sys-apps': ['shadow-4.0.18.2.tbz2', 'shadow-4.1.2.2.tbz2',
-            'openrc-0.6.0-r1.tbz2', 'shadow-4.1.4.2-r1.tbz2',
-            'shadow-4.1.4.2-r2.tbz2']
-        }
+        "app-arch": [
+            "p7zip-4.65.tbz2",
+            "p7zip-4.57.tbz2",
+            "file-roller-2.26.3.tbz2",
+            "tar-1.20.tbz2",
+            "p7zip-4.58.tbz2",
+            "file-roller-2.28.2.tbz2",
+            "file-roller-2.24.3.tbz2",
+            "gzip-1.4.tbz2",
+            "rar-3.9.0.tbz2",
+            "bzip2-1.0.5-r1.tbz2",
+            "cpio-2.10.tbz2",
+            "tar-1.21-r1.tbz2",
+            "cpio-2.10-r1.tbz2",
+            "file-roller-2.28.1.tbz2",
+            "cpio-2.9-r2.tbz2",
+            "tar-1.22.tbz2",
+            "cpio-2.9-r3.tbz2",
+        ],
+        "app-editors": [
+            "nano-2.2.0.tbz2",
+            "nano-2.1.10.tbz2",
+            "nano-2.0.9.tbz2",
+            "nano-2.2.2.tbz2",
+        ],
+        "app-portage": [
+            "layman-1.3.0_rc1-r3.tbz2",
+            "layman-1.2.6.tbz2",
+            "portage-utils-0.3.1.tbz2",
+            "layman-1.3.0.tbz2",
+            "layman-1.2.4-r3.tbz2",
+            "layman-1.2.3.tbz2",
+            "layman-1.3.0_rc1.tbz2",
+        ],
+        "sys-apps": [
+            "shadow-4.0.18.2.tbz2",
+            "shadow-4.1.2.2.tbz2",
+            "openrc-0.6.0-r1.tbz2",
+            "shadow-4.1.4.2-r1.tbz2",
+            "shadow-4.1.4.2-r2.tbz2",
+        ],
+    }
 
     pkg_clean = {
-        'app-arch/p7zip-4.57.tbz2': [],
-        'app-arch/file-roller-2.26.3.tbz2': [],
-        'app-arch/tar-1.20.tbz2': [],
-        'app-arch/p7zip-4.58.tbz2': [],
-        'app-arch/file-roller-2.28.2.tbz2': [],
-        'app-arch/file-roller-2.24.3.tbz2': [],
-        'app-arch/bzip2-1.0.5-r1.tbz2': [],
-        'app-arch/cpio-2.10.tbz2': [],
-        'app-arch/tar-1.21-r1.tbz2': [],
-        'app-arch/cpio-2.9-r2.tbz2': [],
-        'app-arch/cpio-2.9-r3.tbz2': [],
-        'app-editors/nano-2.2.0.tbz2': [],
-        'app-editors/nano-2.1.10.tbz2': [],
-        'app-editors/nano-2.0.9.tbz2': [],
-        'app-portage/layman-1.3.0_rc1-r3.tbz2': [],
-        'app-portage/layman-1.2.6.tbz2': [],
-        'app-portage/layman-1.2.4-r3.tbz2': [],
-        'app-portage/layman-1.2.3.tbz2': [],
-        'app-portage/layman-1.3.0_rc1.tbz2': [],
-        'sys-apps/shadow-4.0.18.2.tbz2': [],
-        'sys-apps/shadow-4.1.2.2.tbz2': [],
-        'sys-apps/shadow-4.1.4.2-r1.tbz2': [],
-        }
+        "app-arch/p7zip-4.57.tbz2": [],
+        "app-arch/file-roller-2.26.3.tbz2": [],
+        "app-arch/tar-1.20.tbz2": [],
+        "app-arch/p7zip-4.58.tbz2": [],
+        "app-arch/file-roller-2.28.2.tbz2": [],
+        "app-arch/file-roller-2.24.3.tbz2": [],
+        "app-arch/bzip2-1.0.5-r1.tbz2": [],
+        "app-arch/cpio-2.10.tbz2": [],
+        "app-arch/tar-1.21-r1.tbz2": [],
+        "app-arch/cpio-2.9-r2.tbz2": [],
+        "app-arch/cpio-2.9-r3.tbz2": [],
+        "app-editors/nano-2.2.0.tbz2": [],
+        "app-editors/nano-2.1.10.tbz2": [],
+        "app-editors/nano-2.0.9.tbz2": [],
+        "app-portage/layman-1.3.0_rc1-r3.tbz2": [],
+        "app-portage/layman-1.2.6.tbz2": [],
+        "app-portage/layman-1.2.4-r3.tbz2": [],
+        "app-portage/layman-1.2.3.tbz2": [],
+        "app-portage/layman-1.3.0_rc1.tbz2": [],
+        "sys-apps/shadow-4.0.18.2.tbz2": [],
+        "sys-apps/shadow-4.1.2.2.tbz2": [],
+        "sys-apps/shadow-4.1.4.2-r1.tbz2": [],
+    }
 
     def __init__(self, options):
         """Initialization
@@ -226,12 +257,13 @@ class TestDirCreation:
         self.options = options
         self.targets_init = False
         # create distfiles dir and populate it
-        make_dist(self.options['target_path'], self.distfile_list, self.dist_clean)
+        make_dist(self.options["target_path"], self.distfile_list, self.dist_clean)
         # add some symlinks to it
-        path = os.path.join(self.options['target_path'], 'distfiles')
-        make_symlinks(path, self.distfile_symlink,
-            self.dist_clean['symlink-test-1.2.3.tar.bz2'])
+        path = os.path.join(self.options["target_path"], "distfiles")
+        make_symlinks(
+            path, self.distfile_symlink, self.dist_clean["symlink-test-1.2.3.tar.bz2"]
+        )
         # create the packages dir and populate it
-        path = os.path.join(self.options['target_path'], 'packages')
+        path = os.path.join(self.options["target_path"], "packages")
         make_pkgs(path, self.package_dict, self.pkg_clean)
         self.targets_init = True

diff --git a/pym/gentoolkit/test/eclean/distsupport.py b/pym/gentoolkit/test/eclean/distsupport.py
index da7cdbb..58f5624 100644
--- a/pym/gentoolkit/test/eclean/distsupport.py
+++ b/pym/gentoolkit/test/eclean/distsupport.py
@@ -14,452 +14,507 @@ import portage
 dir_mode = 0o774
 
 CPVS = [
-	'sys-auth/consolekit-0.4.1',
-	'sys-apps/devicekit-power-014',
-	'media-libs/sdl-pango-0.1.2',
-	'sys-apps/help2man-1.37.1',
-	'app-emulation/emul-linux-x86-baselibs-20100220'
-	]
+    "sys-auth/consolekit-0.4.1",
+    "sys-apps/devicekit-power-014",
+    "media-libs/sdl-pango-0.1.2",
+    "sys-apps/help2man-1.37.1",
+    "app-emulation/emul-linux-x86-baselibs-20100220",
+]
 
 PROPS = {
-	'sys-apps/devicekit-power-014': {
-		'SRC_URI':'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-		"RESTRICT": ''},
-	'sys-apps/help2man-1.37.1': {
-		"SRC_URI": 'mirror://gnu/help2man/help2man-1.37.1.tar.gz',
-		"RESTRICT": ''},
-	'sys-auth/consolekit-0.4.1': {
-		"SRC_URI": 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-		"RESTRICT": ''},
-	'app-emulation/emul-linux-x86-baselibs-20100220': {
-		"SRC_URI": 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-		"RESTRICT": 'strip'},
-	'media-libs/sdl-pango-0.1.2': {
-		"SRC_URI": 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch',
-		"RESTRICT": ''},
-	'x11-base/xorg-server-1.6.5-r1': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.6.5.tar.bz2 mirror://gentoo/xorg-server-1.6.5-gentoo-patches-01.tar.bz2',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.20.0401': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.200401.tar.gz',
-		"RESTRICT": ''},
-	'x11-misc/util-macros-1.3.0': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/util/util-macros-1.3.0.tar.bz2',
-		"RESTRICT": ''},
-	'x11-base/xorg-server-1.7.5': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.tar.bz2',
-		"RESTRICT": ''},
-	'app-portage/portage-utils-0.3.1': {
-		"SRC_URI": 'mirror://gentoo/portage-utils-0.3.1.tar.bz2',
-		"RESTRICT": ''},
-	'x11-misc/util-macros-1.5.0': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/util/util-macros-1.5.0.tar.bz2',
-		"RESTRICT": ''},
-	'perl-core/Module-Build-0.35': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.35.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.22.02': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2202.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.22.03': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2203.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.22.01': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2201.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Archive-Tar-1.38': {
-		"SRC_URI": 'mirror://cpan/authors/id/K/KA/KANE/Archive-Tar-1.38.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Archive-Tar-1.58': {
-		"SRC_URI": 'mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.58.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Archive-Tar-1.54': {
-		"SRC_URI": 'mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.54.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Archive-Tar-1.56': {
-		"SRC_URI": 'mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.56.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/portage-utils-0.2.1': {
-		"SRC_URI": 'mirror://gentoo/portage-utils-0.2.1.tar.bz2',
-		"RESTRICT": ''},
-	'dev-libs/libisofs-0.6.20-r1': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libisofs-0.6.20.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.22.02-r1': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2202.tar.gz',
-		"RESTRICT": ''},
-	'x11-misc/util-macros-1.6.0': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/util/util-macros-1.6.0.tar.bz2',
-		"RESTRICT": ''},
-	'x11-libs/pixman-0.16.0': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.0.tar.bz2',
-		"RESTRICT": ''},
-	'x11-libs/pixman-0.16.4': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.4.tar.bz2',
-		"RESTRICT": ''},
-	'x11-libs/pixman-0.17.4': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/lib/pixman-0.17.4.tar.bz2',
-		"RESTRICT": ''},
-	'x11-libs/pixman-0.17.2': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/lib/pixman-0.17.2.tar.bz2',
-		"RESTRICT": ''},
-	'dev-libs/libburn-0.7.6-r1': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libburn-0.7.6.pl00.tar.gz',
-		"RESTRICT": ''},
-	'dev-libs/libburn-0.7.0': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libburn-0.7.0.pl00.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Module-Build-0.34.0201': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.340201.tar.gz',
-		"RESTRICT": ''},
-	'dev-libs/libburn-0.6.8': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libburn-0.6.8.pl00.tar.gz',
-		"RESTRICT": ''},
-	'dev-libs/libburn-0.7.4': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libburn-0.7.4.pl00.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Module-Build-0.36.03': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.3603.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/Module-Build-0.36.01': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.3601.tar.gz',
-		"RESTRICT": ''},
-	'x11-base/xorg-server-1.5.3-r6': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.5.3.tar.bz2 mirror://gentoo/xorg-server-1.5.3-gentoo-patches-08.tar.bz2',
-		"RESTRICT": ''},
-	'dev-libs/libisofs-0.6.28': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libisofs-0.6.28.tar.gz',
-		"RESTRICT": ''},
-	'media-libs/xine-lib-1.1.17': {
-		"SRC_URI": 'mirror://sourceforge/xine/xine-lib-1.1.17.tar.bz2 mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch',
-		"RESTRICT": ''},
-	'media-libs/xine-lib-1.1.18': {
-		"SRC_URI": 'mirror://sourceforge/xine/xine-lib-1.1.18.tar.xz mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch mirror://gentoo/xine-lib-1.1.18-compat.c.tbz2',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.22': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.22.tar.gz',
-		"RESTRICT": ''},
-	'perl-core/ExtUtils-ParseXS-2.21': {
-		"SRC_URI": 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.21.tar.gz',
-		"RESTRICT": ''},
-	'x11-base/xorg-server-1.7.5.901': {
-		"SRC_URI": 'http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.901.tar.bz2',
-		"RESTRICT": ''},
-	'dev-libs/libisofs-0.6.24': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libisofs-0.6.24.tar.gz',
-		"RESTRICT": ''},
-	'dev-libs/libisofs-0.6.26': {
-		"SRC_URI": 'http://files.libburnia-project.org/releases/libisofs-0.6.26.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.3.0_rc8-r1': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.2.4.6-r1': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/eix-0.20.2': {
-		"SRC_URI": 'mirror://berlios/eix/eix-0.20.2.tar.xz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.2.4.5': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.3.0_rc8': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.2.4.6': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.3.0-r1': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.3.0.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.3.0_rc7': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.3.0': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.3.0.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.3.1': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.3.1.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.2.6': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.2.6.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-9999': {
-		"SRC_URI": '',
-		"RESTRICT": ''},
-	'app-portage/layman-1.2.5': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.2.5.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.3.0_rc1-r3': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.3.0_rc1.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/gentoolkit-0.3.0_rc9': {
-		"SRC_URI": 'mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz',
-		"RESTRICT": ''},
-	'app-portage/eix-0.20.1': {
-		"SRC_URI": 'mirror://sourceforge/eix/eix-0.20.1.tar.xz',
-		"RESTRICT": ''},
-	'app-portage/eix-0.19.2': {
-		"SRC_URI": 'mirror://sourceforge/eix/eix-0.19.2.tar.xz',
-		"RESTRICT": ''},
-	'app-portage/layman-1.3.2-r1': {
-		"SRC_URI": 'mirror://sourceforge/layman/layman-1.3.2.tar.gz',
-		"RESTRICT": ''},
+    "sys-apps/devicekit-power-014": {
+        "SRC_URI": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+        "RESTRICT": "",
+    },
+    "sys-apps/help2man-1.37.1": {
+        "SRC_URI": "mirror://gnu/help2man/help2man-1.37.1.tar.gz",
+        "RESTRICT": "",
+    },
+    "sys-auth/consolekit-0.4.1": {
+        "SRC_URI": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+        "RESTRICT": "",
+    },
+    "app-emulation/emul-linux-x86-baselibs-20100220": {
+        "SRC_URI": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+        "RESTRICT": "strip",
+    },
+    "media-libs/sdl-pango-0.1.2": {
+        "SRC_URI": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+        "RESTRICT": "",
+    },
+    "x11-base/xorg-server-1.6.5-r1": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.6.5.tar.bz2 mirror://gentoo/xorg-server-1.6.5-gentoo-patches-01.tar.bz2",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.20.0401": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.200401.tar.gz",
+        "RESTRICT": "",
+    },
+    "x11-misc/util-macros-1.3.0": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/util/util-macros-1.3.0.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-base/xorg-server-1.7.5": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.tar.bz2",
+        "RESTRICT": "",
+    },
+    "app-portage/portage-utils-0.3.1": {
+        "SRC_URI": "mirror://gentoo/portage-utils-0.3.1.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-misc/util-macros-1.5.0": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/util/util-macros-1.5.0.tar.bz2",
+        "RESTRICT": "",
+    },
+    "perl-core/Module-Build-0.35": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.35.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.22.02": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2202.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.22.03": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2203.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.22.01": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2201.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Archive-Tar-1.38": {
+        "SRC_URI": "mirror://cpan/authors/id/K/KA/KANE/Archive-Tar-1.38.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Archive-Tar-1.58": {
+        "SRC_URI": "mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.58.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Archive-Tar-1.54": {
+        "SRC_URI": "mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.54.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Archive-Tar-1.56": {
+        "SRC_URI": "mirror://cpan/authors/id/B/BI/BINGOS//Archive-Tar-1.56.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/portage-utils-0.2.1": {
+        "SRC_URI": "mirror://gentoo/portage-utils-0.2.1.tar.bz2",
+        "RESTRICT": "",
+    },
+    "dev-libs/libisofs-0.6.20-r1": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libisofs-0.6.20.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.22.02-r1": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.2202.tar.gz",
+        "RESTRICT": "",
+    },
+    "x11-misc/util-macros-1.6.0": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/util/util-macros-1.6.0.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-libs/pixman-0.16.0": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.0.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-libs/pixman-0.16.4": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.4.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-libs/pixman-0.17.4": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/lib/pixman-0.17.4.tar.bz2",
+        "RESTRICT": "",
+    },
+    "x11-libs/pixman-0.17.2": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/lib/pixman-0.17.2.tar.bz2",
+        "RESTRICT": "",
+    },
+    "dev-libs/libburn-0.7.6-r1": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libburn-0.7.6.pl00.tar.gz",
+        "RESTRICT": "",
+    },
+    "dev-libs/libburn-0.7.0": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libburn-0.7.0.pl00.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Module-Build-0.34.0201": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.340201.tar.gz",
+        "RESTRICT": "",
+    },
+    "dev-libs/libburn-0.6.8": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libburn-0.6.8.pl00.tar.gz",
+        "RESTRICT": "",
+    },
+    "dev-libs/libburn-0.7.4": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libburn-0.7.4.pl00.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Module-Build-0.36.03": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.3603.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/Module-Build-0.36.01": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//Module-Build-0.3601.tar.gz",
+        "RESTRICT": "",
+    },
+    "x11-base/xorg-server-1.5.3-r6": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.5.3.tar.bz2 mirror://gentoo/xorg-server-1.5.3-gentoo-patches-08.tar.bz2",
+        "RESTRICT": "",
+    },
+    "dev-libs/libisofs-0.6.28": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libisofs-0.6.28.tar.gz",
+        "RESTRICT": "",
+    },
+    "media-libs/xine-lib-1.1.17": {
+        "SRC_URI": "mirror://sourceforge/xine/xine-lib-1.1.17.tar.bz2 mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch",
+        "RESTRICT": "",
+    },
+    "media-libs/xine-lib-1.1.18": {
+        "SRC_URI": "mirror://sourceforge/xine/xine-lib-1.1.18.tar.xz mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch mirror://gentoo/xine-lib-1.1.18-compat.c.tbz2",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.22": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.22.tar.gz",
+        "RESTRICT": "",
+    },
+    "perl-core/ExtUtils-ParseXS-2.21": {
+        "SRC_URI": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.21.tar.gz",
+        "RESTRICT": "",
+    },
+    "x11-base/xorg-server-1.7.5.901": {
+        "SRC_URI": "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.901.tar.bz2",
+        "RESTRICT": "",
+    },
+    "dev-libs/libisofs-0.6.24": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libisofs-0.6.24.tar.gz",
+        "RESTRICT": "",
+    },
+    "dev-libs/libisofs-0.6.26": {
+        "SRC_URI": "http://files.libburnia-project.org/releases/libisofs-0.6.26.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.3.0_rc8-r1": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.2.4.6-r1": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/eix-0.20.2": {
+        "SRC_URI": "mirror://berlios/eix/eix-0.20.2.tar.xz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.2.4.5": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.3.0_rc8": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.2.4.6": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.3.0-r1": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.3.0.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.3.0_rc7": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.3.0": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.3.0.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.3.1": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.3.1.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.2.6": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.2.6.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-9999": {"SRC_URI": "", "RESTRICT": ""},
+    "app-portage/layman-1.2.5": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.2.5.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.3.0_rc1-r3": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.3.0_rc1.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/gentoolkit-0.3.0_rc9": {
+        "SRC_URI": "mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz",
+        "RESTRICT": "",
+    },
+    "app-portage/eix-0.20.1": {
+        "SRC_URI": "mirror://sourceforge/eix/eix-0.20.1.tar.xz",
+        "RESTRICT": "",
+    },
+    "app-portage/eix-0.19.2": {
+        "SRC_URI": "mirror://sourceforge/eix/eix-0.19.2.tar.xz",
+        "RESTRICT": "",
+    },
+    "app-portage/layman-1.3.2-r1": {
+        "SRC_URI": "mirror://sourceforge/layman/layman-1.3.2.tar.gz",
+        "RESTRICT": "",
+    },
 }
 
 PKGS = {
-	'app-portage/layman-1.3.2-r1': 'mirror://sourceforge/layman/layman-1.3.2.tar.gz',
-	'app-portage/eix-0.20.1': 'mirror://sourceforge/eix/eix-0.20.1.tar.xz',
-	'app-portage/eix-0.19.2': 'mirror://sourceforge/eix/eix-0.19.2.tar.xz',
-	'app-portage/gentoolkit-0.3.0_rc9': 'mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz',
-	'app-portage/gentoolkit-0.2.4.6': 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-	'media-libs/xine-lib-1.1.18': 'mirror://sourceforge/xine/xine-lib-1.1.18.tar.xz mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch mirror://gentoo/xine-lib-1.1.18-compat.c.tbz2',
-	'perl-core/ExtUtils-ParseXS-2.21': 'mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.21.tar.gz',
-	'dev-libs/libisofs-0.6.24': 'http://files.libburnia-project.org/releases/libisofs-0.6.24.tar.gz',
-	}
+    "app-portage/layman-1.3.2-r1": "mirror://sourceforge/layman/layman-1.3.2.tar.gz",
+    "app-portage/eix-0.20.1": "mirror://sourceforge/eix/eix-0.20.1.tar.xz",
+    "app-portage/eix-0.19.2": "mirror://sourceforge/eix/eix-0.19.2.tar.xz",
+    "app-portage/gentoolkit-0.3.0_rc9": "mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz",
+    "app-portage/gentoolkit-0.2.4.6": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+    "media-libs/xine-lib-1.1.18": "mirror://sourceforge/xine/xine-lib-1.1.18.tar.xz mirror://gentoo/xine-lib-1.1.15-textrel-fix.patch mirror://gentoo/xine-lib-1.1.18-compat.c.tbz2",
+    "perl-core/ExtUtils-ParseXS-2.21": "mirror://cpan/authors/id/D/DA/DAGOLDEN//ExtUtils-ParseXS-2.21.tar.gz",
+    "dev-libs/libisofs-0.6.24": "http://files.libburnia-project.org/releases/libisofs-0.6.24.tar.gz",
+}
 
 CLEAN_ME = {
-	'layman-1.3.2.tar.gz': '/path/to/some/where/layman-1.3.2.tar.gz',
-	'layman-1.2.5.tar.gz': '/path/to/some/where/layman-1.2.5.tar.gz',
-	'eix-0.20.1.tar.xz': '/path/to/some/where/eix-0.20.1.tar.xz',
-	'gentoolkit-0.3.0_rc9.tar.gz': '/path/to/some/where/gentoolkit-0.3.0_rc9.tar.gz',
-	'xine-lib-1.1.18.tar.xz': '/path/to/some/where/xine-lib-1.1.18.tar.xz',
-	'xine-lib-1.1.15-textrel-fix.patch': '/path/to/some/where/xine-lib-1.1.15-textrel-fix.patch',
-	'xine-lib-1.1.18-compat.c.tbz2': '/path/to/some/where/xine-lib-1.1.18-compat.c.tbz2',
-	'ExtUtils-ParseXS-2.21.tar.gz': '/path/to/some/where/ExtUtils-ParseXS-2.21.tar.gz',
-	'libisofs-0.6.24.tar.gz': '/path/to/some/where/libisofs-0.6.24.tar.gz'
-	}
+    "layman-1.3.2.tar.gz": "/path/to/some/where/layman-1.3.2.tar.gz",
+    "layman-1.2.5.tar.gz": "/path/to/some/where/layman-1.2.5.tar.gz",
+    "eix-0.20.1.tar.xz": "/path/to/some/where/eix-0.20.1.tar.xz",
+    "gentoolkit-0.3.0_rc9.tar.gz": "/path/to/some/where/gentoolkit-0.3.0_rc9.tar.gz",
+    "xine-lib-1.1.18.tar.xz": "/path/to/some/where/xine-lib-1.1.18.tar.xz",
+    "xine-lib-1.1.15-textrel-fix.patch": "/path/to/some/where/xine-lib-1.1.15-textrel-fix.patch",
+    "xine-lib-1.1.18-compat.c.tbz2": "/path/to/some/where/xine-lib-1.1.18-compat.c.tbz2",
+    "ExtUtils-ParseXS-2.21.tar.gz": "/path/to/some/where/ExtUtils-ParseXS-2.21.tar.gz",
+    "libisofs-0.6.24.tar.gz": "/path/to/some/where/libisofs-0.6.24.tar.gz",
+}
 
 CPVS2 = [
-	'app-emulation/emul-linux-x86-baselibs-20100220',
-	'app-portage/eix-0.19.2', 'app-portage/eix-0.20.1',
-	'app-portage/eix-0.20.2',
-	'app-portage/gentoolkit-0.2.4.5',
-	'app-portage/gentoolkit-0.2.4.6',
-	'app-portage/gentoolkit-0.2.4.6-r1',
-	'app-portage/gentoolkit-0.3.0_rc7',
-	'app-portage/gentoolkit-0.3.0_rc8',
-	'app-portage/gentoolkit-0.3.0_rc8-r1',
-	'app-portage/gentoolkit-0.3.0_rc9',
-	'app-portage/layman-1.2.5',
-	'app-portage/layman-1.2.6',
-	'app-portage/layman-1.3.0',
-	'app-portage/layman-1.3.0-r1',
-	'app-portage/layman-1.3.0_rc1-r3',
-	'app-portage/layman-1.3.1',
-	'app-portage/layman-1.3.2-r1',
-	'app-portage/layman-9999',
-	'app-portage/portage-utils-0.2.1',
-	'app-portage/portage-utils-0.3.1',
-	'dev-libs/libburn-0.6.8',
-	'dev-libs/libburn-0.7.0',
-	'dev-libs/libburn-0.7.4',
-	'dev-libs/libburn-0.7.6-r1',
-	'dev-libs/libisofs-0.6.20-r1',
-	'dev-libs/libisofs-0.6.24',
-	'dev-libs/libisofs-0.6.26',
-	'dev-libs/libisofs-0.6.28',
-	'media-libs/sdl-pango-0.1.2',
-	'media-libs/xine-lib-1.1.17',
-	'media-libs/xine-lib-1.1.18',
-	'perl-core/Archive-Tar-1.38',
-	'perl-core/Archive-Tar-1.54',
-	'perl-core/Archive-Tar-1.56',
-	'perl-core/Archive-Tar-1.58',
-	'perl-core/ExtUtils-ParseXS-2.20.0401',
-	'perl-core/ExtUtils-ParseXS-2.21',
-	'perl-core/ExtUtils-ParseXS-2.22',
-	'perl-core/ExtUtils-ParseXS-2.22.01',
-	'perl-core/ExtUtils-ParseXS-2.22.02',
-	'perl-core/ExtUtils-ParseXS-2.22.02-r1',
-	'perl-core/ExtUtils-ParseXS-2.22.03',
-	'perl-core/Module-Build-0.34.0201',
-	'perl-core/Module-Build-0.35',
-	'perl-core/Module-Build-0.36.01',
-	'perl-core/Module-Build-0.36.03',
-	'sys-apps/devicekit-power-014',
-	'sys-apps/help2man-1.37.1',
-	'sys-auth/consolekit-0.4.1',
-	'x11-base/xorg-server-1.5.3-r6',
-	'x11-base/xorg-server-1.6.5-r1',
-	'x11-base/xorg-server-1.7.5',
-	'x11-base/xorg-server-1.7.5.901',
-	'x11-libs/pixman-0.16.0',
-	'x11-libs/pixman-0.16.4',
-	'x11-libs/pixman-0.17.2',
-	'x11-libs/pixman-0.17.4',
-	'x11-misc/util-macros-1.3.0',
-	'x11-misc/util-macros-1.5.0',
-	'x11-misc/util-macros-1.6.0'
-	]
+    "app-emulation/emul-linux-x86-baselibs-20100220",
+    "app-portage/eix-0.19.2",
+    "app-portage/eix-0.20.1",
+    "app-portage/eix-0.20.2",
+    "app-portage/gentoolkit-0.2.4.5",
+    "app-portage/gentoolkit-0.2.4.6",
+    "app-portage/gentoolkit-0.2.4.6-r1",
+    "app-portage/gentoolkit-0.3.0_rc7",
+    "app-portage/gentoolkit-0.3.0_rc8",
+    "app-portage/gentoolkit-0.3.0_rc8-r1",
+    "app-portage/gentoolkit-0.3.0_rc9",
+    "app-portage/layman-1.2.5",
+    "app-portage/layman-1.2.6",
+    "app-portage/layman-1.3.0",
+    "app-portage/layman-1.3.0-r1",
+    "app-portage/layman-1.3.0_rc1-r3",
+    "app-portage/layman-1.3.1",
+    "app-portage/layman-1.3.2-r1",
+    "app-portage/layman-9999",
+    "app-portage/portage-utils-0.2.1",
+    "app-portage/portage-utils-0.3.1",
+    "dev-libs/libburn-0.6.8",
+    "dev-libs/libburn-0.7.0",
+    "dev-libs/libburn-0.7.4",
+    "dev-libs/libburn-0.7.6-r1",
+    "dev-libs/libisofs-0.6.20-r1",
+    "dev-libs/libisofs-0.6.24",
+    "dev-libs/libisofs-0.6.26",
+    "dev-libs/libisofs-0.6.28",
+    "media-libs/sdl-pango-0.1.2",
+    "media-libs/xine-lib-1.1.17",
+    "media-libs/xine-lib-1.1.18",
+    "perl-core/Archive-Tar-1.38",
+    "perl-core/Archive-Tar-1.54",
+    "perl-core/Archive-Tar-1.56",
+    "perl-core/Archive-Tar-1.58",
+    "perl-core/ExtUtils-ParseXS-2.20.0401",
+    "perl-core/ExtUtils-ParseXS-2.21",
+    "perl-core/ExtUtils-ParseXS-2.22",
+    "perl-core/ExtUtils-ParseXS-2.22.01",
+    "perl-core/ExtUtils-ParseXS-2.22.02",
+    "perl-core/ExtUtils-ParseXS-2.22.02-r1",
+    "perl-core/ExtUtils-ParseXS-2.22.03",
+    "perl-core/Module-Build-0.34.0201",
+    "perl-core/Module-Build-0.35",
+    "perl-core/Module-Build-0.36.01",
+    "perl-core/Module-Build-0.36.03",
+    "sys-apps/devicekit-power-014",
+    "sys-apps/help2man-1.37.1",
+    "sys-auth/consolekit-0.4.1",
+    "x11-base/xorg-server-1.5.3-r6",
+    "x11-base/xorg-server-1.6.5-r1",
+    "x11-base/xorg-server-1.7.5",
+    "x11-base/xorg-server-1.7.5.901",
+    "x11-libs/pixman-0.16.0",
+    "x11-libs/pixman-0.16.4",
+    "x11-libs/pixman-0.17.2",
+    "x11-libs/pixman-0.17.4",
+    "x11-misc/util-macros-1.3.0",
+    "x11-misc/util-macros-1.5.0",
+    "x11-misc/util-macros-1.6.0",
+]
 
 FILES = [
-	'DeviceKit-power-014.tar.gz',
-	'help2man-1.37.1.tar.gz',
-	'ConsoleKit-0.4.1.tar.bz2',
-	'emul-linux-x86-baselibs-20100220.tar.gz',
-	'SDL_Pango-0.1.2.tar.gz',
-	'SDL_Pango-0.1.2-API-adds.patch'
-	]
+    "DeviceKit-power-014.tar.gz",
+    "help2man-1.37.1.tar.gz",
+    "ConsoleKit-0.4.1.tar.bz2",
+    "emul-linux-x86-baselibs-20100220.tar.gz",
+    "SDL_Pango-0.1.2.tar.gz",
+    "SDL_Pango-0.1.2-API-adds.patch",
+]
 
 
 CPVS3 = [
-	'x11-base/xorg-server-1.7.5',
-	'x11-misc/util-macros-1.6.0',
-	'x11-libs/pixman-0.16.4',
-	#'dev-libs/libisofs-0.6.28',
-	#'perl-core/Module-Build-0.36.03',
-	#'perl-core/ExtUtils-ParseXS-2.22.02-r1',
-	#'perl-core/Archive-Tar-1.56',
-	#'app-portage/gentoolkit-0.3.0_rc8-r1',
-	#'app-portage/layman-1.3.1',
-	#'app-portage/eix-0.20.1',
-	]
-
-
-Exclude= {'packages': {
-		'media-libs/sdl-pango': None,
-		 },
-	'anti-packages': {'app-emulation/emul-linux-x86-baselibs': None},
-	'categories': {'app-portage': None,
-		'app-portage/gentoolkit': None
-		},
-	'filenames': {'sys-auth/consolekit-0.4.1': re.compile(r'sys-auth/consolekit-0\.4\.1')
-		}
-	}
+    "x11-base/xorg-server-1.7.5",
+    "x11-misc/util-macros-1.6.0",
+    "x11-libs/pixman-0.16.4",
+    # 'dev-libs/libisofs-0.6.28',
+    # 'perl-core/Module-Build-0.36.03',
+    # 'perl-core/ExtUtils-ParseXS-2.22.02-r1',
+    # 'perl-core/Archive-Tar-1.56',
+    # 'app-portage/gentoolkit-0.3.0_rc8-r1',
+    # 'app-portage/layman-1.3.1',
+    # 'app-portage/eix-0.20.1',
+]
+
+
+Exclude = {
+    "packages": {
+        "media-libs/sdl-pango": None,
+    },
+    "anti-packages": {"app-emulation/emul-linux-x86-baselibs": None},
+    "categories": {"app-portage": None, "app-portage/gentoolkit": None},
+    "filenames": {
+        "sys-auth/consolekit-0.4.1": re.compile(r"sys-auth/consolekit-0\.4\.1")
+    },
+}
 
 
 def get_props(cpvs):
-	props = {}
-	for cpv in cpvs:
-		props[cpv] = PROPS[cpv].copy()
-	return props
+    props = {}
+    for cpv in cpvs:
+        props[cpv] = PROPS[cpv].copy()
+    return props
+
 
 class Dbapi:
-	"""Fake portage dbapi class used to return
-	pre-determined test data in place of a live system
-
-	@param cp_all: list of cat/pkg's to use for testing
-				eg: ['app-portage/gentoolkit', 'app-portage/porthole',...]
-	@param cpv_all: list of cat/pkg-ver's to use for testing.
-	@param props: dictionary of ebuild properties to use for testing.
-				eg: {'cpv': {"SRC_URI": 'http://...', "RESTRICT": restriction},}
-	@param cp_list: ?????????
-	"""
-
-	def __init__(self, cp_all=None, cpv_all=None, props=None,
-			cp_list=None, name=None):
-		self._cp_all = cp_all
-		self._cpv_all = cpv_all
-		self._props = props
-		self._cp_list = cp_list
-		self.name = name
-		#print(self.name, "DBAPI: cpv_all=")
-		#print(self._cpv_all)
-		#print(self.name, "DBAPI: props=")
-		#print(self._props)
-
-	def cp_all(self):
-		return self._cp_all[:]
-
-	def cp_list(self, package):
-		#print(self._cp_list)
-		if self._cp_list is None or self._cp_list==[]:
-			cplist = []
-			for cpv in self._cpv_all:
-				parts = portage.catpkgsplit(cpv)
-				cp='/'.join(parts[:2])
-				if cp == package:
-					cplist.append(cpv)
-			#print("package = %s, cplist = %s" %(package, cplist))
-			return cplist
-		else:
-			return self._cp_list
-
-	def cpv_all(self):
-		#print(self.name, type(self._cpv_all), self._cpv_all)
-		return self._cpv_all
-
-	def cpv_exists(self, cpv):
-		return cpv in self._cpv_all
-
-	def aux_get(self, cpv, prop_list):
-		"""only need stubs for ["SRC_URI","RESTRICT"]
-		"""
-		#print("FAKE DBAPI", cpv, prop_list)
-		props = []
-		for prop in prop_list:
-			if cpv in self._props:
-				props.append(self._props[cpv][prop])
-			else:
-				raise KeyError(self.name)
-		#print(self.name,  "DBAPI", cpv, props)
-		return props
+    """Fake portage dbapi class used to return
+    pre-determined test data in place of a live system
+
+    @param cp_all: list of cat/pkg's to use for testing
+                            eg: ['app-portage/gentoolkit', 'app-portage/porthole',...]
+    @param cpv_all: list of cat/pkg-ver's to use for testing.
+    @param props: dictionary of ebuild properties to use for testing.
+                            eg: {'cpv': {"SRC_URI": 'http://...', "RESTRICT": restriction},}
+    @param cp_list: ?????????
+    """
+
+    def __init__(self, cp_all=None, cpv_all=None, props=None, cp_list=None, name=None):
+        self._cp_all = cp_all
+        self._cpv_all = cpv_all
+        self._props = props
+        self._cp_list = cp_list
+        self.name = name
+        # print(self.name, "DBAPI: cpv_all=")
+        # print(self._cpv_all)
+        # print(self.name, "DBAPI: props=")
+        # print(self._props)
+
+    def cp_all(self):
+        return self._cp_all[:]
+
+    def cp_list(self, package):
+        # print(self._cp_list)
+        if self._cp_list is None or self._cp_list == []:
+            cplist = []
+            for cpv in self._cpv_all:
+                parts = portage.catpkgsplit(cpv)
+                cp = "/".join(parts[:2])
+                if cp == package:
+                    cplist.append(cpv)
+            # print("package = %s, cplist = %s" %(package, cplist))
+            return cplist
+        else:
+            return self._cp_list
+
+    def cpv_all(self):
+        # print(self.name, type(self._cpv_all), self._cpv_all)
+        return self._cpv_all
+
+    def cpv_exists(self, cpv):
+        return cpv in self._cpv_all
+
+    def aux_get(self, cpv, prop_list):
+        """only need stubs for ["SRC_URI","RESTRICT"]"""
+        # print("FAKE DBAPI", cpv, prop_list)
+        props = []
+        for prop in prop_list:
+            if cpv in self._props:
+                props.append(self._props[cpv][prop])
+            else:
+                raise KeyError(self.name)
+        # print(self.name,  "DBAPI", cpv, props)
+        return props
 
 
 class OutputSimulator:
-	"""Simple output accumulator used for testing.
-	Simulates eclean.output.OutputControl class """
+    """Simple output accumulator used for testing.
+    Simulates eclean.output.OutputControl class"""
 
-	def __init__(self, callback):
-		self.callback = callback
+    def __init__(self, callback):
+        self.callback = callback
 
-	def set_data(self, data):
-		"""sets the data for the progress_controller to return
-		for the test being performed"""
-		self.data = data
+    def set_data(self, data):
+        """sets the data for the progress_controller to return
+        for the test being performed"""
+        self.data = data
 
-	def einfo(self, message=""):
-		self.callback('einfo', message)
+    def einfo(self, message=""):
+        self.callback("einfo", message)
 
-	def eprompt(self, message):
-		self.callback('eprompt', message)
+    def eprompt(self, message):
+        self.callback("eprompt", message)
 
-	def prettySize(self, size, justify=False, color=None):
-		self.callback('prettySize', size)
+    def prettySize(self, size, justify=False, color=None):
+        self.callback("prettySize", size)
 
-	def yesNoAllPrompt(self, message="Dummy"):
-		self.callback('yesNoAllPrompt', message)
+    def yesNoAllPrompt(self, message="Dummy"):
+        self.callback("yesNoAllPrompt", message)
 
-	def progress_controller(self, size, key, clean_list, file_type):
-		self.callback('progress_controller', self.data)
-		return self.data
+    def progress_controller(self, size, key, clean_list, file_type):
+        self.callback("progress_controller", self.data)
+        return self.data
 
-	def total(self, mode, size, num_files, verb, action):
-		pass
+    def total(self, mode, size, num_files, verb, action):
+        pass
 
-	def list_pkgs(self, pkgs):
-		self.callback('list_pkgs', pkgs)
+    def list_pkgs(self, pkgs):
+        self.callback("list_pkgs", pkgs)
 
 
 class TestDisfiles:
-
-	def setUp(self):
-		# create the dist dir
-		self.tmpdir = mkdtemp()
-		#print("New tmpdir =", self.tmpdir)
-		os.chmod(self.tmpdir, dir_mode)
-		self.workdir = os.path.join(self.tmpdir, 'distfiles')
-		dir = os.path.dirname(os.path.abspath(__file__))
-		file = os.path.join(dir,"testdistfiles.tar.gz")
-		command = "tar -xpf %s -C %s" %(file, self.tmpdir)
-		subprocess.call(command, shell=True)
-		# create a symlink as part of the test files
-		#print()
-		self.target_symlink = "symlink-1.0.0.tar.gz"
-		os.symlink(file, os.path.join(self.workdir, self.target_symlink))
-		self.files = FILES[:]
-		self.files.append(self.target_symlink)
-		self.test_filepaths = []
-		for file in self.files:
-			self.test_filepaths.append(os.path.join(self.workdir, file))
-
-	def tearDown(self):
-		for file in self.test_filepaths:
-			os.unlink(file)
-		#print("deleting workdir =", self.workdir)
-		os.rmdir(self.workdir)
-		#print("deleting tmpdir =", self.tmpdir)
-		os.rmdir(self.tmpdir)
-
-
+    def setUp(self):
+        # create the dist dir
+        self.tmpdir = mkdtemp()
+        # print("New tmpdir =", self.tmpdir)
+        os.chmod(self.tmpdir, dir_mode)
+        self.workdir = os.path.join(self.tmpdir, "distfiles")
+        dir = os.path.dirname(os.path.abspath(__file__))
+        file = os.path.join(dir, "testdistfiles.tar.gz")
+        command = "tar -xpf %s -C %s" % (file, self.tmpdir)
+        subprocess.call(command, shell=True)
+        # create a symlink as part of the test files
+        # print()
+        self.target_symlink = "symlink-1.0.0.tar.gz"
+        os.symlink(file, os.path.join(self.workdir, self.target_symlink))
+        self.files = FILES[:]
+        self.files.append(self.target_symlink)
+        self.test_filepaths = []
+        for file in self.files:
+            self.test_filepaths.append(os.path.join(self.workdir, file))
+
+    def tearDown(self):
+        for file in self.test_filepaths:
+            os.unlink(file)
+        # print("deleting workdir =", self.workdir)
+        os.rmdir(self.workdir)
+        # print("deleting tmpdir =", self.tmpdir)
+        os.rmdir(self.tmpdir)

diff --git a/pym/gentoolkit/test/eclean/test_clean.py b/pym/gentoolkit/test/eclean/test_clean.py
index 2f50e27..a2519d2 100755
--- a/pym/gentoolkit/test/eclean/test_clean.py
+++ b/pym/gentoolkit/test/eclean/test_clean.py
@@ -4,7 +4,7 @@
 # Copyright 2010 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
-__version__= "0.0.1"
+__version__ = "0.0.1"
 __author__ = "Brian Dolbec"
 __email__ = "brian.dolbec@gmail.com"
 
@@ -87,7 +87,6 @@ __email__ = "brian.dolbec@gmail.com"
 #                  data = controller.gathered_data
 
 
-
 #  def useage():
 #          """output run options"""
 #          print("Useage: test_clean [OPTONS] path=test-dir")
@@ -125,7 +124,6 @@ __email__ = "brian.dolbec@gmail.com"
 #                  sys.exit(1)
 
 
-
 #  def main(cmdline=False):
 #          """parse options and run the tests"""
 
@@ -141,5 +139,3 @@ __email__ = "brian.dolbec@gmail.com"
 #                  print("Aborted.")
 #                  sys.exit(130)
 #          sys.exit(0)
-
-

diff --git a/pym/gentoolkit/test/eclean/test_search.py b/pym/gentoolkit/test/eclean/test_search.py
index 5a20e38..714fbb5 100755
--- a/pym/gentoolkit/test/eclean/test_search.py
+++ b/pym/gentoolkit/test/eclean/test_search.py
@@ -12,8 +12,18 @@ import re
 import os
 
 from gentoolkit.test.eclean.distsupport import (
-    FILES, TestDisfiles, OutputSimulator, Dbapi, CPVS, CPVS2, CPVS3, PROPS,
-    PKGS, CLEAN_ME, get_props)
+    FILES,
+    TestDisfiles,
+    OutputSimulator,
+    Dbapi,
+    CPVS,
+    CPVS2,
+    CPVS3,
+    PROPS,
+    PKGS,
+    CLEAN_ME,
+    get_props,
+)
 import gentoolkit.eclean.search as search
 from gentoolkit.eclean.search import DistfilesSearch
 from gentoolkit.eclean.exclude import parseExcludeFile
@@ -22,602 +32,653 @@ from gentoolkit.eclean.exclude import parseExcludeFile
 
 
 class DistLimits(DistfilesSearch):
-	"""subclass the DistfilesSearch class in order to override a number of
-	functions to isolate & test"""
+    """subclass the DistfilesSearch class in order to override a number of
+    functions to isolate & test"""
 
-	def __init__(self,
-			output=lambda x: None,
-			portdb=None,
-			vardb=None,
-			):
-		DistfilesSearch.__init__(self, output, portdb, vardb)
-		self.data = None
+    def __init__(
+        self,
+        output=lambda x: None,
+        portdb=None,
+        vardb=None,
+    ):
+        DistfilesSearch.__init__(self, output, portdb, vardb)
+        self.data = None
 
-	def set_data(self, data):
-		"""sets the data for the functions to return for
-		the test being performed"""
-		self.data = data
+    def set_data(self, data):
+        """sets the data for the functions to return for
+        the test being performed"""
+        self.data = data
 
 
 class TestCheckLimits(unittest.TestCase):
-	"""Test the eclean.search.DistfilesSearch._check_limits() group.
-
-	it will test [ _get_default_checks(), _check_limits(),
-	_isreg_check_(), _size_check_(), _time_check_(), _filenames_check_()]
-	"""
-
-	test_excludes = {
-		'blank': {},
-		'filenames': {
-			'filenames': {'help2man-1.37.1.tar.gz': re.compile(r'help2man-1\.37\.1\.tar\.gz')}
-			}
-		}
-
-	def setUp(self):
-		self.testdata = [
-			# test is_reg_limit alone, will give a base clean_me
-			{ 'test': 'is_reg_limit',
-			'params': (0, 0, self.test_excludes['blank']),
-			'results': FILES[:],
-			'output': ["   - skipping size limit check",
-				"   - skipping time limit check",
-				"   - skipping exclude filenames check"
-				]
-			},
-			# test size_limit trip
-			{ 'test': 'size_limit',
-			'params': (1024000, 0, self.test_excludes['blank']),
-			'results': FILES[:3] + FILES[4:],
-			'output': [
-				"   - skipping time limit check",
-				"   - skipping exclude filenames check"
-				]
-			},
-			# test time_limit trip
-			{ 'test': 'time_limit',
-			'params': (0,1112671872, self.test_excludes['blank']),
-			'results': [FILES[4]], # + FILES[5:],
-			'output': ["   - skipping size limit check",
-				"   - skipping exclude filenames check"
-				]
-			},
-			# test filenames_limit trip
-			{ 'test': 'filenames_limit',
-			'params': (0, 0, self.test_excludes['filenames']),
-			'results': FILES[:1] + FILES[2:],
-			'output': ["   - skipping size limit check",
-				"   - skipping time limit check",
-				]
-			}
-		]
-
-		self.testwork = TestDisfiles()
-		self.testwork.setUp()
-		self.workdir = self.testwork.workdir
-		self.target_class = DistLimits() #DistCheckLimits()
-		self.output = OutputSimulator(self.callback)
-		self.target_class.output = self.output
-		self.callback_data = []
-		self.test_index = 0
-
-	def tearDown(self):
-		self.testwork.tearDown()
-		#pass
-
-	def get_test(self, num):
-		return self.testdata[num]
-
-	def callback(self, id, data):
-		self.callback_data.append(data)
-
-	def set_limits(self, test):
-		limit = {}
-		#set is_reg always to testdata[0]
-		t1 = self.testdata[0]
-		limit[t1['test']] = {}
-		name = test['test']
-		limit[name] = {}
-		limits = test['limits']
-		for i in range(6):
-			file = self.testwork.files[i]
-			limits = test['limits']
-			limit[t1['test']][file] = t1['limits'][i]
-			if name != t1['test']:
-				limit[name][file] = limits[i]
-		return limit
-
-
-	def test_check_limits(self):
-		"""Testing DistfilesSearch._check_limits()"""
-		# pass in output=self.output.einfo
-		self.target_class.output = self.output.einfo
-		run_callbacks = []
-		run_results = []
-		print()
-		# run the tests
-		for i in range(4):
-			clean_me = {}
-			test = self.get_test(i)
-			#print("test =", test['test'])
-			if not test:
-				print("Error getting test data for index:", i)
-			#self.target_class.set_data(self.set_limits(test))
-			size_chk, time_chk, exclude = test["params"]
-			checks = self.target_class._get_default_checks(size_chk, time_chk, exclude, False)
-			clean_me = self.target_class._check_limits(self.workdir, checks, clean_me)
-			results = sorted(clean_me)
-			run_results.append(results)
-			self.callback_data.sort()
-			run_callbacks.append(self.callback_data)
-			self.callback_data = []
-			results = None
-
-		# check results
-		for i in range(4):
-			test = self.get_test(i)
-			print("test =", test['test'])
-			if not test:
-				print("Error getting test data for index:", i)
-			test['results'].sort()
-			#print("actual=", run_results[i])
-			#print("should-be=", test['results'])
-			self.assertEqual(run_results[i], test["results"],
-				"/ntest_check_limits, test# %d, test=%s, diff=%s"
-				%(i, test['test'], str(set(run_results[i]).difference(test['results'])))
-			)
-			test['output'].sort()
-			self.assertEqual(run_callbacks[i], test['output'])
+    """Test the eclean.search.DistfilesSearch._check_limits() group.
+
+    it will test [ _get_default_checks(), _check_limits(),
+    _isreg_check_(), _size_check_(), _time_check_(), _filenames_check_()]
+    """
+
+    test_excludes = {
+        "blank": {},
+        "filenames": {
+            "filenames": {
+                "help2man-1.37.1.tar.gz": re.compile(r"help2man-1\.37\.1\.tar\.gz")
+            }
+        },
+    }
+
+    def setUp(self):
+        self.testdata = [
+            # test is_reg_limit alone, will give a base clean_me
+            {
+                "test": "is_reg_limit",
+                "params": (0, 0, self.test_excludes["blank"]),
+                "results": FILES[:],
+                "output": [
+                    "   - skipping size limit check",
+                    "   - skipping time limit check",
+                    "   - skipping exclude filenames check",
+                ],
+            },
+            # test size_limit trip
+            {
+                "test": "size_limit",
+                "params": (1024000, 0, self.test_excludes["blank"]),
+                "results": FILES[:3] + FILES[4:],
+                "output": [
+                    "   - skipping time limit check",
+                    "   - skipping exclude filenames check",
+                ],
+            },
+            # test time_limit trip
+            {
+                "test": "time_limit",
+                "params": (0, 1112671872, self.test_excludes["blank"]),
+                "results": [FILES[4]],  # + FILES[5:],
+                "output": [
+                    "   - skipping size limit check",
+                    "   - skipping exclude filenames check",
+                ],
+            },
+            # test filenames_limit trip
+            {
+                "test": "filenames_limit",
+                "params": (0, 0, self.test_excludes["filenames"]),
+                "results": FILES[:1] + FILES[2:],
+                "output": [
+                    "   - skipping size limit check",
+                    "   - skipping time limit check",
+                ],
+            },
+        ]
+
+        self.testwork = TestDisfiles()
+        self.testwork.setUp()
+        self.workdir = self.testwork.workdir
+        self.target_class = DistLimits()  # DistCheckLimits()
+        self.output = OutputSimulator(self.callback)
+        self.target_class.output = self.output
+        self.callback_data = []
+        self.test_index = 0
+
+    def tearDown(self):
+        self.testwork.tearDown()
+        # pass
+
+    def get_test(self, num):
+        return self.testdata[num]
+
+    def callback(self, id, data):
+        self.callback_data.append(data)
+
+    def set_limits(self, test):
+        limit = {}
+        # set is_reg always to testdata[0]
+        t1 = self.testdata[0]
+        limit[t1["test"]] = {}
+        name = test["test"]
+        limit[name] = {}
+        limits = test["limits"]
+        for i in range(6):
+            file = self.testwork.files[i]
+            limits = test["limits"]
+            limit[t1["test"]][file] = t1["limits"][i]
+            if name != t1["test"]:
+                limit[name][file] = limits[i]
+        return limit
+
+    def test_check_limits(self):
+        """Testing DistfilesSearch._check_limits()"""
+        # pass in output=self.output.einfo
+        self.target_class.output = self.output.einfo
+        run_callbacks = []
+        run_results = []
+        print()
+        # run the tests
+        for i in range(4):
+            clean_me = {}
+            test = self.get_test(i)
+            # print("test =", test['test'])
+            if not test:
+                print("Error getting test data for index:", i)
+            # self.target_class.set_data(self.set_limits(test))
+            size_chk, time_chk, exclude = test["params"]
+            checks = self.target_class._get_default_checks(
+                size_chk, time_chk, exclude, False
+            )
+            clean_me = self.target_class._check_limits(self.workdir, checks, clean_me)
+            results = sorted(clean_me)
+            run_results.append(results)
+            self.callback_data.sort()
+            run_callbacks.append(self.callback_data)
+            self.callback_data = []
+            results = None
+
+        # check results
+        for i in range(4):
+            test = self.get_test(i)
+            print("test =", test["test"])
+            if not test:
+                print("Error getting test data for index:", i)
+            test["results"].sort()
+            # print("actual=", run_results[i])
+            # print("should-be=", test['results'])
+            self.assertEqual(
+                run_results[i],
+                test["results"],
+                "/ntest_check_limits, test# %d, test=%s, diff=%s"
+                % (
+                    i,
+                    test["test"],
+                    str(set(run_results[i]).difference(test["results"])),
+                ),
+            )
+            test["output"].sort()
+            self.assertEqual(run_callbacks[i], test["output"])
 
 
 class TestFetchRestricted(unittest.TestCase):
-	"""Tests eclean.search.DistfilesSearch._fetch_restricted and _unrestricted
-	functions
-	"""
-
-	def setUp(self):
-		self.vardb = Dbapi(cp_all=[], cpv_all=CPVS,
-			props=PROPS, cp_list=[], name="FAKE VARDB")
-		self.portdb = Dbapi(cp_all=[], cpv_all=CPVS[:4],
-			props=get_props(CPVS[:4]), cp_list=[], name="FAKE PORTDB")
-		# set a fetch restricted pkg
-		self.portdb._props[CPVS[0]]["RESTRICT"] = 'fetch'
-		self.callback_data = []
-		self.output = self.output = OutputSimulator(self.callback)
-		self.target_class = DistfilesSearch(self.output.einfo, self.portdb, self.vardb)
-		self.target_class.portdb = self.portdb
-		self.target_class.portdb = self.portdb
-		self.results = {}
-		self.testdata = {
-			'fetch_restricted1':{
-					'deprecated':
-						{'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs':
-						{'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2'
-						},
-					'output': [
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n'
-						]
-					},
-			'fetch_restricted2':{
-					'deprecated':
-						{'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs':
-						{'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2'
-						},
-					'output': [
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
-						'   - Key Error looking up: app-portage/deprecated-pkg-1.0.0'
-						]
-					},
-			'unrestricted1':{
-					'deprecated':{
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs': {
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'sys-apps/help2man-1.37.1': 'mirror://gnu/help2man/help2man-1.37.1.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
-						]
-					},
-			'unrestricted2':{
-					'deprecated':{
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs': {
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'sys-apps/help2man-1.37.1': 'mirror://gnu/help2man/help2man-1.37.1.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
-						'   - Key Error looking up: app-portage/deprecated-pkg-1.0.0'
-						]
-					}
-			}
-
-
-	def callback(self, id, data):
-		self.callback_data.append(data)
-
-
-	def test__fetch_restricted(self):
-		self.results = {}
-		pkgs, deprecated = self.target_class._fetch_restricted(None, CPVS)
-		self.record_results('fetch_restricted1', pkgs, deprecated)
-
-		self.callback_data = []
-		cpvs = CPVS[:]
-		cpvs.append('app-portage/deprecated-pkg-1.0.0')
-		pkgs, deprecated = self.target_class._fetch_restricted(None, cpvs)
-		self.record_results('fetch_restricted2', pkgs, deprecated)
-		self.check_results("test_fetch_restricted")
-
-
-	def test_unrestricted(self):
-		self.results = {}
-		pkgs, deprecated = self.target_class._unrestricted(None, CPVS)
-		self.record_results('unrestricted1', pkgs, deprecated)
-		self.callback_data = []
-		cpvs = CPVS[:]
-		cpvs.append('app-portage/deprecated-pkg-1.0.0')
-		pkgs, deprecated = self.target_class._unrestricted(None, cpvs)
-		self.record_results('unrestricted2', pkgs, deprecated)
-		self.check_results("test_unrestricted")
-
-
-	def check_results(self, test_name):
-		print("\nChecking results for %s,............" %test_name)
-		for key in sorted(self.results):
-			testdata = self.testdata[key]
-			results = self.results[key]
-			for item in sorted(testdata):
-				if sorted(results[item]) == sorted(testdata[item]):
-					test = "OK"
-				else:
-					test = "FAILED"
-				print("comparing %s, %s" %(key, item), test)
-				self.assertEqual(sorted(testdata[item]), sorted(results[item]),
-					"\n%s: %s %s data does not match\nresult=" %(test_name, key, item) +\
-					str(results[item]) + "\ntestdata=" + str(testdata[item]))
-
-
-	def record_results(self, test, pkgs, deprecated):
-		self.results[test] = {'pkgs': pkgs,
-				'deprecated': deprecated,
-				'output': self.callback_data
-				}
-
-
-	def tearDown(self):
-		del self.portdb, self.vardb
+    """Tests eclean.search.DistfilesSearch._fetch_restricted and _unrestricted
+    functions
+    """
+
+    def setUp(self):
+        self.vardb = Dbapi(
+            cp_all=[], cpv_all=CPVS, props=PROPS, cp_list=[], name="FAKE VARDB"
+        )
+        self.portdb = Dbapi(
+            cp_all=[],
+            cpv_all=CPVS[:4],
+            props=get_props(CPVS[:4]),
+            cp_list=[],
+            name="FAKE PORTDB",
+        )
+        # set a fetch restricted pkg
+        self.portdb._props[CPVS[0]]["RESTRICT"] = "fetch"
+        self.callback_data = []
+        self.output = self.output = OutputSimulator(self.callback)
+        self.target_class = DistfilesSearch(self.output.einfo, self.portdb, self.vardb)
+        self.target_class.portdb = self.portdb
+        self.target_class.portdb = self.portdb
+        self.results = {}
+        self.testdata = {
+            "fetch_restricted1": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2"
+                },
+                "output": [
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n'
+                ],
+            },
+            "fetch_restricted2": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2"
+                },
+                "output": [
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
+                    "   - Key Error looking up: app-portage/deprecated-pkg-1.0.0",
+                ],
+            },
+            "unrestricted1": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "sys-apps/help2man-1.37.1": "mirror://gnu/help2man/help2man-1.37.1.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
+                ],
+            },
+            "unrestricted2": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "sys-apps/help2man-1.37.1": "mirror://gnu/help2man/help2man-1.37.1.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
+                    "   - Key Error looking up: app-portage/deprecated-pkg-1.0.0",
+                ],
+            },
+        }
+
+    def callback(self, id, data):
+        self.callback_data.append(data)
+
+    def test__fetch_restricted(self):
+        self.results = {}
+        pkgs, deprecated = self.target_class._fetch_restricted(None, CPVS)
+        self.record_results("fetch_restricted1", pkgs, deprecated)
+
+        self.callback_data = []
+        cpvs = CPVS[:]
+        cpvs.append("app-portage/deprecated-pkg-1.0.0")
+        pkgs, deprecated = self.target_class._fetch_restricted(None, cpvs)
+        self.record_results("fetch_restricted2", pkgs, deprecated)
+        self.check_results("test_fetch_restricted")
+
+    def test_unrestricted(self):
+        self.results = {}
+        pkgs, deprecated = self.target_class._unrestricted(None, CPVS)
+        self.record_results("unrestricted1", pkgs, deprecated)
+        self.callback_data = []
+        cpvs = CPVS[:]
+        cpvs.append("app-portage/deprecated-pkg-1.0.0")
+        pkgs, deprecated = self.target_class._unrestricted(None, cpvs)
+        self.record_results("unrestricted2", pkgs, deprecated)
+        self.check_results("test_unrestricted")
+
+    def check_results(self, test_name):
+        print("\nChecking results for %s,............" % test_name)
+        for key in sorted(self.results):
+            testdata = self.testdata[key]
+            results = self.results[key]
+            for item in sorted(testdata):
+                if sorted(results[item]) == sorted(testdata[item]):
+                    test = "OK"
+                else:
+                    test = "FAILED"
+                print("comparing %s, %s" % (key, item), test)
+                self.assertEqual(
+                    sorted(testdata[item]),
+                    sorted(results[item]),
+                    "\n%s: %s %s data does not match\nresult=" % (test_name, key, item)
+                    + str(results[item])
+                    + "\ntestdata="
+                    + str(testdata[item]),
+                )
+
+    def record_results(self, test, pkgs, deprecated):
+        self.results[test] = {
+            "pkgs": pkgs,
+            "deprecated": deprecated,
+            "output": self.callback_data,
+        }
+
+    def tearDown(self):
+        del self.portdb, self.vardb
 
 
 class TestNonDestructive(unittest.TestCase):
-	"""Tests eclean.search.DistfilesSearch._non_destructive and _destructive
-	functions, with addition useage tests of fetch_restricted() and _unrestricted()
-	"""
-
-	def setUp(self):
-		self.vardb = Dbapi(cp_all=[], cpv_all=CPVS,
-			props=PROPS, cp_list=[], name="FAKE VARDB")
-		self.portdb = Dbapi(cp_all=[], cpv_all=CPVS[:4],
-			props=get_props(CPVS[:4]), cp_list=[], name="FAKE PORTDB")
-		print(self.portdb)
-		# set a fetch restricted pkg
-		self.portdb._props[CPVS[0]]["RESTRICT"] = 'fetch'
-		self.callback_data = []
-		self.output = OutputSimulator(self.callback)
-		self.target_class = DistfilesSearch(self.output.einfo, self.portdb, self.vardb)
-		search.exclDictExpand = self.exclDictExpand
-		self.exclude = parseExcludeFile(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'distfiles.exclude'), self.output.einfo)
-		#print(self.callback_data)
-		#print(self.exclude)
-		self.callback_data = []
-		self.results = {}
-		self.testdata = {
-			'non_destructive1':{
-					'deprecated':
-						{'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs': {
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'sys-apps/help2man-1.37.1': 'mirror://gnu/help2man/help2man-1.37.1.tar.gz',
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'   - getting complete ebuild list',
-						'   - getting source file names for 5 ebuilds',
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n'
-						]
-					},
-			'non_destructive2':{
-					'deprecated': {
-						},
-					'pkgs': {
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'   - getting complete ebuild list',
-						'   - getting source file names for 3 installed ebuilds',
-						'   - getting fetch-restricted source file names for 2 remaining ebuilds'
-						]
-					},
-			'non_destructive3':{
-					'deprecated':{
-						},
-					'pkgs': {
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-						},
-					'output': [
-						'   - getting complete ebuild list',
-						'   - getting source file names for 2 installed ebuilds',
-						'   - getting fetch-restricted source file names for 3 remaining ebuilds'
-						]
-					},
-			'destructive1':{
-					'deprecated':{
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz'
-						},
-					'pkgs': {
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'sys-apps/help2man-1.37.1': 'mirror://gnu/help2man/help2man-1.37.1.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'app-emulation/emul-linux-x86-baselibs-20100220': 'mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'   - processing 5 installed ebuilds', '   - processing excluded',
-						'   - (5 of 0 total) additional excluded packages to get source filenames for',
-						'!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n'
-						]
-					},
-			'destructive2':{
-					'deprecated':{
-						},
-					'pkgs': {
-						},
-					'output': [
-						'   - processing 0 installed packages',
-						'   - processing excluded', '   - (0 of 0 total) additional excluded packages to get source filenames for'
-						]
-					},
-			'destructive3':{
-					'deprecated':{
-						},
-					'pkgs': {
-						'app-portage/gentoolkit-0.3.0_rc8-r1': 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'app-portage/gentoolkit-0.3.0_rc8': 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-						'app-portage/gentoolkit-0.2.4.6-r1': 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-						'app-portage/gentoolkit-0.3.0_rc7': 'mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz',
-						'app-portage/gentoolkit-0.2.4.6': 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-						'app-portage/eix-0.19.2': 'mirror://sourceforge/eix/eix-0.19.2.tar.xz',
-						'app-portage/gentoolkit-0.2.4.5': 'mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz',
-						'app-portage/gentoolkit-0.3.0_rc9': 'mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz',
-						'app-portage/eix-0.20.1': 'mirror://sourceforge/eix/eix-0.20.1.tar.xz',
-						'app-portage/eix-0.20.2': 'mirror://berlios/eix/eix-0.20.2.tar.xz'
-						},
-					'output': [
-						'   - processing excluded',
-						'   - (10 of 10 total) additional excluded packages to get source filenames for'
-						]
-					},
-			'destructive4':{
-					'deprecated':{
-						},
-					'pkgs': {
-						'sys-auth/consolekit-0.4.1':
-							'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'sys-apps/devicekit-power-014':
-							'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'media-libs/sdl-pango-0.1.2':
-							'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch'
-						},
-					'output': [
-						'   - processing 3 installed ebuilds',
-						'   - processing excluded',
-						'   - (3 of 0 total) additional excluded packages to get source filenames for'
-						]
-					},
-			'destructive5':{
-					'deprecated':{
-						},
-					'pkgs': {
-						'x11-base/xorg-server-1.7.5': 'http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.tar.bz2',
-						'app-portage/gentoolkit-0.3.0_rc8-r1': 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-						'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz',
-						'x11-misc/util-macros-1.6.0': 'http://xorg.freedesktop.org/releases/individual/util/util-macros-1.6.0.tar.bz2',
-						'app-portage/eix-0.19.2': 'mirror://sourceforge/eix/eix-0.19.2.tar.xz',
-						'app-portage/gentoolkit-0.3.0_rc8': 'mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz',
-						'app-portage/gentoolkit-0.2.4.6-r1': 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-						'app-portage/gentoolkit-0.3.0_rc7': 'mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz',
-						'sys-auth/consolekit-0.4.1': 'http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2',
-						'app-portage/gentoolkit-0.2.4.6': 'mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz',
-						'media-libs/sdl-pango-0.1.2': 'mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch',
-						'x11-libs/pixman-0.16.4': 'http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.4.tar.bz2',
-						'app-portage/gentoolkit-0.2.4.5': 'mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz',
-						'app-portage/gentoolkit-0.3.0_rc9': 'mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz',
-						'app-portage/eix-0.20.1': 'mirror://sourceforge/eix/eix-0.20.1.tar.xz',
-						'app-portage/eix-0.20.2': 'mirror://berlios/eix/eix-0.20.2.tar.xz'
-						},
-					'output': [
-						'   - processing 6 installed ebuilds',
-						'   - processing excluded',
-						'   - (16 of 10 total) additional excluded packages to get source filenames for'
-						]
-					}
-			}
-
-
-	def callback(self, id, data):
-		self.callback_data.append(data)
-
-
-	def exclDictExpand(self, exclude):
-		#print("Using Fake Testing exclDictExpand()")
-		return [
-			#'app-portage/layman',
-			'app-portage/eix',
-			'app-portage/gentoolkit',
-			#app-portage/portage-utils',
-			]
-
-
-	def test_non_destructive(self):
-		self.results = {}
-		pkgs, deprecated = self.target_class._non_destructive(destructive=False,
-			fetch_restricted=False, pkgs_=None)
-		self.record_results('non_destructive1', pkgs, deprecated)
-
-		pkgs = None
-		deprecated = None
-		self.callback_data = []
-		self.vardb._cpv_all=CPVS[:3]
-		self.vardb._props=get_props(CPVS[:3])
-		self.portdb._cpv_all=CPVS[:]
-		self.portdb._props=get_props(CPVS)
-		self.target_class.installed_cpvs = None
-		pkgs, deprecated = self.target_class._non_destructive(destructive=True,
-			fetch_restricted=True, pkgs_=None)
-		self.record_results('non_destructive2', pkgs, deprecated)
-
-		pkgs = None
-		deprecated = None
-		self.callback_data = []
-		self.vardb._cpv_all=CPVS[:2]
-		self.vardb._props=get_props(CPVS[:2])
-		self.portdb._cpv_all=CPVS[:]
-		self.portdb._props=get_props(CPVS)
-		# set a fetch restricted pkg
-		self.portdb._props[CPVS[4]]["RESTRICT"] = 'fetch'
-		pkgs = {'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz'}
-		pkgs, deprecated = self.target_class._non_destructive(destructive=True,
-			fetch_restricted=True, pkgs_=pkgs)
-		self.record_results('non_destructive3', pkgs, deprecated)
-		self.check_results("test_non_destructive")
-
-
-	def check_results(self, test_name):
-		print("\nChecking results for %s,............" %test_name)
-		for key in sorted(self.results):
-			testdata = self.testdata[key]
-			results = self.results[key]
-			for item in sorted(testdata):
-				if sorted(results[item]) == sorted(testdata[item]):
-					test = "OK"
-				else:
-					test = "FAILED"
-				print("comparing %s, %s..." %(key, item), test)
-				if test == "FAILED":
-					print("", sorted(results[item]), "\n",  sorted(testdata[item]))
-				self.assertEqual(sorted(testdata[item]), sorted(results[item]),
-					"\n%s: %s, %s data does not match\n"
-					%(test_name, key, item) + \
-					"result=" + str(results[item]) + "\ntestdata=" + str(testdata[item])
-				)
-
-
-	def record_results(self, test, pkgs, deprecated):
-		self.results[test] = {'pkgs': pkgs,
-				'deprecated': deprecated,
-				'output': self.callback_data
-				}
-
-	def test_destructive(self):
-		self.results = {}
-		pkgs, deprecated = self.target_class._destructive(package_names=False,
-			exclude={}, pkgs_=None, installed_included=False )
-		self.record_results('destructive1', pkgs, deprecated)
-
-		self.callback_data = []
-		self.vardb._cpv_all=CPVS[:3]
-		self.vardb._props=get_props(CPVS[:3])
-		self.portdb._cpv_all=CPVS[:]
-		self.portdb._props=get_props(CPVS)
-		pkgs, deprecated = self.target_class._destructive(package_names=True,
-			exclude={}, pkgs_=None, installed_included=False )
-		self.record_results('destructive2', pkgs, deprecated)
-
-		self.callback_data = []
-		cpvs = CPVS[2:4]
-		cpvs.extend(CPVS3)
-		self.vardb._cpv_all=sorted(cpvs)
-		self.vardb._props= PROPS.update(get_props(CPVS3))
-		self.portdb._cpv_all=sorted(CPVS + CPVS2)
-		self.portdb._props=get_props(CPVS+CPVS2)
-		# set a fetch restricted pkg
-		self.portdb._props[CPVS[4]]["RESTRICT"] = 'fetch'
-		pkgs = {'sys-apps/devicekit-power-014': 'http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz'}
-		pkgs, deprecated = self.target_class._destructive(package_names=True,
-			exclude={}, pkgs_=pkgs, installed_included=True )
-		self.record_results('destructive3', pkgs, deprecated)
-
-		self.callback_data = []
-		self.vardb._cpv_all=CPVS[:3]
-		self.vardb._props=get_props(CPVS[:3])
-		self.portdb._cpv_all=CPVS[:]
-		self.portdb._props=get_props(CPVS)
-		pkgs, deprecated = self.target_class._destructive(package_names=False,
-			exclude=self.exclude, pkgs_=None, installed_included=False )
-		self.record_results('destructive4', pkgs, deprecated)
-		self.check_results("test_destructive")
-
-		self.callback_data = []
-		self.vardb._cpv_all=CPVS[:3]
-		self.vardb._cpv_all.extend(CPVS3)
-		self.vardb._props=get_props(self.vardb._cpv_all)
-		self.portdb._cpv_all=CPVS2
-		#self.portdb._cpv_all.extend(CPVS2)
-		self.portdb._props=PROPS
-		pkgs, deprecated = self.target_class._destructive(package_names=False,
-			exclude=self.exclude, pkgs_=None, installed_included=False )
-		self.record_results('destructive5', pkgs, deprecated)
-		self.check_results("test_destructive")
-
-
-	def tearDown(self):
-		del self.portdb, self.vardb
+    """Tests eclean.search.DistfilesSearch._non_destructive and _destructive
+    functions, with additional usage tests of fetch_restricted() and _unrestricted()
+    """
+
+    def setUp(self):
+        self.vardb = Dbapi(
+            cp_all=[], cpv_all=CPVS, props=PROPS, cp_list=[], name="FAKE VARDB"
+        )
+        self.portdb = Dbapi(
+            cp_all=[],
+            cpv_all=CPVS[:4],
+            props=get_props(CPVS[:4]),
+            cp_list=[],
+            name="FAKE PORTDB",
+        )
+        print(self.portdb)
+        # set a fetch restricted pkg
+        self.portdb._props[CPVS[0]]["RESTRICT"] = "fetch"
+        self.callback_data = []
+        self.output = OutputSimulator(self.callback)
+        self.target_class = DistfilesSearch(self.output.einfo, self.portdb, self.vardb)
+        search.exclDictExpand = self.exclDictExpand
+        self.exclude = parseExcludeFile(
+            os.path.join(
+                os.path.dirname(os.path.abspath(__file__)), "distfiles.exclude"
+            ),
+            self.output.einfo,
+        )
+        # print(self.callback_data)
+        # print(self.exclude)
+        self.callback_data = []
+        self.results = {}
+        self.testdata = {
+            "non_destructive1": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "sys-apps/help2man-1.37.1": "mirror://gnu/help2man/help2man-1.37.1.tar.gz",
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    "   - getting complete ebuild list",
+                    "   - getting source file names for 5 ebuilds",
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
+                ],
+            },
+            "non_destructive2": {
+                "deprecated": {},
+                "pkgs": {
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    "   - getting complete ebuild list",
+                    "   - getting source file names for 3 installed ebuilds",
+                    "   - getting fetch-restricted source file names for 2 remaining ebuilds",
+                ],
+            },
+            "non_destructive3": {
+                "deprecated": {},
+                "pkgs": {
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+                },
+                "output": [
+                    "   - getting complete ebuild list",
+                    "   - getting source file names for 2 installed ebuilds",
+                    "   - getting fetch-restricted source file names for 3 remaining ebuilds",
+                ],
+            },
+            "destructive1": {
+                "deprecated": {
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz"
+                },
+                "pkgs": {
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "sys-apps/help2man-1.37.1": "mirror://gnu/help2man/help2man-1.37.1.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "app-emulation/emul-linux-x86-baselibs-20100220": "mirror://gentoo/emul-linux-x86-baselibs-20100220.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    "   - processing 5 installed ebuilds",
+                    "   - processing excluded",
+                    "   - (5 of 0 total) additional excluded packages to get source filenames for",
+                    '!!! "Deprecation Warning: Installed package: app-emulation/emul-linux-x86-baselibs-20100220\n\tIs no longer in the tree or an installed overlay\n',
+                ],
+            },
+            "destructive2": {
+                "deprecated": {},
+                "pkgs": {},
+                "output": [
+                    "   - processing 0 installed packages",
+                    "   - processing excluded",
+                    "   - (0 of 0 total) additional excluded packages to get source filenames for",
+                ],
+            },
+            "destructive3": {
+                "deprecated": {},
+                "pkgs": {
+                    "app-portage/gentoolkit-0.3.0_rc8-r1": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "app-portage/gentoolkit-0.3.0_rc8": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+                    "app-portage/gentoolkit-0.2.4.6-r1": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+                    "app-portage/gentoolkit-0.3.0_rc7": "mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz",
+                    "app-portage/gentoolkit-0.2.4.6": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+                    "app-portage/eix-0.19.2": "mirror://sourceforge/eix/eix-0.19.2.tar.xz",
+                    "app-portage/gentoolkit-0.2.4.5": "mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz",
+                    "app-portage/gentoolkit-0.3.0_rc9": "mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz",
+                    "app-portage/eix-0.20.1": "mirror://sourceforge/eix/eix-0.20.1.tar.xz",
+                    "app-portage/eix-0.20.2": "mirror://berlios/eix/eix-0.20.2.tar.xz",
+                },
+                "output": [
+                    "   - processing excluded",
+                    "   - (10 of 10 total) additional excluded packages to get source filenames for",
+                ],
+            },
+            "destructive4": {
+                "deprecated": {},
+                "pkgs": {
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                },
+                "output": [
+                    "   - processing 3 installed ebuilds",
+                    "   - processing excluded",
+                    "   - (3 of 0 total) additional excluded packages to get source filenames for",
+                ],
+            },
+            "destructive5": {
+                "deprecated": {},
+                "pkgs": {
+                    "x11-base/xorg-server-1.7.5": "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.7.5.tar.bz2",
+                    "app-portage/gentoolkit-0.3.0_rc8-r1": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+                    "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz",
+                    "x11-misc/util-macros-1.6.0": "http://xorg.freedesktop.org/releases/individual/util/util-macros-1.6.0.tar.bz2",
+                    "app-portage/eix-0.19.2": "mirror://sourceforge/eix/eix-0.19.2.tar.xz",
+                    "app-portage/gentoolkit-0.3.0_rc8": "mirror://gentoo/gentoolkit-0.3.0_rc8.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc8.tar.gz",
+                    "app-portage/gentoolkit-0.2.4.6-r1": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+                    "app-portage/gentoolkit-0.3.0_rc7": "mirror://gentoo/gentoolkit-0.3.0_rc7.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc7.tar.gz",
+                    "sys-auth/consolekit-0.4.1": "http://www.freedesktop.org/software/ConsoleKit/dist/ConsoleKit-0.4.1.tar.bz2",
+                    "app-portage/gentoolkit-0.2.4.6": "mirror://gentoo/gentoolkit-0.2.4.6.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.6.tar.gz",
+                    "media-libs/sdl-pango-0.1.2": "mirror://sourceforge/sdlpango/SDL_Pango-0.1.2.tar.gz http://zarb.org/~gc/t/SDL_Pango-0.1.2-API-adds.patch",
+                    "x11-libs/pixman-0.16.4": "http://xorg.freedesktop.org/releases/individual/lib/pixman-0.16.4.tar.bz2",
+                    "app-portage/gentoolkit-0.2.4.5": "mirror://gentoo/gentoolkit-0.2.4.5.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.2.4.5.tar.gz",
+                    "app-portage/gentoolkit-0.3.0_rc9": "mirror://gentoo/gentoolkit-0.3.0_rc9.tar.gz http://dev.gentoo.org/~fuzzyray/distfiles/gentoolkit-0.3.0_rc9.tar.gz",
+                    "app-portage/eix-0.20.1": "mirror://sourceforge/eix/eix-0.20.1.tar.xz",
+                    "app-portage/eix-0.20.2": "mirror://berlios/eix/eix-0.20.2.tar.xz",
+                },
+                "output": [
+                    "   - processing 6 installed ebuilds",
+                    "   - processing excluded",
+                    "   - (16 of 10 total) additional excluded packages to get source filenames for",
+                ],
+            },
+        }
+
+    def callback(self, id, data):
+        self.callback_data.append(data)
+
+    def exclDictExpand(self, exclude):
+        # print("Using Fake Testing exclDictExpand()")
+        return [
+            # 'app-portage/layman',
+            "app-portage/eix",
+            "app-portage/gentoolkit",
+            # app-portage/portage-utils',
+        ]
+
+    def test_non_destructive(self):
+        self.results = {}
+        pkgs, deprecated = self.target_class._non_destructive(
+            destructive=False, fetch_restricted=False, pkgs_=None
+        )
+        self.record_results("non_destructive1", pkgs, deprecated)
+
+        pkgs = None
+        deprecated = None
+        self.callback_data = []
+        self.vardb._cpv_all = CPVS[:3]
+        self.vardb._props = get_props(CPVS[:3])
+        self.portdb._cpv_all = CPVS[:]
+        self.portdb._props = get_props(CPVS)
+        self.target_class.installed_cpvs = None
+        pkgs, deprecated = self.target_class._non_destructive(
+            destructive=True, fetch_restricted=True, pkgs_=None
+        )
+        self.record_results("non_destructive2", pkgs, deprecated)
+
+        pkgs = None
+        deprecated = None
+        self.callback_data = []
+        self.vardb._cpv_all = CPVS[:2]
+        self.vardb._props = get_props(CPVS[:2])
+        self.portdb._cpv_all = CPVS[:]
+        self.portdb._props = get_props(CPVS)
+        # set a fetch restricted pkg
+        self.portdb._props[CPVS[4]]["RESTRICT"] = "fetch"
+        pkgs = {
+            "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz"
+        }
+        pkgs, deprecated = self.target_class._non_destructive(
+            destructive=True, fetch_restricted=True, pkgs_=pkgs
+        )
+        self.record_results("non_destructive3", pkgs, deprecated)
+        self.check_results("test_non_destructive")
+
+    def check_results(self, test_name):
+        print("\nChecking results for %s,............" % test_name)
+        for key in sorted(self.results):
+            testdata = self.testdata[key]
+            results = self.results[key]
+            for item in sorted(testdata):
+                if sorted(results[item]) == sorted(testdata[item]):
+                    test = "OK"
+                else:
+                    test = "FAILED"
+                print("comparing %s, %s..." % (key, item), test)
+                if test == "FAILED":
+                    print("", sorted(results[item]), "\n", sorted(testdata[item]))
+                self.assertEqual(
+                    sorted(testdata[item]),
+                    sorted(results[item]),
+                    "\n%s: %s, %s data does not match\n" % (test_name, key, item)
+                    + "result="
+                    + str(results[item])
+                    + "\ntestdata="
+                    + str(testdata[item]),
+                )
+
+    def record_results(self, test, pkgs, deprecated):
+        self.results[test] = {
+            "pkgs": pkgs,
+            "deprecated": deprecated,
+            "output": self.callback_data,
+        }
+
+    def test_destructive(self):
+        self.results = {}
+        pkgs, deprecated = self.target_class._destructive(
+            package_names=False, exclude={}, pkgs_=None, installed_included=False
+        )
+        self.record_results("destructive1", pkgs, deprecated)
+
+        self.callback_data = []
+        self.vardb._cpv_all = CPVS[:3]
+        self.vardb._props = get_props(CPVS[:3])
+        self.portdb._cpv_all = CPVS[:]
+        self.portdb._props = get_props(CPVS)
+        pkgs, deprecated = self.target_class._destructive(
+            package_names=True, exclude={}, pkgs_=None, installed_included=False
+        )
+        self.record_results("destructive2", pkgs, deprecated)
+
+        self.callback_data = []
+        cpvs = CPVS[2:4]
+        cpvs.extend(CPVS3)
+        self.vardb._cpv_all = sorted(cpvs)
+        self.vardb._props = PROPS.update(get_props(CPVS3))
+        self.portdb._cpv_all = sorted(CPVS + CPVS2)
+        self.portdb._props = get_props(CPVS + CPVS2)
+        # set a fetch restricted pkg
+        self.portdb._props[CPVS[4]]["RESTRICT"] = "fetch"
+        pkgs = {
+            "sys-apps/devicekit-power-014": "http://hal.freedesktop.org/releases/DeviceKit-power-014.tar.gz"
+        }
+        pkgs, deprecated = self.target_class._destructive(
+            package_names=True, exclude={}, pkgs_=pkgs, installed_included=True
+        )
+        self.record_results("destructive3", pkgs, deprecated)
+
+        self.callback_data = []
+        self.vardb._cpv_all = CPVS[:3]
+        self.vardb._props = get_props(CPVS[:3])
+        self.portdb._cpv_all = CPVS[:]
+        self.portdb._props = get_props(CPVS)
+        pkgs, deprecated = self.target_class._destructive(
+            package_names=False,
+            exclude=self.exclude,
+            pkgs_=None,
+            installed_included=False,
+        )
+        self.record_results("destructive4", pkgs, deprecated)
+        self.check_results("test_destructive")
+
+        self.callback_data = []
+        self.vardb._cpv_all = CPVS[:3]
+        self.vardb._cpv_all.extend(CPVS3)
+        self.vardb._props = get_props(self.vardb._cpv_all)
+        self.portdb._cpv_all = CPVS2
+        # self.portdb._cpv_all.extend(CPVS2)
+        self.portdb._props = PROPS
+        pkgs, deprecated = self.target_class._destructive(
+            package_names=False,
+            exclude=self.exclude,
+            pkgs_=None,
+            installed_included=False,
+        )
+        self.record_results("destructive5", pkgs, deprecated)
+        self.check_results("test_destructive")
+
+    def tearDown(self):
+        del self.portdb, self.vardb
 
 
 class TestRemoveProtected(unittest.TestCase):
-	"""tests the  eclean.search.DistfilesSearch._remove_protected()
-	"""
+    """tests the  eclean.search.DistfilesSearch._remove_protected()"""
+
+    def setUp(self):
+        self.target_class = DistfilesSearch(lambda x: None)
+        self.results = {
+            "layman-1.2.5.tar.gz": "/path/to/some/where/layman-1.2.5.tar.gz"
+        }
+
+    def test_remove_protected(self):
+        results = self.target_class._remove_protected(PKGS, CLEAN_ME)
+        self.assertEqual(
+            results,
+            self.results,
+            "\ntest_remove_protected: data does not match\nresult="
+            + str(results)
+            + "\ntestdata="
+            + str(self.results),
+        )
 
-	def setUp(self):
-		self.target_class = DistfilesSearch(lambda x: None)
-		self.results = {'layman-1.2.5.tar.gz': '/path/to/some/where/layman-1.2.5.tar.gz'}
 
-	def test_remove_protected(self):
-		results = self.target_class._remove_protected(PKGS, CLEAN_ME)
-		self.assertEqual(results, self.results,
-			"\ntest_remove_protected: data does not match\nresult=" +\
-			str(results) + "\ntestdata=" + str(self.results))
+def test_main():
+    suite = unittest.TestLoader()
+    suite.loadTestsFromTestCase(TestCheckLimits)
+    suite.loadTestsFromTestCase(TestFetchRestricted)
+    suite.loadTestsFromTestCase(TestNonDestructive)
+    suite.loadTestsFromTestCase(TestRemoveProtected)
+    unittest.TextTestRunner(verbosity=2).run(suite)
 
 
-def test_main():
-	suite = unittest.TestLoader()
-	suite.loadTestsFromTestCase(TestCheckLimits)
-	suite.loadTestsFromTestCase(TestFetchRestricted)
-	suite.loadTestsFromTestCase(TestNonDestructive)
-	suite.loadTestsFromTestCase(TestRemoveProtected)
-	unittest.TextTestRunner(verbosity=2).run(suite)
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/equery/test_init.py b/pym/gentoolkit/test/equery/test_init.py
index 4cad22e..c97c0fd 100644
--- a/pym/gentoolkit/test/equery/test_init.py
+++ b/pym/gentoolkit/test/equery/test_init.py
@@ -2,47 +2,47 @@ import unittest
 
 from gentoolkit import equery
 
-class TestEqueryInit(unittest.TestCase):
 
-	def setUp(self):
-		pass
-
-	def tearDown(self):
-		pass
-
-	def test_expand_module_name(self):
-		# Test that module names are properly expanded
-		name_map = {
-			'a': 'has',
-			'b': 'belongs',
-			'k': 'check',
-			'd': 'depends',
-			'g': 'depgraph',
-			'f': 'files',
-			'h': 'hasuse',
-			'y': 'keywords',
-			'l': 'list_',
-			'm': 'meta',
-			's': 'size',
-			'u': 'uses',
-			'w': 'which'
-		}
-		self.assertEqual(equery.NAME_MAP, name_map)
-		for short_name, long_name in zip(name_map, name_map.values()):
-			self.assertEqual(equery.expand_module_name(short_name),
-				long_name)
-			self.assertEqual(equery.expand_module_name(long_name),
-				long_name)
-		unused_keys = set(map(chr, range(0, 256))).difference(name_map.keys())
-		for key in unused_keys:
-			self.assertRaises(KeyError, equery.expand_module_name, key)
+class TestEqueryInit(unittest.TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_expand_module_name(self):
+        # Test that module names are properly expanded
+        name_map = {
+            "a": "has",
+            "b": "belongs",
+            "k": "check",
+            "d": "depends",
+            "g": "depgraph",
+            "f": "files",
+            "h": "hasuse",
+            "y": "keywords",
+            "l": "list_",
+            "m": "meta",
+            "s": "size",
+            "u": "uses",
+            "w": "which",
+        }
+        self.assertEqual(equery.NAME_MAP, name_map)
+        for short_name, long_name in zip(name_map, name_map.values()):
+            self.assertEqual(equery.expand_module_name(short_name), long_name)
+            self.assertEqual(equery.expand_module_name(long_name), long_name)
+        unused_keys = set(map(chr, range(0, 256))).difference(name_map.keys())
+        for key in unused_keys:
+            self.assertRaises(KeyError, equery.expand_module_name, key)
 
 
 def test_main():
-	suite = unittest.TestLoader().loadTestsFromTestCase(TestEqueryInit)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestEqueryInit)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_atom.py b/pym/gentoolkit/test/test_atom.py
index 6df52a9..f5a2a4b 100644
--- a/pym/gentoolkit/test/test_atom.py
+++ b/pym/gentoolkit/test/test_atom.py
@@ -10,132 +10,138 @@ from gentoolkit.cpv import CPV
 
 """Atom test suite (verbatim) from pkgcore."""
 
-class TestGentoolkitAtom(unittest.TestCase):
 
-	def assertEqual2(self, o1, o2):
-		# logic bugs hidden behind short circuiting comparisons for metadata
-		# is why we test the comparison *both* ways.
-		self.assertEqual(o1, o2)
-		self.assertEqual(o2, o1)
-
-	def assertNotEqual2(self, o1, o2):
-		# is why we test the comparison *both* ways.
-		self.assertNotEqual(o1, o2)
-		self.assertNotEqual(o2, o1)
-
-	def test_comparison(self):
-		self.assertEqual2(Atom('cat/pkg'), Atom('cat/pkg'))
-		self.assertNotEqual2(Atom('cat/pkg'), Atom('cat/pkgb'))
-		self.assertNotEqual2(Atom('cata/pkg'), Atom('cat/pkg'))
-		self.assertNotEqual2(Atom('cat/pkg'), Atom('!cat/pkg'))
-		self.assertEqual2(Atom('!cat/pkg'), Atom('!cat/pkg'))
-		self.assertNotEqual2(Atom('=cat/pkg-0.1:0'),
-			Atom('=cat/pkg-0.1'))
-		self.assertNotEqual2(Atom('=cat/pkg-1[foon]'),
-			Atom('=cat/pkg-1'))
-		self.assertEqual2(Atom('=cat/pkg-0'), Atom('=cat/pkg-0'))
-		self.assertNotEqual2(Atom('<cat/pkg-2'), Atom('>cat/pkg-2'))
-		self.assertNotEqual2(Atom('=cat/pkg-2*'), Atom('=cat/pkg-2'))
-		# Portage Atom doesn't have 'negate_version' capability
-		#self.assertNotEqual2(Atom('=cat/pkg-2', True), Atom('=cat/pkg-2'))
-
-		# use...
-		self.assertNotEqual2(Atom('cat/pkg[foo]'), Atom('cat/pkg'))
-		self.assertNotEqual2(Atom('cat/pkg[foo]'),
-							 Atom('cat/pkg[-foo]'))
-		self.assertEqual2(Atom('cat/pkg[foo,-bar]'),
-						  Atom('cat/pkg[-bar,foo]'))
-
-		# repoid not supported by Portage Atom yet
-		## repoid
-		#self.assertEqual2(Atom('cat/pkg::a'), Atom('cat/pkg::a'))
-		#self.assertNotEqual2(Atom('cat/pkg::a'), Atom('cat/pkg::b'))
-		#self.assertNotEqual2(Atom('cat/pkg::a'), Atom('cat/pkg'))
-
-		# slots.
-		self.assertNotEqual2(Atom('cat/pkg:1'), Atom('cat/pkg'))
-		self.assertEqual2(Atom('cat/pkg:2'), Atom('cat/pkg:2'))
-		# http://dev.gentoo.org/~tanderson/pms/eapi-2-approved/pms.html#x1-190002.1.2
-		self.assertEqual2(Atom('cat/pkg:AZaz09+_.-'), Atom('cat/pkg:AZaz09+_.-'))
-		for lesser, greater in (('0.1', '1'), ('1', '1-r1'), ('1.1', '1.2')):
-			self.assertTrue(Atom('=d/b-%s' % lesser) <
-				Atom('=d/b-%s' % greater),
-				msg="d/b-%s < d/b-%s" % (lesser, greater))
-			self.assertFalse(Atom('=d/b-%s' % lesser) >
-				Atom('=d/b-%s' % greater),
-				msg="!: d/b-%s < d/b-%s" % (lesser, greater))
-			self.assertTrue(Atom('=d/b-%s' % greater) >
-				Atom('=d/b-%s' % lesser),
-				msg="d/b-%s > d/b-%s" % (greater, lesser))
-			self.assertFalse(Atom('=d/b-%s' % greater) <
-				Atom('=d/b-%s' % lesser),
-				msg="!: d/b-%s > d/b-%s" % (greater, lesser))
-
-		#self.assertTrue(Atom("!!=d/b-1", eapi=2) > Atom("!=d/b-1"))
-		self.assertTrue(Atom("!=d/b-1") < Atom("!!=d/b-1"))
-		self.assertEqual(Atom("!=d/b-1"), Atom("!=d/b-1"))
-
-	def test_intersects(self):
-		for this, that, result in [
-			('cat/pkg', 'pkg/cat', False),
-			('cat/pkg', 'cat/pkg', True),
-			('cat/pkg:1', 'cat/pkg:1', True),
-			('cat/pkg:1', 'cat/pkg:2', False),
-			('cat/pkg:1', 'cat/pkg[foo]', True),
-			('cat/pkg[foo]', 'cat/pkg[-bar]', True),
-			('cat/pkg[foo]', 'cat/pkg[-foo]', False),
-			('>cat/pkg-3', '>cat/pkg-1', True),
-			('>cat/pkg-3', '<cat/pkg-3', False),
-			('>=cat/pkg-3', '<cat/pkg-3', False),
-			('>cat/pkg-2', '=cat/pkg-2*', True),
-			# Portage vercmp disagrees with this one:
-			#('<cat/pkg-2_alpha1', '=cat/pkg-2*', True),
-			('=cat/pkg-2', '=cat/pkg-2', True),
-			('=cat/pkg-3', '=cat/pkg-2', False),
-			('=cat/pkg-2', '>cat/pkg-2', False),
-			('=cat/pkg-2', '>=cat/pkg-2', True),
-			('~cat/pkg-2', '~cat/pkg-2', True),
-			('~cat/pkg-2', '~cat/pkg-2.1', False),
-			('=cat/pkg-2*', '=cat/pkg-2.3*', True),
-			('>cat/pkg-2.4', '=cat/pkg-2*', True),
-			('<cat/pkg-2.4', '=cat/pkg-2*', True),
-			('<cat/pkg-1', '=cat/pkg-2*', False),
-			('~cat/pkg-2', '>cat/pkg-2-r1', True),
-			('~cat/pkg-2', '<=cat/pkg-2', True),
-			('=cat/pkg-2-r2*', '<=cat/pkg-2-r20', True),
-			('=cat/pkg-2-r2*', '<cat/pkg-2-r20', True),
-			('=cat/pkg-2-r2*', '<=cat/pkg-2-r2', True),
-			('~cat/pkg-2', '<cat/pkg-2', False),
-			('=cat/pkg-1-r10*', '~cat/pkg-1', True),
-			('=cat/pkg-1-r1*', '<cat/pkg-1-r1', False),
-			('=cat/pkg-1*', '>cat/pkg-2', False),
-			('>=cat/pkg-8.4', '=cat/pkg-8.3.4*', False),
-			('cat/pkg::gentoo', 'cat/pkg', True),
-			('cat/pkg::gentoo', 'cat/pkg::foo', False),
-			('=sys-devel/gcc-4.1.1-r3', '=sys-devel/gcc-3.3*', False),
-			('=sys-libs/db-4*', '~sys-libs/db-4.3.29', True),
-		]:
-			this_atom = Atom(this)
-			that_atom = Atom(that)
-			self.assertEqual(
-				result, this_atom.intersects(that_atom),
-				'%s intersecting %s should be %s' % (this, that, result))
-			self.assertEqual(
-				result, that_atom.intersects(this_atom),
-				'%s intersecting %s should be %s' % (that, this, result))
-
-	def test_intersects_nameonly(self):
-		atom = Atom('cat/pkg')
-		self.assertTrue(atom.intersects(CPV('pkg')))
-		self.assertFalse(atom.intersects(CPV('other')))
-		self.assertFalse(atom.intersects(CPV('dkg')))
+class TestGentoolkitAtom(unittest.TestCase):
+    def assertEqual2(self, o1, o2):
+        # logic bugs hidden behind short circuiting comparisons for metadata
+        # is why we test the comparison *both* ways.
+        self.assertEqual(o1, o2)
+        self.assertEqual(o2, o1)
+
+    def assertNotEqual2(self, o1, o2):
+        # is why we test the comparison *both* ways.
+        self.assertNotEqual(o1, o2)
+        self.assertNotEqual(o2, o1)
+
+    def test_comparison(self):
+        self.assertEqual2(Atom("cat/pkg"), Atom("cat/pkg"))
+        self.assertNotEqual2(Atom("cat/pkg"), Atom("cat/pkgb"))
+        self.assertNotEqual2(Atom("cata/pkg"), Atom("cat/pkg"))
+        self.assertNotEqual2(Atom("cat/pkg"), Atom("!cat/pkg"))
+        self.assertEqual2(Atom("!cat/pkg"), Atom("!cat/pkg"))
+        self.assertNotEqual2(Atom("=cat/pkg-0.1:0"), Atom("=cat/pkg-0.1"))
+        self.assertNotEqual2(Atom("=cat/pkg-1[foon]"), Atom("=cat/pkg-1"))
+        self.assertEqual2(Atom("=cat/pkg-0"), Atom("=cat/pkg-0"))
+        self.assertNotEqual2(Atom("<cat/pkg-2"), Atom(">cat/pkg-2"))
+        self.assertNotEqual2(Atom("=cat/pkg-2*"), Atom("=cat/pkg-2"))
+        # Portage Atom doesn't have 'negate_version' capability
+        # self.assertNotEqual2(Atom('=cat/pkg-2', True), Atom('=cat/pkg-2'))
+
+        # use...
+        self.assertNotEqual2(Atom("cat/pkg[foo]"), Atom("cat/pkg"))
+        self.assertNotEqual2(Atom("cat/pkg[foo]"), Atom("cat/pkg[-foo]"))
+        self.assertEqual2(Atom("cat/pkg[foo,-bar]"), Atom("cat/pkg[-bar,foo]"))
+
+        # repoid not supported by Portage Atom yet
+        # repoid
+        # self.assertEqual2(Atom('cat/pkg::a'), Atom('cat/pkg::a'))
+        # self.assertNotEqual2(Atom('cat/pkg::a'), Atom('cat/pkg::b'))
+        # self.assertNotEqual2(Atom('cat/pkg::a'), Atom('cat/pkg'))
+
+        # slots.
+        self.assertNotEqual2(Atom("cat/pkg:1"), Atom("cat/pkg"))
+        self.assertEqual2(Atom("cat/pkg:2"), Atom("cat/pkg:2"))
+        # http://dev.gentoo.org/~tanderson/pms/eapi-2-approved/pms.html#x1-190002.1.2
+        self.assertEqual2(Atom("cat/pkg:AZaz09+_.-"), Atom("cat/pkg:AZaz09+_.-"))
+        for lesser, greater in (("0.1", "1"), ("1", "1-r1"), ("1.1", "1.2")):
+            self.assertTrue(
+                Atom("=d/b-%s" % lesser) < Atom("=d/b-%s" % greater),
+                msg="d/b-%s < d/b-%s" % (lesser, greater),
+            )
+            self.assertFalse(
+                Atom("=d/b-%s" % lesser) > Atom("=d/b-%s" % greater),
+                msg="!: d/b-%s < d/b-%s" % (lesser, greater),
+            )
+            self.assertTrue(
+                Atom("=d/b-%s" % greater) > Atom("=d/b-%s" % lesser),
+                msg="d/b-%s > d/b-%s" % (greater, lesser),
+            )
+            self.assertFalse(
+                Atom("=d/b-%s" % greater) < Atom("=d/b-%s" % lesser),
+                msg="!: d/b-%s > d/b-%s" % (greater, lesser),
+            )
+
+        # self.assertTrue(Atom("!!=d/b-1", eapi=2) > Atom("!=d/b-1"))
+        self.assertTrue(Atom("!=d/b-1") < Atom("!!=d/b-1"))
+        self.assertEqual(Atom("!=d/b-1"), Atom("!=d/b-1"))
+
+    def test_intersects(self):
+        for this, that, result in [
+            ("cat/pkg", "pkg/cat", False),
+            ("cat/pkg", "cat/pkg", True),
+            ("cat/pkg:1", "cat/pkg:1", True),
+            ("cat/pkg:1", "cat/pkg:2", False),
+            ("cat/pkg:1", "cat/pkg[foo]", True),
+            ("cat/pkg[foo]", "cat/pkg[-bar]", True),
+            ("cat/pkg[foo]", "cat/pkg[-foo]", False),
+            (">cat/pkg-3", ">cat/pkg-1", True),
+            (">cat/pkg-3", "<cat/pkg-3", False),
+            (">=cat/pkg-3", "<cat/pkg-3", False),
+            (">cat/pkg-2", "=cat/pkg-2*", True),
+            # Portage vercmp disagrees with this one:
+            # ('<cat/pkg-2_alpha1', '=cat/pkg-2*', True),
+            ("=cat/pkg-2", "=cat/pkg-2", True),
+            ("=cat/pkg-3", "=cat/pkg-2", False),
+            ("=cat/pkg-2", ">cat/pkg-2", False),
+            ("=cat/pkg-2", ">=cat/pkg-2", True),
+            ("~cat/pkg-2", "~cat/pkg-2", True),
+            ("~cat/pkg-2", "~cat/pkg-2.1", False),
+            ("=cat/pkg-2*", "=cat/pkg-2.3*", True),
+            (">cat/pkg-2.4", "=cat/pkg-2*", True),
+            ("<cat/pkg-2.4", "=cat/pkg-2*", True),
+            ("<cat/pkg-1", "=cat/pkg-2*", False),
+            ("~cat/pkg-2", ">cat/pkg-2-r1", True),
+            ("~cat/pkg-2", "<=cat/pkg-2", True),
+            ("=cat/pkg-2-r2*", "<=cat/pkg-2-r20", True),
+            ("=cat/pkg-2-r2*", "<cat/pkg-2-r20", True),
+            ("=cat/pkg-2-r2*", "<=cat/pkg-2-r2", True),
+            ("~cat/pkg-2", "<cat/pkg-2", False),
+            ("=cat/pkg-1-r10*", "~cat/pkg-1", True),
+            ("=cat/pkg-1-r1*", "<cat/pkg-1-r1", False),
+            ("=cat/pkg-1*", ">cat/pkg-2", False),
+            (">=cat/pkg-8.4", "=cat/pkg-8.3.4*", False),
+            ("cat/pkg::gentoo", "cat/pkg", True),
+            ("cat/pkg::gentoo", "cat/pkg::foo", False),
+            ("=sys-devel/gcc-4.1.1-r3", "=sys-devel/gcc-3.3*", False),
+            ("=sys-libs/db-4*", "~sys-libs/db-4.3.29", True),
+        ]:
+            this_atom = Atom(this)
+            that_atom = Atom(that)
+            self.assertEqual(
+                result,
+                this_atom.intersects(that_atom),
+                "%s intersecting %s should be %s" % (this, that, result),
+            )
+            self.assertEqual(
+                result,
+                that_atom.intersects(this_atom),
+                "%s intersecting %s should be %s" % (that, this, result),
+            )
+
+    def test_intersects_nameonly(self):
+        atom = Atom("cat/pkg")
+        self.assertTrue(atom.intersects(CPV("pkg")))
+        self.assertFalse(atom.intersects(CPV("other")))
+        self.assertFalse(atom.intersects(CPV("dkg")))
 
 
 def test_main():
-		suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitAtom)
-		unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitAtom)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_cpv.py b/pym/gentoolkit/test/test_cpv.py
index 92ffba5..c45e37a 100644
--- a/pym/gentoolkit/test/test_cpv.py
+++ b/pym/gentoolkit/test/test_cpv.py
@@ -8,117 +8,132 @@ import unittest
 
 from gentoolkit.cpv import CPV, compare_strs
 
-class TestGentoolkitCPV(unittest.TestCase):
 
-	def assertEqual2(self, o1, o2):
-		# logic bugs hidden behind short circuiting comparisons for metadata
-		# is why we test the comparison *both* ways.
-		self.assertEqual(o1, o2)
-		self.assertEqual(o2, o1)
+class TestGentoolkitCPV(unittest.TestCase):
+    def assertEqual2(self, o1, o2):
+        # logic bugs hidden behind short circuiting comparisons for metadata
+        # is why we test the comparison *both* ways.
+        self.assertEqual(o1, o2)
+        self.assertEqual(o2, o1)
 
-	def assertNotEqual2(self, o1, o2):
-		# is why we test the comparison *both* ways.
-		self.assertNotEqual(o1, o2)
-		self.assertNotEqual(o2, o1)
+    def assertNotEqual2(self, o1, o2):
+        # is why we test the comparison *both* ways.
+        self.assertNotEqual(o1, o2)
+        self.assertNotEqual(o2, o1)
 
-	def test_comparison(self):
-		self.assertEqual2(CPV('pkg'), CPV('pkg'))
-		self.assertNotEqual2(CPV('pkg'), CPV('pkg1'))
-		self.assertEqual2(CPV('cat/pkg'), CPV('cat/pkg'))
-		self.assertNotEqual2(CPV('cat/pkg'), CPV('cat/pkgb'))
-		self.assertNotEqual2(CPV('cata/pkg'), CPV('cat/pkg'))
-		self.assertEqual2(CPV('cat/pkg-0.1'), CPV('cat/pkg-0.1'))
-		self.assertNotEqual2(CPV('cat/pkg-1.0'), CPV('cat/pkg-1'))
-		self.assertEqual2(CPV('cat/pkg-0'), CPV('cat/pkg-0'))
-		self.assertEqual2(CPV('cat/pkg-1-r1'), CPV('cat/pkg-1-r1'))
-		self.assertNotEqual2(CPV('cat/pkg-2-r1'), CPV('cat/pkg-2-r10'))
-		self.assertEqual2(CPV('cat/pkg-1_rc2'), CPV('cat/pkg-1_rc2'))
-		self.assertNotEqual2(CPV('cat/pkg-2_rc2-r1'), CPV('cat/pkg-2_rc1-r1'))
+    def test_comparison(self):
+        self.assertEqual2(CPV("pkg"), CPV("pkg"))
+        self.assertNotEqual2(CPV("pkg"), CPV("pkg1"))
+        self.assertEqual2(CPV("cat/pkg"), CPV("cat/pkg"))
+        self.assertNotEqual2(CPV("cat/pkg"), CPV("cat/pkgb"))
+        self.assertNotEqual2(CPV("cata/pkg"), CPV("cat/pkg"))
+        self.assertEqual2(CPV("cat/pkg-0.1"), CPV("cat/pkg-0.1"))
+        self.assertNotEqual2(CPV("cat/pkg-1.0"), CPV("cat/pkg-1"))
+        self.assertEqual2(CPV("cat/pkg-0"), CPV("cat/pkg-0"))
+        self.assertEqual2(CPV("cat/pkg-1-r1"), CPV("cat/pkg-1-r1"))
+        self.assertNotEqual2(CPV("cat/pkg-2-r1"), CPV("cat/pkg-2-r10"))
+        self.assertEqual2(CPV("cat/pkg-1_rc2"), CPV("cat/pkg-1_rc2"))
+        self.assertNotEqual2(CPV("cat/pkg-2_rc2-r1"), CPV("cat/pkg-2_rc1-r1"))
 
-	def test_compare_strs(self):
-		# Test ordering of package strings, Portage has test for vercmp,
-		# so just do the rest
-		version_tests = [
-			# different categories
-			('sys-apps/portage-2.1.6.8', 'sys-auth/pambase-20080318'),
-			# different package names
-			('sys-apps/pkgcore-0.4.7.15-r1', 'sys-apps/portage-2.1.6.8'),
-			# different package versions
-			('sys-apps/portage-2.1.6.8', 'sys-apps/portage-2.2_rc25')
-		]
-		# Check less than
-		for vt in version_tests:
-			self.assertTrue(compare_strs(vt[0], vt[1]) == -1)
-		# Check greater than
-		for vt in version_tests:
-			self.assertTrue(compare_strs(vt[1], vt[0]) == 1)
-		# Check equal
-		vt = ('sys-auth/pambase-20080318', 'sys-auth/pambase-20080318')
-		self.assertTrue(compare_strs(vt[0], vt[1]) == 0)
+    def test_compare_strs(self):
+        # Test ordering of package strings, Portage has test for vercmp,
+        # so just do the rest
+        version_tests = [
+            # different categories
+            ("sys-apps/portage-2.1.6.8", "sys-auth/pambase-20080318"),
+            # different package names
+            ("sys-apps/pkgcore-0.4.7.15-r1", "sys-apps/portage-2.1.6.8"),
+            # different package versions
+            ("sys-apps/portage-2.1.6.8", "sys-apps/portage-2.2_rc25"),
+        ]
+        # Check less than
+        for vt in version_tests:
+            self.assertTrue(compare_strs(vt[0], vt[1]) == -1)
+        # Check greater than
+        for vt in version_tests:
+            self.assertTrue(compare_strs(vt[1], vt[0]) == 1)
+        # Check equal
+        vt = ("sys-auth/pambase-20080318", "sys-auth/pambase-20080318")
+        self.assertTrue(compare_strs(vt[0], vt[1]) == 0)
 
-	def test_chunk_splitting(self):
-		all_tests = [
-			# simple
-			('sys-apps/portage-2.2', {
-				'category': 'sys-apps',
-				'name': 'portage',
-				'cp': 'sys-apps/portage',
-				'version': '2.2',
-				'revision': '',
-				'fullversion': '2.2'
-			}),
-			# with rc
-			('sys-apps/portage-2.2_rc10', {
-				'category': 'sys-apps',
-				'name': 'portage',
-				'cp': 'sys-apps/portage',
-				'version': '2.2_rc10',
-				'revision': '',
-				'fullversion': '2.2_rc10'
-			}),
-			# with revision
-			('sys-apps/portage-2.2_rc10-r1', {
-				'category': 'sys-apps',
-				'name': 'portage',
-				'cp': 'sys-apps/portage',
-				'version': '2.2_rc10',
-				'revision': 'r1',
-				'fullversion': '2.2_rc10-r1'
-			}),
-			# with dash (-) in name (Bug #316961)
-			('c-portage', {
-				'category': '',
-				'name': 'c-portage',
-				'cp': 'c-portage',
-				'version': '',
-				'revision': '',
-				'fullversion': ''
-			}),
-			# with dash (-) in name (Bug #316961)
-			('sys-apps/c-portage-2.2_rc10-r1', {
-				'category': 'sys-apps',
-				'name': 'c-portage',
-				'cp': 'sys-apps/c-portage',
-				'version': '2.2_rc10',
-				'revision': 'r1',
-				'fullversion': '2.2_rc10-r1'
-			}),
-		]
+    def test_chunk_splitting(self):
+        all_tests = [
+            # simple
+            (
+                "sys-apps/portage-2.2",
+                {
+                    "category": "sys-apps",
+                    "name": "portage",
+                    "cp": "sys-apps/portage",
+                    "version": "2.2",
+                    "revision": "",
+                    "fullversion": "2.2",
+                },
+            ),
+            # with rc
+            (
+                "sys-apps/portage-2.2_rc10",
+                {
+                    "category": "sys-apps",
+                    "name": "portage",
+                    "cp": "sys-apps/portage",
+                    "version": "2.2_rc10",
+                    "revision": "",
+                    "fullversion": "2.2_rc10",
+                },
+            ),
+            # with revision
+            (
+                "sys-apps/portage-2.2_rc10-r1",
+                {
+                    "category": "sys-apps",
+                    "name": "portage",
+                    "cp": "sys-apps/portage",
+                    "version": "2.2_rc10",
+                    "revision": "r1",
+                    "fullversion": "2.2_rc10-r1",
+                },
+            ),
+            # with dash (-) in name (Bug #316961)
+            (
+                "c-portage",
+                {
+                    "category": "",
+                    "name": "c-portage",
+                    "cp": "c-portage",
+                    "version": "",
+                    "revision": "",
+                    "fullversion": "",
+                },
+            ),
+            # with dash (-) in name (Bug #316961)
+            (
+                "sys-apps/c-portage-2.2_rc10-r1",
+                {
+                    "category": "sys-apps",
+                    "name": "c-portage",
+                    "cp": "sys-apps/c-portage",
+                    "version": "2.2_rc10",
+                    "revision": "r1",
+                    "fullversion": "2.2_rc10-r1",
+                },
+            ),
+        ]
 
-		for test in all_tests:
-			cpv = CPV(test[0])
-			keys = ('category', 'name', 'cp', 'version', 'revision', 'fullversion')
-			for k in keys:
-				self.assertEqual(
-					getattr(cpv, k), test[1][k]
-				)
+        for test in all_tests:
+            cpv = CPV(test[0])
+            keys = ("category", "name", "cp", "version", "revision", "fullversion")
+            for k in keys:
+                self.assertEqual(getattr(cpv, k), test[1][k])
 
 
 def test_main():
-	suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitCPV)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitCPV)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_helpers.py b/pym/gentoolkit/test/test_helpers.py
index be27835..e92568d 100644
--- a/pym/gentoolkit/test/test_helpers.py
+++ b/pym/gentoolkit/test/test_helpers.py
@@ -7,70 +7,71 @@ from gentoolkit import helpers
 
 
 class TestFileOwner(unittest.TestCase):
-
-	def setUp(self):
-		pass
-
-	def tearDown(self):
-		pass
-
-	def test_expand_abspaths(self):
-		expand_abspaths = helpers.FileOwner.expand_abspaths
-
-		initial_file_list = ['foo0', '/foo1', '~/foo2', './foo3']
-		# This function should only effect foo3, and not ordering:
-
-		final_file_list = [
-			'foo0',
-			'/foo1',
-			'~/foo2',
-			os.path.join(os.getcwd(), os.path.normpath(initial_file_list[3]))
-		]
-
-		self.assertEqual(expand_abspaths(initial_file_list), final_file_list)
-
-	def test_extend_realpaths(self):
-		extend_realpaths = helpers.FileOwner.extend_realpaths
-
-		# Test that symlinks's realpaths are extended
-		f1 = NamedTemporaryFile(prefix='equeryunittest')
-		f2 = NamedTemporaryFile(prefix='equeryunittest')
-		f3 = NamedTemporaryFile(prefix='equeryunittest')
-		with warnings.catch_warnings():
-			warnings.simplefilter("ignore")
-			sym1 = mktemp()
-			os.symlink(f1.name, sym1)
-			sym2 = mktemp()
-			os.symlink(f3.name, sym2)
-		# We've created 3 files and 2 symlinks for testing. We're going to pass
-		# in only the first two files and both symlinks. sym1 points to f1.
-		# Since f1 is already in the list, sym1's realpath should not be added.
-		# sym2 points to f3, but f3's not in our list, so sym2's realpath
-		# should be added to the list.
-		p = [f1.name, f2.name, sym1, sym2]
-		p_xr = extend_realpaths(p)
-
-		self.assertEqual(p_xr[0], f1.name)
-		self.assertEqual(p_xr[1], f2.name)
-		self.assertEqual(p_xr[2], sym1)
-		self.assertEqual(p_xr[3], sym2)
-		self.assertEqual(p_xr[4], f3.name)
-
-		# Clean up
-		os.unlink(sym1)
-		os.unlink(sym2)
-
-		# Make sure we raise an exception if we don't get acceptable input
-		self.assertRaises(AttributeError, extend_realpaths, 'str')
-		self.assertRaises(AttributeError, extend_realpaths, set())
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_expand_abspaths(self):
+        expand_abspaths = helpers.FileOwner.expand_abspaths
+
+        initial_file_list = ["foo0", "/foo1", "~/foo2", "./foo3"]
+        # This function should only affect foo3, and not ordering:
+
+        final_file_list = [
+            "foo0",
+            "/foo1",
+            "~/foo2",
+            os.path.join(os.getcwd(), os.path.normpath(initial_file_list[3])),
+        ]
+
+        self.assertEqual(expand_abspaths(initial_file_list), final_file_list)
+
+    def test_extend_realpaths(self):
+        extend_realpaths = helpers.FileOwner.extend_realpaths
+
+        # Test that symlinks' realpaths are extended
+        f1 = NamedTemporaryFile(prefix="equeryunittest")
+        f2 = NamedTemporaryFile(prefix="equeryunittest")
+        f3 = NamedTemporaryFile(prefix="equeryunittest")
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            sym1 = mktemp()
+            os.symlink(f1.name, sym1)
+            sym2 = mktemp()
+            os.symlink(f3.name, sym2)
+        # We've created 3 files and 2 symlinks for testing. We're going to pass
+        # in only the first two files and both symlinks. sym1 points to f1.
+        # Since f1 is already in the list, sym1's realpath should not be added.
+        # sym2 points to f3, but f3's not in our list, so sym2's realpath
+        # should be added to the list.
+        p = [f1.name, f2.name, sym1, sym2]
+        p_xr = extend_realpaths(p)
+
+        self.assertEqual(p_xr[0], f1.name)
+        self.assertEqual(p_xr[1], f2.name)
+        self.assertEqual(p_xr[2], sym1)
+        self.assertEqual(p_xr[3], sym2)
+        self.assertEqual(p_xr[4], f3.name)
+
+        # Clean up
+        os.unlink(sym1)
+        os.unlink(sym2)
+
+        # Make sure we raise an exception if we don't get acceptable input
+        self.assertRaises(AttributeError, extend_realpaths, "str")
+        self.assertRaises(AttributeError, extend_realpaths, set())
 
 
 def test_main():
-	suite = unittest.TestLoader()
-	suite.loadTestsFromTestCase(TestFileOwner)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader()
+    suite.loadTestsFromTestCase(TestFileOwner)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_keyword.py b/pym/gentoolkit/test/test_keyword.py
index 8ba5e30..5ded638 100644
--- a/pym/gentoolkit/test/test_keyword.py
+++ b/pym/gentoolkit/test/test_keyword.py
@@ -2,49 +2,62 @@ import unittest
 
 from gentoolkit import keyword
 
-class TestGentoolkitKeyword(unittest.TestCase):
 
-	def test_compare_strs(self):
-		compare_strs = keyword.compare_strs
-
-		# Test ordering of keyword strings
-		version_tests = [
-			# different archs
-			('amd64', 'x86'),
-			# stable vs. unstable
-			('amd64-linux', '~amd64-linux'),
-			# different OSes
-			('~x86-linux', '~x86-solaris'),
-			# OS vs. no OS
-			('x86', '~amd64-linux')
-		]
-		# Check less than
-		for vt in version_tests:
-			self.assertTrue(compare_strs(vt[0], vt[1]) == -1)
-		# Check greater than
-		for vt in version_tests:
-			self.assertTrue(compare_strs(vt[1], vt[0]) == 1)
-		# Check equal
-		vt = ('~amd64-linux', '~amd64-linux')
-		self.assertTrue(compare_strs(vt[0], vt[1]) == 0)
-
-		kwds_presort = [
-			'~amd64', '~amd64-linux', '~ppc', '~ppc-macos', '~x86',
-			'~x86-linux', '~x86-macos', '~x86-solaris'
-		]
-		kwds_postsort = [
-			'~amd64', '~ppc', '~x86', '~amd64-linux', '~x86-linux',
-			'~ppc-macos', '~x86-macos', '~x86-solaris'
-		]
-		self.assertEqual(sorted(kwds_presort, key = keyword.Keyword), kwds_postsort)
+class TestGentoolkitKeyword(unittest.TestCase):
+    def test_compare_strs(self):
+        compare_strs = keyword.compare_strs
+
+        # Test ordering of keyword strings
+        version_tests = [
+            # different archs
+            ("amd64", "x86"),
+            # stable vs. unstable
+            ("amd64-linux", "~amd64-linux"),
+            # different OSes
+            ("~x86-linux", "~x86-solaris"),
+            # OS vs. no OS
+            ("x86", "~amd64-linux"),
+        ]
+        # Check less than
+        for vt in version_tests:
+            self.assertTrue(compare_strs(vt[0], vt[1]) == -1)
+        # Check greater than
+        for vt in version_tests:
+            self.assertTrue(compare_strs(vt[1], vt[0]) == 1)
+        # Check equal
+        vt = ("~amd64-linux", "~amd64-linux")
+        self.assertTrue(compare_strs(vt[0], vt[1]) == 0)
+
+        kwds_presort = [
+            "~amd64",
+            "~amd64-linux",
+            "~ppc",
+            "~ppc-macos",
+            "~x86",
+            "~x86-linux",
+            "~x86-macos",
+            "~x86-solaris",
+        ]
+        kwds_postsort = [
+            "~amd64",
+            "~ppc",
+            "~x86",
+            "~amd64-linux",
+            "~x86-linux",
+            "~ppc-macos",
+            "~x86-macos",
+            "~x86-solaris",
+        ]
+        self.assertEqual(sorted(kwds_presort, key=keyword.Keyword), kwds_postsort)
 
 
 def test_main():
-	suite = unittest.TestLoader().loadTestsFromTestCase(
-		TestGentoolkitKeyword)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitKeyword)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_profile.py b/pym/gentoolkit/test/test_profile.py
index f91de6d..04d4d7b 100644
--- a/pym/gentoolkit/test/test_profile.py
+++ b/pym/gentoolkit/test/test_profile.py
@@ -10,52 +10,52 @@ import unittest
 from gentoolkit.profile import load_profile_data
 
 
-TESTDIR = os.path.join(os.path.dirname(__file__), '../ekeyword/tests')
+TESTDIR = os.path.join(os.path.dirname(__file__), "../ekeyword/tests")
 
 
 class TestLoadProfileData(unittest.TestCase):
-	"""Tests for load_profile_data"""
-
-	def _test(self, subdir):
-		portdir = os.path.join(TESTDIR, 'profiles', subdir)
-		return load_profile_data(portdir=portdir)
-
-	def testLoadBoth(self):
-		"""Test loading both arch.list and profiles.desc"""
-		ret = self._test('both')
-		self.assertIn('arm', ret)
-		self.assertEqual(ret['arm'], ('stable', 'arch'))
-		self.assertIn('arm64', ret)
-		self.assertEqual(ret['arm64'], ('exp', 'arch'))
-
-	def testLoadArchOnly(self):
-		"""Test loading only arch.list"""
-		ret = self._test('arch-only')
-		self.assertIn('arm', ret)
-		self.assertEqual(ret['arm'], (None, 'arch'))
-		self.assertIn('x86-solaris', ret)
-
-	def testLoadProfilesOnly(self):
-		"""Test loading only profiles.desc"""
-		ret = self._test('profiles-only')
-		self.assertIn('arm', ret)
-		self.assertEqual(ret['arm'], ('stable', 'arch'))
-		self.assertIn('arm64', ret)
-		self.assertEqual(ret['arm64'], ('exp', 'arch'))
-
-	def testLoadArchesDesc(self):
-		"""Test loading arch.list, arches.desc and profiles.desc"""
-		ret = self._test('arches-desc')
-		self.assertIn('arm', ret)
-		self.assertEqual(ret['arm'], ('stable', 'arch'))
-		self.assertIn('arm64', ret)
-		self.assertEqual(ret['arm64'], ('exp', 'arch'))
-		self.assertIn('alpha', ret)
-		self.assertEqual(ret['alpha'], ('stable', '~arch'))
-		self.assertIn('sparc-fbsd', ret)
-		self.assertEqual(ret['sparc-fbsd'], ('exp', '~arch'))
-
-	def testLoadNone(self):
-		"""Test running when neither files exists"""
-		ret = self._test('none')
-		self.assertEqual(ret, {})
+    """Tests for load_profile_data"""
+
+    def _test(self, subdir):
+        portdir = os.path.join(TESTDIR, "profiles", subdir)
+        return load_profile_data(portdir=portdir)
+
+    def testLoadBoth(self):
+        """Test loading both arch.list and profiles.desc"""
+        ret = self._test("both")
+        self.assertIn("arm", ret)
+        self.assertEqual(ret["arm"], ("stable", "arch"))
+        self.assertIn("arm64", ret)
+        self.assertEqual(ret["arm64"], ("exp", "arch"))
+
+    def testLoadArchOnly(self):
+        """Test loading only arch.list"""
+        ret = self._test("arch-only")
+        self.assertIn("arm", ret)
+        self.assertEqual(ret["arm"], (None, "arch"))
+        self.assertIn("x86-solaris", ret)
+
+    def testLoadProfilesOnly(self):
+        """Test loading only profiles.desc"""
+        ret = self._test("profiles-only")
+        self.assertIn("arm", ret)
+        self.assertEqual(ret["arm"], ("stable", "arch"))
+        self.assertIn("arm64", ret)
+        self.assertEqual(ret["arm64"], ("exp", "arch"))
+
+    def testLoadArchesDesc(self):
+        """Test loading arch.list, arches.desc and profiles.desc"""
+        ret = self._test("arches-desc")
+        self.assertIn("arm", ret)
+        self.assertEqual(ret["arm"], ("stable", "arch"))
+        self.assertIn("arm64", ret)
+        self.assertEqual(ret["arm64"], ("exp", "arch"))
+        self.assertIn("alpha", ret)
+        self.assertEqual(ret["alpha"], ("stable", "~arch"))
+        self.assertIn("sparc-fbsd", ret)
+        self.assertEqual(ret["sparc-fbsd"], ("exp", "~arch"))
+
+    def testLoadNone(self):
+        """Test running when neither file exists"""
+        ret = self._test("none")
+        self.assertEqual(ret, {})

diff --git a/pym/gentoolkit/test/test_query.py b/pym/gentoolkit/test/test_query.py
index 8eb0159..6b8cf14 100644
--- a/pym/gentoolkit/test/test_query.py
+++ b/pym/gentoolkit/test/test_query.py
@@ -5,99 +5,96 @@ from gentoolkit import errors
 
 
 class TestQuery(unittest.TestCase):
-
-	def setUp(self):
-		pass
-
-	def tearDown(self):
-		pass
-
-	def test_init(self):
-		# valid queries must have at least one ascii letter or digit or
-                # '*'
-		invalid_queries = [
-			'',
-			'/',
-		]
-		for q in invalid_queries:
-			self.assertRaises(errors.GentoolkitInvalidPackage,
-				query.Query, q
-			)
-
-		q1 = query.Query('gentoolkit')
-		q1_tests = [
-			(q1.query, 'gentoolkit'),
-			(q1.is_regex, False),
-			(q1.repo_filter, None),
-			(q1.query_type, "simple")
-		]
-		for t in q1_tests:
-			self.assertEqual(t[0], t[1])
-
-		q2 = query.Query('gentoolkit-.*', is_regex=True)
-		q2_tests = [
-			(q2.query, 'gentoolkit-.*'),
-			(q2.is_regex, True),
-			(q2.repo_filter, None),
-			(q2.query_type, "complex")
-		]
-		for t in q2_tests:
-			self.assertEqual(t[0], t[1])
-
-		q3 = query.Query('*::gentoo')
-		q3_tests = [
-			(q3.query, '*'),
-			(q3.is_regex, False),
-			(q3.repo_filter, 'gentoo'),
-			(q3.query_type, "complex")
-		]
-		for t in q3_tests:
-			self.assertEqual(t[0], t[1])
-
-		q4 = query.Query('gcc:4.3')
-		q4_tests = [
-			(q4.query, 'gcc:4.3'),
-			(q4.is_regex, False),
-			(q4.repo_filter, None),
-			(q4.query_type, "simple")
-		]
-		for t in q4_tests:
-			self.assertEqual(t[0], t[1])
-
-		q5 = query.Query('@system')
-		q5_tests = [
-			(q5.query, '@system'),
-			(q5.is_regex, False),
-			(q5.repo_filter, None),
-			(q5.query_type, "set")
-		]
-		for t in q5_tests:
-			self.assertEqual(t[0], t[1])
-
-	def test_uses_globbing(self):
-		globbing_tests = [
-			('sys-apps/portage-2.1.6.13', False),
-			('>=sys-apps/portage-2.1.6.13', False),
-			('<=sys-apps/portage-2.1.6.13', False),
-			('~sys-apps/portage-2.1.6.13', False),
-			('=sys-apps/portage-2*', False),
-			('sys-*/*-2.1.6.13', True),
-			('sys-app?/portage-2.1.6.13', True),
-			('sys-apps/[bp]ortage-2.1.6.13', True),
-			('sys-apps/[!p]ortage*', True)
-		]
-
-		for gt in globbing_tests:
-			self.assertTrue(
-				query.Query(gt[0]).uses_globbing() == gt[1]
-			)
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_init(self):
+        # valid queries must have at least one ascii letter or digit or
+        # '*'
+        invalid_queries = [
+            "",
+            "/",
+        ]
+        for q in invalid_queries:
+            self.assertRaises(errors.GentoolkitInvalidPackage, query.Query, q)
+
+        q1 = query.Query("gentoolkit")
+        q1_tests = [
+            (q1.query, "gentoolkit"),
+            (q1.is_regex, False),
+            (q1.repo_filter, None),
+            (q1.query_type, "simple"),
+        ]
+        for t in q1_tests:
+            self.assertEqual(t[0], t[1])
+
+        q2 = query.Query("gentoolkit-.*", is_regex=True)
+        q2_tests = [
+            (q2.query, "gentoolkit-.*"),
+            (q2.is_regex, True),
+            (q2.repo_filter, None),
+            (q2.query_type, "complex"),
+        ]
+        for t in q2_tests:
+            self.assertEqual(t[0], t[1])
+
+        q3 = query.Query("*::gentoo")
+        q3_tests = [
+            (q3.query, "*"),
+            (q3.is_regex, False),
+            (q3.repo_filter, "gentoo"),
+            (q3.query_type, "complex"),
+        ]
+        for t in q3_tests:
+            self.assertEqual(t[0], t[1])
+
+        q4 = query.Query("gcc:4.3")
+        q4_tests = [
+            (q4.query, "gcc:4.3"),
+            (q4.is_regex, False),
+            (q4.repo_filter, None),
+            (q4.query_type, "simple"),
+        ]
+        for t in q4_tests:
+            self.assertEqual(t[0], t[1])
+
+        q5 = query.Query("@system")
+        q5_tests = [
+            (q5.query, "@system"),
+            (q5.is_regex, False),
+            (q5.repo_filter, None),
+            (q5.query_type, "set"),
+        ]
+        for t in q5_tests:
+            self.assertEqual(t[0], t[1])
+
+    def test_uses_globbing(self):
+        globbing_tests = [
+            ("sys-apps/portage-2.1.6.13", False),
+            (">=sys-apps/portage-2.1.6.13", False),
+            ("<=sys-apps/portage-2.1.6.13", False),
+            ("~sys-apps/portage-2.1.6.13", False),
+            ("=sys-apps/portage-2*", False),
+            ("sys-*/*-2.1.6.13", True),
+            ("sys-app?/portage-2.1.6.13", True),
+            ("sys-apps/[bp]ortage-2.1.6.13", True),
+            ("sys-apps/[!p]ortage*", True),
+        ]
+
+        for gt in globbing_tests:
+            self.assertTrue(query.Query(gt[0]).uses_globbing() == gt[1])
 
 
 def test_main():
-	suite = unittest.TestLoader().loadTestsFromTestCase(TestQuery)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestQuery)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/test/test_syntax.py b/pym/gentoolkit/test/test_syntax.py
index 7a990ca..525bcc1 100644
--- a/pym/gentoolkit/test/test_syntax.py
+++ b/pym/gentoolkit/test/test_syntax.py
@@ -2,36 +2,36 @@ import unittest
 import py_compile
 
 import os
+
 osp = os.path
 
 """Does a basic syntax check by compiling all modules. From Portage."""
 
 pym_dirs = os.walk(osp.dirname(osp.dirname(osp.dirname(__file__))))
-blacklist_dirs = frozenset(('.svn', 'test'))
+blacklist_dirs = frozenset((".svn", "test"))
 
-class TestForSyntaxErrors(unittest.TestCase):
 
-	def test_compileability(self):
-		compileables = []
-		for thisdir, subdirs, files in pym_dirs:
-			if os.path.basename(thisdir) in blacklist_dirs:
-				continue
-			compileables.extend([
-				osp.join(thisdir, f)
-				for f in files
-				if osp.splitext(f)[1] == '.py'
-			])
+class TestForSyntaxErrors(unittest.TestCase):
+    def test_compileability(self):
+        compileables = []
+        for thisdir, subdirs, files in pym_dirs:
+            if os.path.basename(thisdir) in blacklist_dirs:
+                continue
+            compileables.extend(
+                [osp.join(thisdir, f) for f in files if osp.splitext(f)[1] == ".py"]
+            )
 
-		for c in compileables:
-			py_compile.compile(c, doraise=True)
+        for c in compileables:
+            py_compile.compile(c, doraise=True)
 
 
 def test_main():
-	suite = unittest.TestLoader().loadTestsFromTestCase(
-		TestForSyntaxErrors)
-	unittest.TextTestRunner(verbosity=2).run(suite)
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestForSyntaxErrors)
+    unittest.TextTestRunner(verbosity=2).run(suite)
+
+
 test_main.__test__ = False
 
 
-if __name__ == '__main__':
-	test_main()
+if __name__ == "__main__":
+    test_main()

diff --git a/pym/gentoolkit/textwrap_.py b/pym/gentoolkit/textwrap_.py
index 07c0831..fd28c6a 100644
--- a/pym/gentoolkit/textwrap_.py
+++ b/pym/gentoolkit/textwrap_.py
@@ -5,95 +5,97 @@ prevent the splitting of ANSI colors as well as package names and versions."""
 import re
 import textwrap
 
+
 class TextWrapper(textwrap.TextWrapper):
-	"""Ignore ANSI escape codes while wrapping text"""
-
-	def _split(self, text):
-		"""_split(text : string) -> [string]
-
-		Split the text to wrap into indivisible chunks.
-		"""
-		# Only split on whitespace to avoid mangling ANSI escape codes or
-		# package names.
-		wordsep_re = re.compile(r'(\s+)')
-		chunks = wordsep_re.split(text)
-		chunks = [x for x in chunks if x is not None]
-		return chunks
-
-	def _wrap_chunks(self, chunks):
-		"""_wrap_chunks(chunks : [string]) -> [string]
-
-		Wrap a sequence of text chunks and return a list of lines of
-		length 'self.width' or less.  (If 'break_long_words' is false,
-		some lines may be longer than this.)  Chunks correspond roughly
-		to words and the whitespace between them: each chunk is
-		indivisible (modulo 'break_long_words'), but a line break can
-		come between any two chunks.  Chunks should not have internal
-		whitespace; ie. a chunk is either all whitespace or a "word".
-		Whitespace chunks will be removed from the beginning and end of
-		lines, but apart from that whitespace is preserved.
-		"""
-		lines = []
-		if self.width <= 0:
-			raise ValueError("invalid width %r (must be > 0)" % self.width)
-
-		# Arrange in reverse order so items can be efficiently popped
-		# from a stack of chunks.
-		chunks.reverse()
-
-		# Regex to strip ANSI escape codes. It's only used for the
-		# length calculations of indent and each chuck.
-		ansi_re = re.compile(r'\x1b\[[0-9;]*m')
-
-		while chunks:
-
-			# Start the list of chunks that will make up the current line.
-			# cur_len is just the length of all the chunks in cur_line.
-			cur_line = []
-			cur_len = 0
-
-			# Figure out which static string will prefix this line.
-			if lines:
-				indent = self.subsequent_indent
-			else:
-				indent = self.initial_indent
-
-			# Maximum width for this line. Ingore ANSI escape codes.
-			width = self.width - len(re.sub(ansi_re, '', indent))
-
-			# First chunk on line is whitespace -- drop it, unless this
-			# is the very beginning of the text (ie. no lines started yet).
-			if chunks[-1].strip() == '' and lines:
-				del chunks[-1]
-
-			while chunks:
-				# Ignore ANSI escape codes.
-				chunk_len = len(re.sub(ansi_re, '', chunks[-1]))
-
-				# Can at least squeeze this chunk onto the current line.
-				if cur_len + chunk_len <= width:
-					cur_line.append(chunks.pop())
-					cur_len += chunk_len
-
-				# Nope, this line is full.
-				else:
-					break
-
-			# The current line is full, and the next chunk is too big to
-			# fit on *any* line (not just this one).
-			# Ignore ANSI escape codes.
-			if chunks and len(re.sub(ansi_re, '', chunks[-1])) > width:
-				self._handle_long_word(chunks, cur_line, cur_len, width)
-
-			# If the last chunk on this line is all whitespace, drop it.
-			if cur_line and cur_line[-1].strip() == '':
-				del cur_line[-1]
-
-			# Convert current line back to a string and store it in list
-			# of all lines (return value).
-			if cur_line:
-				lines.append(indent + ''.join(cur_line))
-
-		return lines
+    """Ignore ANSI escape codes while wrapping text"""
+
+    def _split(self, text):
+        """_split(text : string) -> [string]
+
+        Split the text to wrap into indivisible chunks.
+        """
+        # Only split on whitespace to avoid mangling ANSI escape codes or
+        # package names.
+        wordsep_re = re.compile(r"(\s+)")
+        chunks = wordsep_re.split(text)
+        chunks = [x for x in chunks if x is not None]
+        return chunks
+
+    def _wrap_chunks(self, chunks):
+        """_wrap_chunks(chunks : [string]) -> [string]
+
+        Wrap a sequence of text chunks and return a list of lines of
+        length 'self.width' or less.  (If 'break_long_words' is false,
+        some lines may be longer than this.)  Chunks correspond roughly
+        to words and the whitespace between them: each chunk is
+        indivisible (modulo 'break_long_words'), but a line break can
+        come between any two chunks.  Chunks should not have internal
+        whitespace; ie. a chunk is either all whitespace or a "word".
+        Whitespace chunks will be removed from the beginning and end of
+        lines, but apart from that whitespace is preserved.
+        """
+        lines = []
+        if self.width <= 0:
+            raise ValueError("invalid width %r (must be > 0)" % self.width)
+
+        # Arrange in reverse order so items can be efficiently popped
+        # from a stack of chunks.
+        chunks.reverse()
+
+        # Regex to strip ANSI escape codes. It's only used for the
+        # length calculations of indent and each chunk.
+        ansi_re = re.compile(r"\x1b\[[0-9;]*m")
+
+        while chunks:
+
+            # Start the list of chunks that will make up the current line.
+            # cur_len is just the length of all the chunks in cur_line.
+            cur_line = []
+            cur_len = 0
+
+            # Figure out which static string will prefix this line.
+            if lines:
+                indent = self.subsequent_indent
+            else:
+                indent = self.initial_indent
+
+            # Maximum width for this line. Ignore ANSI escape codes.
+            width = self.width - len(re.sub(ansi_re, "", indent))
+
+            # First chunk on line is whitespace -- drop it, unless this
+            # is the very beginning of the text (ie. no lines started yet).
+            if chunks[-1].strip() == "" and lines:
+                del chunks[-1]
+
+            while chunks:
+                # Ignore ANSI escape codes.
+                chunk_len = len(re.sub(ansi_re, "", chunks[-1]))
+
+                # Can at least squeeze this chunk onto the current line.
+                if cur_len + chunk_len <= width:
+                    cur_line.append(chunks.pop())
+                    cur_len += chunk_len
+
+                # Nope, this line is full.
+                else:
+                    break
+
+            # The current line is full, and the next chunk is too big to
+            # fit on *any* line (not just this one).
+            # Ignore ANSI escape codes.
+            if chunks and len(re.sub(ansi_re, "", chunks[-1])) > width:
+                self._handle_long_word(chunks, cur_line, cur_len, width)
+
+            # If the last chunk on this line is all whitespace, drop it.
+            if cur_line and cur_line[-1].strip() == "":
+                del cur_line[-1]
+
+            # Convert current line back to a string and store it in list
+            # of all lines (return value).
+            if cur_line:
+                lines.append(indent + "".join(cur_line))
+
+        return lines
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/pym/gentoolkit/versionmatch.py b/pym/gentoolkit/versionmatch.py
index 9287c13..48c12ad 100644
--- a/pym/gentoolkit/versionmatch.py
+++ b/pym/gentoolkit/versionmatch.py
@@ -21,112 +21,109 @@ from gentoolkit.cpv import CPV
 # Classes
 # =======
 
+
 class VersionMatch:
-	"""Gentoo version comparison object from pkgcore.ebuild.atom_restricts.
-
-	Any overriding of this class *must* maintain numerical order of
-	self.vals, see intersect for reason why. vals also must be a tuple.
-	"""
-	_convert_op2int = {(-1,):"<", (-1, 0): "<=", (0,):"=",
-		(0, 1):">=", (1,):">"}
-
-	_convert_int2op = dict([(v, k) for k, v in _convert_op2int.items()])
-
-	def __init__(self, cpv, op='='):
-		"""Initialize a VersionMatch instance.
-
-		@type cpv: L{gentoolkit.cpv.CPV}
-		@param cpv: cpv object
-		@type op: str
-		@keyword op: operator
-		"""
-
-		if not isinstance(cpv, (CPV, self.__class__)):
-			err = "cpv must be a gentoolkit.cpv.CPV "
-			err += "or gentoolkit.versionmatch.VersionMatch instance"
-			raise ValueError(err)
-		self.cpv = cpv
-		self.operator = op
-		self.version = cpv.version
-		self.revision = cpv.revision
-		self.fullversion = cpv.fullversion
-
-		if self.operator != "~" and self.operator not in self._convert_int2op:
-			raise errors.GentoolkitInvalidVersion(
-				"invalid operator '%s'" % self.operator)
-
-		if self.operator == "~":
-			if not self.version:
-				raise errors.GentoolkitInvalidVersion(
-					"for ~ op, ver must be specified")
-			self.droprevision = True
-			self.values = (0,)
-		else:
-			self.droprevision = False
-			self.values = self._convert_int2op[self.operator]
-
-	def match(self, other):
-		"""See whether a passed in VersionMatch or CPV instance matches self.
-
-		Example usage:
-			>>> from gentoolkit.versionmatch import VersionMatch
-			>>> from gentoolkit.cpv import CPV
-			>>> VersionMatch(CPV('foo/bar-1.5'), op='>').match(
-			... VersionMatch(CPV('foo/bar-2.0')))
-			True
-
-		@type other: gentoolkit.versionmatch.VersionMatch OR
-		   gentoolkit.cpv.CPV
-		@param other: version to compare with self's version
-		@rtype: bool
-		"""
-
-		if self.droprevision:
-			ver1, ver2 = self.version, other.version
-		else:
-			ver1, ver2 = self.fullversion, other.fullversion
-
-		return vercmp(ver2, ver1) in self.values
-
-	def __str__(self):
-		operator = self._convert_op2int[self.values]
-
-		if self.droprevision or not self.revision:
-			return "ver %s %s" % (operator, self.version)
-		return "ver-rev %s %s-%s" % (
-			operator, self.version, self.revision
-		)
-
-	def __repr__(self):
-		return "<%s %r>" % (self.__class__.__name__, str(self))
-
-	@staticmethod
-	def _convert_ops(inst):
-		if inst.droprevision:
-			return inst.values
-		return tuple(sorted(set((-1, 0, 1)).difference(inst.values)))
-
-	def __eq__(self, other):
-		if self is other:
-			return True
-		if isinstance(other, self.__class__):
-			if (self.droprevision != other.droprevision or
-				self.version != other.version or
-				self.revision != other.revision):
-				return False
-			return self._convert_ops(self) == self._convert_ops(other)
-
-		return False
-
-	def __ne__(self, other):
-		return not self == other
-
-	def __hash__(self):
-		return hash((
-			self.droprevision,
-			self.version,
-			self.revision,
-			self.values
-		))
+    """Gentoo version comparison object from pkgcore.ebuild.atom_restricts.
+
+    Any overriding of this class *must* maintain numerical order of
+    self.vals, see intersect for reason why. vals also must be a tuple.
+    """
+
+    _convert_op2int = {(-1,): "<", (-1, 0): "<=", (0,): "=", (0, 1): ">=", (1,): ">"}
+
+    _convert_int2op = dict([(v, k) for k, v in _convert_op2int.items()])
+
+    def __init__(self, cpv, op="="):
+        """Initialize a VersionMatch instance.
+
+        @type cpv: L{gentoolkit.cpv.CPV}
+        @param cpv: cpv object
+        @type op: str
+        @keyword op: operator
+        """
+
+        if not isinstance(cpv, (CPV, self.__class__)):
+            err = "cpv must be a gentoolkit.cpv.CPV "
+            err += "or gentoolkit.versionmatch.VersionMatch instance"
+            raise ValueError(err)
+        self.cpv = cpv
+        self.operator = op
+        self.version = cpv.version
+        self.revision = cpv.revision
+        self.fullversion = cpv.fullversion
+
+        if self.operator != "~" and self.operator not in self._convert_int2op:
+            raise errors.GentoolkitInvalidVersion(
+                "invalid operator '%s'" % self.operator
+            )
+
+        if self.operator == "~":
+            if not self.version:
+                raise errors.GentoolkitInvalidVersion("for ~ op, ver must be specified")
+            self.droprevision = True
+            self.values = (0,)
+        else:
+            self.droprevision = False
+            self.values = self._convert_int2op[self.operator]
+
+    def match(self, other):
+        """See whether a passed in VersionMatch or CPV instance matches self.
+
+        Example usage:
+                >>> from gentoolkit.versionmatch import VersionMatch
+                >>> from gentoolkit.cpv import CPV
+                >>> VersionMatch(CPV('foo/bar-1.5'), op='>').match(
+                ... VersionMatch(CPV('foo/bar-2.0')))
+                True
+
+        @type other: gentoolkit.versionmatch.VersionMatch OR
+           gentoolkit.cpv.CPV
+        @param other: version to compare with self's version
+        @rtype: bool
+        """
+
+        if self.droprevision:
+            ver1, ver2 = self.version, other.version
+        else:
+            ver1, ver2 = self.fullversion, other.fullversion
+
+        return vercmp(ver2, ver1) in self.values
+
+    def __str__(self):
+        operator = self._convert_op2int[self.values]
+
+        if self.droprevision or not self.revision:
+            return "ver %s %s" % (operator, self.version)
+        return "ver-rev %s %s-%s" % (operator, self.version, self.revision)
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__, str(self))
+
+    @staticmethod
+    def _convert_ops(inst):
+        if inst.droprevision:
+            return inst.values
+        return tuple(sorted(set((-1, 0, 1)).difference(inst.values)))
+
+    def __eq__(self, other):
+        if self is other:
+            return True
+        if isinstance(other, self.__class__):
+            if (
+                self.droprevision != other.droprevision
+                or self.version != other.version
+                or self.revision != other.revision
+            ):
+                return False
+            return self._convert_ops(self) == self._convert_ops(other)
+
+        return False
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash((self.droprevision, self.version, self.revision, self.values))
+
 
 # vim: set ts=4 sw=4 tw=79:

diff --git a/setup.py b/setup.py
index a9cd80c..23e9b36 100755
--- a/setup.py
+++ b/setup.py
@@ -10,144 +10,167 @@ from glob import glob
 import os
 import io
 
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pym'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "pym"))
 
-if ( len(sys.argv) > 2 ) and ( sys.argv[1] == "set_version" ):
-	__version__ = sys.argv[2]
+if (len(sys.argv) > 2) and (sys.argv[1] == "set_version"):
+    __version__ = sys.argv[2]
 else:
-	__version__ = os.getenv('VERSION', default=os.getenv('PVR', default='9999'))
+    __version__ = os.getenv("VERSION", default=os.getenv("PVR", default="9999"))
 
 cwd = os.getcwd()
 
 # Load EPREFIX from Portage, fall back to the empty string if it fails
 try:
-	from portage.const import EPREFIX
+    from portage.const import EPREFIX
 except ImportError:
-	EPREFIX=''
+    EPREFIX = ""
 
 
 # Bash files that need `VERSION=""` subbed, relative to this dir:
-bash_scripts = [(os.path.join(cwd, path), 'VERSION=') for path in (
-	'bin/ebump',
-	'bin/euse',
-	'bin/revdep-rebuild.sh'
-)]
+bash_scripts = [
+    (os.path.join(cwd, path), "VERSION=")
+    for path in ("bin/ebump", "bin/euse", "bin/revdep-rebuild.sh")
+]
 
 # Python files that need `__version__ = ""` subbed, relative to this dir:
-python_scripts = [(os.path.join(cwd, path), '__version__ = ') for path in (
-	'bin/eclean',
-	'bin/eclean-dist',
-	'bin/eclean-pkg',
-	'bin/epkginfo',
-	'pym/gentoolkit/eclean/cli.py',
-	'pym/gentoolkit/enalyze/__init__.py',
-	'pym/gentoolkit/ekeyword/ekeyword.py',
-	'pym/gentoolkit/equery/__init__.py',
-	'pym/gentoolkit/eshowkw/__init__.py',
-	'pym/gentoolkit/imlate/imlate.py',
-	'pym/gentoolkit/revdep_rebuild/__init__.py'
-)]
-
-manpages = [(os.path.join(cwd, path[0]), path[1]) for path in (
-	('man/ebump.1', 'EBUMP'),
-	('man/eclean.1', 'ECLEAN'),
-	('man/enalyze.1', 'ENALYZE'),
-	('man/epkginfo.1', 'EPKGINFO'),
-	('man/equery.1', 'EQUERY'),
-	('man/eread.1', 'EREAD'),
-	('man/eshowkw.1', 'ESHOWKW'),
-	('man/euse.1', 'EUSE'),
-	('man/imlate.1', 'IMLATE'),
-	('man/revdep-rebuild.1', 'REVDEP-REBUILD'),
-)]
+python_scripts = [
+    (os.path.join(cwd, path), "__version__ = ")
+    for path in (
+        "bin/eclean",
+        "bin/eclean-dist",
+        "bin/eclean-pkg",
+        "bin/epkginfo",
+        "pym/gentoolkit/eclean/cli.py",
+        "pym/gentoolkit/enalyze/__init__.py",
+        "pym/gentoolkit/ekeyword/ekeyword.py",
+        "pym/gentoolkit/equery/__init__.py",
+        "pym/gentoolkit/eshowkw/__init__.py",
+        "pym/gentoolkit/imlate/imlate.py",
+        "pym/gentoolkit/revdep_rebuild/__init__.py",
+    )
+]
+
+manpages = [
+    (os.path.join(cwd, path[0]), path[1])
+    for path in (
+        ("man/ebump.1", "EBUMP"),
+        ("man/eclean.1", "ECLEAN"),
+        ("man/enalyze.1", "ENALYZE"),
+        ("man/epkginfo.1", "EPKGINFO"),
+        ("man/equery.1", "EQUERY"),
+        ("man/eread.1", "EREAD"),
+        ("man/eshowkw.1", "ESHOWKW"),
+        ("man/euse.1", "EUSE"),
+        ("man/imlate.1", "IMLATE"),
+        ("man/revdep-rebuild.1", "REVDEP-REBUILD"),
+    )
+]
+
 
 class set_version(core.Command):
-	"""Set python __version__ and bash VERSION to our __version__."""
-	description = "hardcode scripts' version using VERSION from environment"
-	user_options = []  # [(long_name, short_name, desc),]
-
-	def initialize_options (self):
-		pass
-
-	def finalize_options (self):
-		pass
-
-	def run(self):
-		ver = 'git' if __version__ == '9999' else __version__
-		print("Setting version to %s" % ver)
-		def sub(files, pattern):
-			for f in files:
-				updated_file = []
-				with io.open(f[0], 'r', 1, 'utf_8') as s:
-					for line in s:
-						newline = re.sub(pattern %f[1], '"%s"' % ver, line, 1)
-						updated_file.append(newline)
-				with io.open(f[0], 'w', 1, 'utf_8') as s:
-					s.writelines(updated_file)
-
-		quote = r'[\'"]{1}'
-		bash_re = r'(?<=%s)' + quote + '[^\'"]*' + quote
-		sub(bash_scripts, bash_re)
-		python_re = r'(?<=^%s)' + quote + '[^\'"]*' + quote
-		sub(python_scripts, python_re)
-		man_re = r'(?<=^.TH "%s" "[0-9]" )' + quote + '[^\'"]*' + quote
-		sub(manpages, man_re)
+    """Set python __version__ and bash VERSION to our __version__."""
+
+    description = "hardcode scripts' version using VERSION from environment"
+    user_options = []  # [(long_name, short_name, desc),]
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        ver = "git" if __version__ == "9999" else __version__
+        print("Setting version to %s" % ver)
+
+        def sub(files, pattern):
+            for f in files:
+                updated_file = []
+                with io.open(f[0], "r", 1, "utf_8") as s:
+                    for line in s:
+                        newline = re.sub(pattern % f[1], '"%s"' % ver, line, 1)
+                        updated_file.append(newline)
+                with io.open(f[0], "w", 1, "utf_8") as s:
+                    s.writelines(updated_file)
+
+        quote = r'[\'"]{1}'
+        bash_re = r"(?<=%s)" + quote + "[^'\"]*" + quote
+        sub(bash_scripts, bash_re)
+        python_re = r"(?<=^%s)" + quote + "[^'\"]*" + quote
+        sub(python_scripts, python_re)
+        man_re = r'(?<=^.TH "%s" "[0-9]" )' + quote + "[^'\"]*" + quote
+        sub(manpages, man_re)
 
 
 class TestCommand(Command):
-	user_options = []
+    user_options = []
 
-	def initialize_options(self):
-		pass
+    def initialize_options(self):
+        pass
 
-	def finalize_options(self):
-		pass
+    def finalize_options(self):
+        pass
 
-	def run(self):
-		args = [sys.executable, '-m', 'unittest', 'discover', 'pym']
-		raise SystemExit(subprocess.call(args))
+    def run(self):
+        args = [sys.executable, "-m", "unittest", "discover", "pym"]
+        raise SystemExit(subprocess.call(args))
 
 
 packages = [
-	str('.'.join(root.split(os.sep)[1:]))
-	for root, dirs, files in os.walk('pym/gentoolkit')
-	if '__init__.py' in files
+    str(".".join(root.split(os.sep)[1:]))
+    for root, dirs, files in os.walk("pym/gentoolkit")
+    if "__init__.py" in files
 ]
 
 test_data = {
-	'gentoolkit': [
-		'test/eclean/Packages',
-		'test/eclean/testdistfiles.tar.gz',
-		'test/eclean/distfiles.exclude'
-	]
+    "gentoolkit": [
+        "test/eclean/Packages",
+        "test/eclean/testdistfiles.tar.gz",
+        "test/eclean/distfiles.exclude",
+    ]
 }
 
 core.setup(
-	name='gentoolkit',
-	version=__version__,
-	description='Set of tools that work with and enhance portage.',
-	author='',
-	author_email='',
-	maintainer='Gentoo Portage Tools Team',
-	maintainer_email='tools-portage@gentoo.org',
-	url='http://www.gentoo.org/proj/en/portage/tools/index.xml',
-	download_url='http://distfiles.gentoo.org/distfiles/gentoolkit-%s.tar.gz'\
-		% __version__,
-	package_dir={'': 'pym'},
-	packages=packages,
-	package_data = test_data,
-	scripts=(glob('bin/*')),
-	data_files=(
-		(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'etc/env.d'), ['data/99gentoolkit-env']),
-		(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'etc/revdep-rebuild'), ['data/revdep-rebuild/99revdep-rebuild']),
-		(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'etc/eclean'), glob('data/eclean/*')),
-		(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'usr/share/man/man1'), glob('man/*')),
-		(os.path.join(os.sep, EPREFIX.lstrip(os.sep), 'usr/lib/tmpfiles.d'), ['data/tmpfiles.d/revdep-rebuild.conf']),
-	),
-	cmdclass={
-		'test': TestCommand,
-		'set_version': set_version,
-	},
+    name="gentoolkit",
+    version=__version__,
+    description="Set of tools that work with and enhance portage.",
+    author="",
+    author_email="",
+    maintainer="Gentoo Portage Tools Team",
+    maintainer_email="tools-portage@gentoo.org",
+    url="http://www.gentoo.org/proj/en/portage/tools/index.xml",
+    download_url="http://distfiles.gentoo.org/distfiles/gentoolkit-%s.tar.gz"
+    % __version__,
+    package_dir={"": "pym"},
+    packages=packages,
+    package_data=test_data,
+    scripts=(glob("bin/*")),
+    data_files=(
+        (
+            os.path.join(os.sep, EPREFIX.lstrip(os.sep), "etc/env.d"),
+            ["data/99gentoolkit-env"],
+        ),
+        (
+            os.path.join(os.sep, EPREFIX.lstrip(os.sep), "etc/revdep-rebuild"),
+            ["data/revdep-rebuild/99revdep-rebuild"],
+        ),
+        (
+            os.path.join(os.sep, EPREFIX.lstrip(os.sep), "etc/eclean"),
+            glob("data/eclean/*"),
+        ),
+        (
+            os.path.join(os.sep, EPREFIX.lstrip(os.sep), "usr/share/man/man1"),
+            glob("man/*"),
+        ),
+        (
+            os.path.join(os.sep, EPREFIX.lstrip(os.sep), "usr/lib/tmpfiles.d"),
+            ["data/tmpfiles.d/revdep-rebuild.conf"],
+        ),
+    ),
+    cmdclass={
+        "test": TestCommand,
+        "set_version": set_version,
+    },
 )
 
 # vim: set ts=4 sw=4 tw=79:


^ permalink raw reply related	[flat|nested] only message in thread

only message in thread, other threads:[~2021-09-20 22:58 UTC | newest]

Thread overview: (only message) (download: mbox.gz follow: Atom feed
-- links below jump to the message on this page --
2021-09-20 22:57 [gentoo-commits] proj/gentoolkit:master commit in: pym/gentoolkit/, pym/gentoolkit/test/eclean/, pym/gentoolkit/ekeyword/, Matt Turner

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox