* [gentoo-commits] proj/portage:master commit in: lib/portage/package/ebuild/, lib/portage/_emirrordist/, lib/portage/dep/, ...
@ 2023-01-10 15:12 Sam James
0 siblings, 0 replies; only message in thread
From: Sam James @ 2023-01-10 15:12 UTC (permalink / raw)
To: gentoo-commits
commit: 3a5b0362f634e9514ed3a48efe77089f6457e048
Author: Sam James <sam <AT> gentoo <DOT> org>
AuthorDate: Tue Jan 10 13:20:51 2023 +0000
Commit: Sam James <sam <AT> gentoo <DOT> org>
CommitDate: Tue Jan 10 15:12:38 2023 +0000
URL: https://gitweb.gentoo.org/proj/portage.git/commit/?id=3a5b0362
*/*: convert to f-strings (run flynt)
Signed-off-by: Sam James <sam <AT> gentoo.org>
NEWS | 2 +
bin/archive-conf | 2 +-
bin/binhost-snapshot | 14 +-
bin/chpathtool.py | 2 +-
bin/clean_locks | 6 +-
bin/dispatch-conf | 4 +-
bin/dohtml.py | 15 +-
bin/doins.py | 8 +-
bin/ebuild | 24 +-
bin/ebuild-ipc.py | 4 +-
bin/egencache | 90 +++---
bin/emerge | 4 +-
bin/filter-bash-environment.py | 2 +-
bin/fixpackages | 4 +-
bin/glsa-check | 16 +-
bin/gpkg-helper.py | 12 +-
bin/portageq | 98 +++---
bin/quickpkg | 46 +--
bin/regenworld | 16 +-
bin/socks5-server.py | 2 +-
bin/xattr-helper.py | 2 +-
bin/xpak-helper.py | 12 +-
lib/_emerge/AbstractEbuildProcess.py | 6 +-
lib/_emerge/AsynchronousLock.py | 4 +-
lib/_emerge/AsynchronousTask.py | 2 +-
lib/_emerge/Binpkg.py | 14 +-
| 2 +-
lib/_emerge/BinpkgFetcher.py | 2 +-
lib/_emerge/BinpkgVerifier.py | 4 +-
lib/_emerge/BlockerCache.py | 2 +-
lib/_emerge/BlockerDB.py | 4 +-
lib/_emerge/CompositeTask.py | 2 +-
lib/_emerge/DependencyArg.py | 2 +-
lib/_emerge/EbuildBuild.py | 12 +-
lib/_emerge/EbuildBuildDir.py | 4 +-
lib/_emerge/EbuildFetcher.py | 12 +-
lib/_emerge/EbuildFetchonly.py | 4 +-
lib/_emerge/EbuildIpcDaemon.py | 2 +-
lib/_emerge/EbuildPhase.py | 18 +-
lib/_emerge/JobStatusDisplay.py | 12 +-
lib/_emerge/MergeListItem.py | 2 +-
lib/_emerge/MetadataRegen.py | 11 +-
lib/_emerge/Package.py | 24 +-
lib/_emerge/PackageMerge.py | 2 +-
lib/_emerge/PackageUninstall.py | 8 +-
lib/_emerge/Scheduler.py | 41 ++-
lib/_emerge/SubProcess.py | 4 +-
lib/_emerge/UseFlagDisplay.py | 4 +-
lib/_emerge/UserQuery.py | 2 +-
lib/_emerge/actions.py | 217 +++++++------
lib/_emerge/chk_updated_cfg_files.py | 2 +-
lib/_emerge/create_depgraph_params.py | 2 +-
lib/_emerge/depgraph.py | 340 ++++++++++-----------
lib/_emerge/emergelog.py | 4 +-
lib/_emerge/is_valid_package_atom.py | 2 +-
lib/_emerge/post_emerge.py | 8 +-
lib/_emerge/resolver/circular_dependency.py | 16 +-
lib/_emerge/resolver/output.py | 28 +-
lib/_emerge/resolver/output_helpers.py | 30 +-
lib/_emerge/resolver/slot_collision.py | 28 +-
lib/_emerge/search.py | 13 +-
lib/_emerge/show_invalid_depstring_notice.py | 10 +-
lib/_emerge/unmerge.py | 18 +-
lib/portage/_compat_upgrade/binpkg_compression.py | 2 +-
.../_compat_upgrade/binpkg_multi_instance.py | 6 +-
lib/portage/_compat_upgrade/default_locations.py | 8 +-
lib/portage/_emirrordist/Config.py | 4 +-
lib/portage/_emirrordist/ContentDB.py | 10 +-
lib/portage/_emirrordist/DeletionIterator.py | 4 +-
lib/portage/_emirrordist/DeletionTask.py | 8 +-
lib/portage/_emirrordist/FetchIterator.py | 12 +-
lib/portage/_emirrordist/FetchTask.py | 59 ++--
lib/portage/_emirrordist/MirrorDistTask.py | 4 +-
lib/portage/_emirrordist/main.py | 8 +-
lib/portage/_global_updates.py | 2 +-
lib/portage/_selinux.py | 4 +-
lib/portage/_sets/ProfilePackageSet.py | 2 +-
lib/portage/_sets/dbapi.py | 12 +-
lib/portage/_sets/files.py | 12 +-
lib/portage/_sets/libs.py | 2 +-
lib/portage/_sets/profiles.py | 8 +-
lib/portage/_sets/security.py | 2 +-
lib/portage/_sets/shell.py | 2 +-
lib/portage/cache/cache_errors.py | 11 +-
lib/portage/cache/ebuild_xattr.py | 6 +-
lib/portage/cache/flat_hash.py | 4 +-
lib/portage/cache/fs_template.py | 2 +-
lib/portage/cache/index/pkg_desc_index.py | 4 +-
lib/portage/cache/sql_template.py | 13 +-
lib/portage/cache/sqlite.py | 18 +-
lib/portage/cache/template.py | 20 +-
lib/portage/checksum.py | 4 +-
lib/portage/data.py | 4 +-
lib/portage/dbapi/_MergeProcess.py | 2 +-
lib/portage/dbapi/_VdbMetadataDelta.py | 2 +-
lib/portage/dbapi/__init__.py | 6 +-
lib/portage/dbapi/bintree.py | 52 ++--
lib/portage/dbapi/porttree.py | 18 +-
lib/portage/dbapi/vartree.py | 104 +++----
lib/portage/debug.py | 4 +-
lib/portage/dep/__init__.py | 25 +-
lib/portage/dep/_dnf.py | 14 +-
lib/portage/dep/_slot_operator.py | 5 +-
lib/portage/dep/dep_check.py | 30 +-
lib/portage/dep/soname/SonameAtom.py | 8 +-
lib/portage/dep/soname/multilib_category.py | 2 +-
lib/portage/elog/__init__.py | 8 +-
lib/portage/elog/mod_mail.py | 2 +-
lib/portage/elog/mod_mail_summary.py | 4 +-
lib/portage/elog/mod_save.py | 2 +-
lib/portage/elog/mod_save_summary.py | 2 +-
lib/portage/elog/mod_syslog.py | 2 +-
lib/portage/emaint/main.py | 23 +-
lib/portage/emaint/modules/binhost/binhost.py | 6 +-
lib/portage/emaint/modules/config/config.py | 4 +-
lib/portage/emaint/modules/logs/logs.py | 4 +-
lib/portage/emaint/modules/merges/merges.py | 22 +-
lib/portage/emaint/modules/move/move.py | 9 +-
lib/portage/emaint/modules/resume/resume.py | 6 +-
lib/portage/emaint/modules/sync/sync.py | 3 +-
lib/portage/emaint/modules/world/world.py | 6 +-
lib/portage/getbinpkg.py | 4 +-
lib/portage/gpkg.py | 12 +-
lib/portage/locks.py | 8 +-
lib/portage/output.py | 12 +-
.../package/ebuild/_config/LocationsManager.py | 2 +-
lib/portage/package/ebuild/_ipc/QueryCommand.py | 22 +-
lib/portage/package/ebuild/_metadata_invalid.py | 2 +-
.../ebuild/_parallel_manifest/ManifestProcess.py | 6 +-
lib/portage/package/ebuild/config.py | 22 +-
lib/portage/package/ebuild/digestcheck.py | 2 +-
lib/portage/package/ebuild/digestgen.py | 8 +-
lib/portage/package/ebuild/doebuild.py | 81 +++--
lib/portage/package/ebuild/fetch.py | 23 +-
lib/portage/package/ebuild/getmaskingstatus.py | 6 +-
lib/portage/package/ebuild/prepare_build_dirs.py | 20 +-
lib/portage/process.py | 10 +-
lib/portage/proxy/lazyimport.py | 2 +-
lib/portage/repository/config.py | 6 +-
.../repository/storage/hardlink_quarantine.py | 6 +-
lib/portage/repository/storage/hardlink_rcu.py | 4 +-
lib/portage/sync/controller.py | 12 +-
lib/portage/sync/modules/git/git.py | 33 +-
lib/portage/sync/modules/mercurial/mercurial.py | 8 +-
lib/portage/sync/modules/rsync/rsync.py | 48 ++-
lib/portage/sync/modules/svn/svn.py | 4 +-
lib/portage/sync/modules/webrsync/webrsync.py | 7 +-
lib/portage/sync/old_tree_timestamp.py | 6 +-
lib/portage/sync/syncbase.py | 23 +-
lib/portage/tests/__init__.py | 18 +-
lib/portage/tests/bin/test_doins.py | 4 +-
lib/portage/tests/bin/test_eapi7_ver_funcs.py | 24 +-
lib/portage/tests/dbapi/test_auxdb.py | 4 +-
lib/portage/tests/dbapi/test_fakedbapi.py | 4 +-
lib/portage/tests/dbapi/test_portdb_cache.py | 4 +-
lib/portage/tests/dep/testAtom.py | 18 +-
lib/portage/tests/dep/testStandalone.py | 4 +-
lib/portage/tests/dep/test_dep_getusedeps.py | 2 +-
lib/portage/tests/dep/test_get_operator.py | 2 +-
.../tests/dep/test_get_required_use_flags.py | 2 +-
lib/portage/tests/dep/test_isjustname.py | 4 +-
lib/portage/tests/dep/test_isvalidatom.py | 2 +-
lib/portage/tests/dep/test_match_from_list.py | 2 +-
lib/portage/tests/dep/test_use_reduce.py | 2 +-
lib/portage/tests/ebuild/test_fetch.py | 20 +-
lib/portage/tests/ebuild/test_spawn.py | 2 +-
.../tests/ebuild/test_use_expand_incremental.py | 4 +-
lib/portage/tests/emerge/test_config_protect.py | 2 +-
.../emerge/test_emerge_blocker_file_collision.py | 4 +-
lib/portage/tests/emerge/test_emerge_slot_abi.py | 4 +-
lib/portage/tests/emerge/test_simple.py | 6 +-
.../tests/env/config/test_PackageKeywordsFile.py | 2 +-
.../tests/env/config/test_PackageUseFile.py | 2 +-
.../tests/env/config/test_PortageModulesFile.py | 2 +-
lib/portage/tests/lint/test_import_modules.py | 2 +-
lib/portage/tests/news/test_NewsItem.py | 14 +-
lib/portage/tests/process/test_PipeLogger.py | 2 +-
lib/portage/tests/process/test_PopenProcess.py | 4 +-
.../tests/process/test_PopenProcessBlockingIO.py | 2 +-
lib/portage/tests/process/test_poll.py | 2 +-
lib/portage/tests/process/test_unshare_net.py | 2 +-
lib/portage/tests/resolver/ResolverPlayground.py | 48 +--
.../test_build_id_profile_format.py | 2 +-
.../binpkg_multi_instance/test_rebuilt_binaries.py | 2 +-
.../tests/resolver/soname/test_autounmask.py | 2 +-
.../tests/resolver/soname/test_downgrade.py | 4 +-
.../tests/resolver/soname/test_or_choices.py | 2 +-
.../tests/resolver/soname/test_reinstall.py | 2 +-
.../tests/resolver/soname/test_skip_update.py | 2 +-
.../soname/test_slot_conflict_reinstall.py | 16 +-
.../resolver/soname/test_slot_conflict_update.py | 2 +-
.../tests/resolver/soname/test_soname_provided.py | 2 +-
.../tests/resolver/soname/test_unsatisfiable.py | 2 +-
.../tests/resolver/soname/test_unsatisfied.py | 2 +-
.../tests/resolver/test_autounmask_binpkg_use.py | 2 +-
lib/portage/tests/resolver/test_bdeps.py | 2 +-
.../resolver/test_binary_pkg_ebuild_visibility.py | 2 +-
lib/portage/tests/resolver/test_changed_deps.py | 2 +-
...test_complete_if_new_subslot_without_revbump.py | 2 +-
.../resolver/test_disjunctive_depend_order.py | 2 +-
lib/portage/tests/resolver/test_multirepo.py | 4 +-
lib/portage/tests/resolver/test_package_tracker.py | 2 +-
.../tests/resolver/test_profile_default_eapi.py | 2 +-
.../tests/resolver/test_profile_package_set.py | 2 +-
.../test_regular_slot_change_without_revbump.py | 2 +-
lib/portage/tests/resolver/test_simple.py | 2 +-
lib/portage/tests/resolver/test_slot_abi.py | 6 +-
.../tests/resolver/test_slot_abi_downgrade.py | 4 +-
.../resolver/test_slot_change_without_revbump.py | 2 +-
.../tests/resolver/test_slot_conflict_rebuild.py | 8 +-
.../resolver/test_slot_operator_autounmask.py | 2 +-
.../tests/resolver/test_slot_operator_bdeps.py | 4 +-
.../tests/resolver/test_slot_operator_rebuild.py | 2 +-
.../tests/resolver/test_slot_operator_unsolved.py | 2 +-
lib/portage/tests/resolver/test_useflags.py | 2 +-
lib/portage/tests/sets/shell/testShell.py | 2 +-
lib/portage/tests/sync/test_sync_local.py | 16 +-
lib/portage/tests/unicode/test_string_format.py | 12 +-
lib/portage/tests/update/test_move_ent.py | 2 +-
lib/portage/tests/update/test_move_slot_ent.py | 2 +-
lib/portage/tests/update/test_update_dbentry.py | 2 +-
.../util/futures/asyncio/test_wakeup_fd_sigchld.py | 2 +-
lib/portage/tests/util/test_getconfig.py | 4 +-
lib/portage/tests/util/test_install_mask.py | 4 +-
lib/portage/tests/util/test_varExpand.py | 13 +-
lib/portage/tests/util/test_xattr.py | 2 +-
lib/portage/tests/versions/test_vercmp.py | 8 +-
lib/portage/update.py | 6 +-
| 2 +-
lib/portage/util/__init__.py | 40 ++-
lib/portage/util/_dyn_libs/LinkageMapELF.py | 29 +-
.../util/_dyn_libs/PreservedLibsRegistry.py | 2 +-
.../util/_dyn_libs/display_preserved_libs.py | 8 +-
lib/portage/util/_dyn_libs/soname_deps.py | 4 +-
lib/portage/util/_info_files.py | 2 +-
lib/portage/util/_path.py | 4 +-
lib/portage/util/_pty.py | 2 +-
lib/portage/util/_xattr.py | 10 +-
lib/portage/util/changelog.py | 2 +-
lib/portage/util/configparser.py | 4 +-
lib/portage/util/digraph.py | 8 +-
lib/portage/util/env_update.py | 20 +-
lib/portage/util/futures/executor/fork.py | 6 +-
lib/portage/util/hooks.py | 2 +-
lib/portage/util/locale.py | 6 +-
lib/portage/util/movefile.py | 34 +--
lib/portage/util/whirlpool.py | 6 +-
lib/portage/versions.py | 4 +-
lib/portage/xml/metadata.py | 10 +-
runtests | 17 +-
setup.py | 33 +-
251 files changed, 1398 insertions(+), 1590 deletions(-)
diff --git a/NEWS b/NEWS
index 23a652278..60782fbb8 100644
--- a/NEWS
+++ b/NEWS
@@ -5,6 +5,8 @@ Features:
* emerge: Show time taken to calculate dependency resolution with
emerge --verbose.
+* cleanups: Use flynt on the codebase to upgrade to Python f-strings everywhere.
+
Bug fixes:
* ebuild: the PATH variable exported to ebuilds has been changed:
The PATH setting from /etc/profile.env is appended to portage-internal
diff --git a/bin/archive-conf b/bin/archive-conf
index b068212b4..b2c381bc6 100755
--- a/bin/archive-conf
+++ b/bin/archive-conf
@@ -62,7 +62,7 @@ def archive_conf():
contents = open(filename)
except OSError as e:
print(
- "archive-conf: Unable to open {}: {}".format(filename, e),
+            f"archive-conf: Unable to open {filename}: {e}",
file=sys.stderr,
)
sys.exit(1)
diff --git a/bin/binhost-snapshot b/bin/binhost-snapshot
index 8c3225216..5fac0b46c 100755
--- a/bin/binhost-snapshot
+++ b/bin/binhost-snapshot
@@ -66,7 +66,7 @@ def parse_args(argv):
options, args = parser.parse_known_args(argv[1:])
if len(args) != 4:
- parser.error("Required 4 arguments, got %d" % (len(args),))
+ parser.error(f"Required 4 arguments, got {len(args)}")
return parser, options, args
@@ -78,20 +78,20 @@ def main(argv):
src_pkgs_index = os.path.join(src_pkg_dir, "Packages")
if not os.path.isdir(src_pkg_dir):
- parser.error("src_pkg_dir is not a directory: '{}'".format(src_pkg_dir))
+ parser.error(f"src_pkg_dir is not a directory: '{src_pkg_dir}'")
if not os.path.isfile(src_pkgs_index):
parser.error(
"src_pkg_dir does not contain a "
- + "'Packages' index: '{}'".format(src_pkg_dir)
+ + f"'Packages' index: '{src_pkg_dir}'"
)
parse_result = urlparse(snapshot_uri)
if not (parse_result.scheme and parse_result.netloc and parse_result.path):
- parser.error("snapshot_uri is not a valid URI: '{}'".format(snapshot_uri))
+ parser.error(f"snapshot_uri is not a valid URI: '{snapshot_uri}'")
if os.path.isdir(snapshot_dir):
- parser.error("snapshot_dir already exists: '%s'" % snapshot_dir)
+ parser.error(f"snapshot_dir already exists: '{snapshot_dir}'")
try:
os.makedirs(os.path.dirname(snapshot_dir))
@@ -108,7 +108,7 @@ def main(argv):
except OSError:
pass
if not os.path.isdir(binhost_dir):
- parser.error("binhost_dir could not be created: '%s'" % binhost_dir)
+ parser.error(f"binhost_dir could not be created: '{binhost_dir}'")
cp_opts = "RP"
if options.hardlinks == "n":
@@ -147,7 +147,7 @@ def main(argv):
else:
if not line.strip():
# end of header
- outfile.write("URI: %s\n\n" % snapshot_uri)
+ outfile.write(f"URI: {snapshot_uri}\n\n")
break
outfile.write(line)
diff --git a/bin/chpathtool.py b/bin/chpathtool.py
index 8c67a2e7f..bd7493c45 100755
--- a/bin/chpathtool.py
+++ b/bin/chpathtool.py
@@ -71,7 +71,7 @@ def chpath_inplace(filename, is_text_file, old, new):
try:
orig_mode = stat.S_IMODE(os.lstat(filename).st_mode)
except OSError as e:
- sys.stderr.write("{}: {}\n".format(e, filename))
+        sys.stderr.write(f"{e}: {filename}\n")
return
temp_mode = 0o200 | orig_mode
os.chmod(filename, temp_mode)
diff --git a/bin/clean_locks b/bin/clean_locks
index 2a5e4ff0f..cf88365ee 100755
--- a/bin/clean_locks
+++ b/bin/clean_locks
@@ -23,8 +23,8 @@ if not sys.argv[1:] or "--help" in sys.argv or "-h" in sys.argv:
print("of the locks, even if we can't establish if they are in use.")
print("Please attempt cleaning without force first.")
print()
- print("{} {}/.locks".format(sys.argv[0], portage.settings["DISTDIR"]))
- print("{} --force {}/.locks".format(sys.argv[0], portage.settings["DISTDIR"]))
+ print(f"{sys.argv[0]} {portage.settings['DISTDIR']}/.locks")
+ print(f"{sys.argv[0]} --force {portage.settings['DISTDIR']}/.locks")
print()
sys.exit(1)
@@ -42,7 +42,7 @@ for x in sys.argv[1:]:
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
- print("!!! %s is not a directory or does not exist" % x)
+ print(f"!!! {x} is not a directory or does not exist")
else:
raise
sys.exit(e.errno)
diff --git a/bin/dispatch-conf b/bin/dispatch-conf
index e5a10473d..9ca711b47 100755
--- a/bin/dispatch-conf
+++ b/bin/dispatch-conf
@@ -415,14 +415,14 @@ class dispatch:
os.unlink(mrgconf)
break
else:
- raise AssertionError("Invalid Input: %s" % c)
+ raise AssertionError(f"Invalid Input: {c}")
if auto_zapped:
print()
print(" One or more updates are frozen and have been automatically zapped:")
print()
for frozen in auto_zapped:
- writemsg_stdout(" * '%s'\n" % frozen, noiselevel=-1)
+ writemsg_stdout(f" * '{frozen}'\n", noiselevel=-1)
print()
perform_conf_update_session_hooks("post-session")
diff --git a/bin/dohtml.py b/bin/dohtml.py
index 7aebd2af7..4ebee7779 100755
--- a/bin/dohtml.py
+++ b/bin/dohtml.py
@@ -53,9 +53,9 @@ def dofile(src, dst):
def eqawarn(lines):
- cmd = "source '%s/isolated-functions.sh' ; " % os.environ["PORTAGE_BIN_PATH"]
+ cmd = f"source '{os.environ['PORTAGE_BIN_PATH']}/isolated-functions.sh' ; "
for line in lines:
- cmd += 'eqawarn "%s" ; ' % line
+ cmd += f'eqawarn "{line}" ; '
os.spawnlp(os.P_WAIT, "bash", "bash", "-c", cmd)
@@ -96,7 +96,7 @@ def install(basename, dirname, options, prefix=""):
).rstrip(os.sep)
if not os.path.exists(fullpath):
- sys.stderr.write("!!! dohtml: %s does not exist\n" % fullpath)
+ sys.stderr.write(f"!!! dohtml: {fullpath} does not exist\n")
return False
elif os.path.isfile(fullpath):
ext = os.path.splitext(basename)[1][1:]
@@ -119,8 +119,7 @@ def install(basename, dirname, options, prefix=""):
i = _unicode_decode(i, errors="strict")
except UnicodeDecodeError:
writemsg(
- "dohtml: argument is not encoded as UTF-8: %s\n"
- % _unicode_decode(i),
+ f"dohtml: argument is not encoded as UTF-8: {_unicode_decode(i)}\n",
noiselevel=-1,
)
sys.exit(1)
@@ -200,7 +199,7 @@ def parse_args():
argv[x] = _unicode_decode(arg, errors="strict")
except UnicodeDecodeError:
writemsg(
- "dohtml: argument is not encoded as UTF-8: %s\n" % _unicode_decode(arg),
+ f"dohtml: argument is not encoded as UTF-8: {_unicode_decode(arg)}\n",
noiselevel=-1,
)
sys.exit(1)
@@ -271,9 +270,9 @@ def main():
success |= install(basename, dirname, options)
for x in skipped_directories:
- eqawarn(["QA Notice: dohtml on directory '%s' without recursion option" % x])
+ eqawarn([f"QA Notice: dohtml on directory '{x}' without recursion option"])
for x in skipped_files:
- eqawarn(["dohtml: skipped file '%s'" % x])
+ eqawarn([f"dohtml: skipped file '{x}'"])
if success:
retcode = 0
diff --git a/bin/doins.py b/bin/doins.py
index fba8d7e55..67b54d527 100644
--- a/bin/doins.py
+++ b/bin/doins.py
@@ -36,7 +36,7 @@ def _warn(helper, msg):
helper: helper executable name.
msg: Message to be output.
"""
- print("!!! {}: {}\n".format(helper, msg), file=sys.stderr)
+ print(f"!!! {helper}: {msg}\n", file=sys.stderr)
def _parse_group(group):
@@ -111,7 +111,7 @@ def _parse_install_options(
# Because parsing '--mode' option is partially supported. If unknown
# arg for --mode is passed, namespace.mode is set to None.
if remaining or namespace.mode is None:
- _warn(helper, "Unknown install options: {}, {!r}".format(options, remaining))
+ _warn(helper, f"Unknown install options: {options}, {remaining!r}")
if is_strict:
sys.exit(1)
_warn(
@@ -253,7 +253,7 @@ class _InsInProcessInstallRunner:
):
return True
- _warn(self._helper, "{} and {} are same file.".format(source, dest))
+ _warn(self._helper, f"{source} and {dest} are same file.")
return False
@@ -534,7 +534,7 @@ def _install_dir(opts, install_runner, source):
"""
if not opts.recursive:
if opts.helper == "dodoc":
- _warn(opts.helper, "{} is a directory".format(source))
+ _warn(opts.helper, f"{source} is a directory")
return False
# Neither success nor fail. Return None to indicate skipped.
return None
diff --git a/bin/ebuild b/bin/ebuild
index 8f73b8684..dab82ab2e 100755
--- a/bin/ebuild
+++ b/bin/ebuild
@@ -96,7 +96,7 @@ try:
opts, pargs = parser.parse_known_args(args=sys.argv[1:])
def err(txt):
- portage.writemsg("ebuild: {}\n".format(txt), noiselevel=-1)
+ portage.writemsg(f"ebuild: {txt}\n", noiselevel=-1)
sys.exit(1)
if opts.version:
@@ -146,7 +146,7 @@ try:
pf = os.path.basename(ebuild)[:-7]
if pf is None:
- err("{}: does not end with '.ebuild'".format(ebuild))
+ err(f"{ebuild}: does not end with '.ebuild'")
if not os.path.isabs(ebuild):
mycwd = os.getcwd()
@@ -173,7 +173,7 @@ try:
portdir_overlay + " " + _shell_quote(ebuild_portdir)
)
- print("Appending %s to PORTDIR_OVERLAY..." % ebuild_portdir)
+ print(f"Appending {ebuild_portdir} to PORTDIR_OVERLAY...")
portage._reset_legacy_globals()
myrepo = None
@@ -181,10 +181,10 @@ try:
myrepo = portage.portdb.getRepositoryName(ebuild_portdir)
if not os.path.exists(ebuild):
- err("{}: does not exist".format(ebuild))
+ err(f"{ebuild}: does not exist")
ebuild_split = ebuild.split("/")
- cpv = "{}/{}".format(ebuild_split[-3], pf)
+ cpv = f"{ebuild_split[-3]}/{pf}"
with open(
_unicode_encode(ebuild, encoding=_encodings["fs"], errors="strict"),
@@ -195,7 +195,7 @@ try:
if eapi is None:
eapi = "0"
if not portage.catpkgsplit(cpv, eapi=eapi):
- err("{}: {}: does not follow correct package syntax".format(ebuild, cpv))
+ err(f"{ebuild}: {cpv}: does not follow correct package syntax")
if ebuild.startswith(vdb_path):
mytree = "vartree"
@@ -206,7 +206,7 @@ try:
)
if os.path.realpath(portage_ebuild) != ebuild:
- err("Portage seems to think that {} is at {}".format(cpv, portage_ebuild))
+ err(f"Portage seems to think that {cpv} is at {portage_ebuild}")
else:
mytree = "porttree"
@@ -215,7 +215,7 @@ try:
portage_ebuild = portage.portdb.findname(cpv, myrepo=myrepo)
if not portage_ebuild or portage_ebuild != ebuild:
- err("{}: does not seem to have a valid PORTDIR structure".format(ebuild))
+ err(f"{ebuild}: does not seem to have a valid PORTDIR structure")
if len(pargs) > 1 and "config" in pargs:
other_phases = set(pargs)
@@ -381,7 +381,7 @@ try:
) % (tmpsettings["PF"],)
msg = textwrap.wrap(msg, 70)
for x in msg:
- portage.writemsg(">>> %s\n" % x)
+ portage.writemsg(f">>> {x}\n")
if ebuild_changed:
open(
@@ -418,13 +418,13 @@ try:
msg = textwrap.wrap(str(e), 70)
del e
for x in msg:
- portage.writemsg("!!! %s\n" % x, noiselevel=-1)
+ portage.writemsg(f"!!! {x}\n", noiselevel=-1)
a = 1
except PortagePackageException as e:
- portage.writemsg("!!! {}\n".format(e), noiselevel=-1)
+ portage.writemsg(f"!!! {e}\n", noiselevel=-1)
a = 1
except PermissionDenied as e:
- portage.writemsg("!!! Permission Denied: {}\n".format(e), noiselevel=-1)
+ portage.writemsg(f"!!! Permission Denied: {e}\n", noiselevel=-1)
a = 1
if a is None:
print("Could not run the required binary?")
diff --git a/bin/ebuild-ipc.py b/bin/ebuild-ipc.py
index fc632e015..fa61d1ccc 100755
--- a/bin/ebuild-ipc.py
+++ b/bin/ebuild-ipc.py
@@ -239,7 +239,7 @@ try:
if not buf:
portage.util.writemsg_level(
- "ebuild-ipc: {}\n".format(portage.localization._("read failed")),
+ f"ebuild-ipc: {portage.localization._('read failed')}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -254,7 +254,7 @@ try:
# The pickle module can raise practically
# any exception when given corrupt data.
portage.util.writemsg_level(
- "ebuild-ipc: {}\n".format(e), level=logging.ERROR, noiselevel=-1
+ f"ebuild-ipc: {e}\n", level=logging.ERROR, noiselevel=-1
)
else:
diff --git a/bin/egencache b/bin/egencache
index 8c18edaeb..090b9b1ed 100755
--- a/bin/egencache
+++ b/bin/egencache
@@ -240,7 +240,7 @@ try:
jobs = -1
if jobs < 1:
- parser.error("Invalid: --jobs='{}'".format(options.jobs))
+ parser.error(f"Invalid: --jobs='{options.jobs}'")
options.jobs = jobs
@@ -255,7 +255,7 @@ try:
if load_average <= 0.0:
parser.error(
- "Invalid: --load-average='{}'".format(options.load_average)
+ f"Invalid: --load-average='{options.load_average}'"
)
options.load_average = load_average
@@ -266,27 +266,27 @@ try:
options.config_root = options.portage_configroot
if options.config_root is not None and not os.path.isdir(options.config_root):
parser.error(
- "Not a directory: --config-root='{}'".format(options.config_root)
+ f"Not a directory: --config-root='{options.config_root}'"
)
if options.cache_dir is not None:
if not os.path.isdir(options.cache_dir):
parser.error(
- "Not a directory: --cache-dir='{}'".format(options.cache_dir)
+ f"Not a directory: --cache-dir='{options.cache_dir}'"
)
if not os.access(options.cache_dir, os.W_OK):
parser.error(
- "Write access denied: --cache-dir='{}'".format(options.cache_dir)
+ f"Write access denied: --cache-dir='{options.cache_dir}'"
)
for atom in args:
try:
atom = portage.dep.Atom(atom)
except portage.exception.InvalidAtom:
- parser.error("Invalid atom: {}".format(atom))
+ parser.error(f"Invalid atom: {atom}")
if not isjustname(atom):
- parser.error("Atom is too specific: {}".format(atom))
+ parser.error(f"Atom is too specific: {atom}")
if options.update_use_local_desc:
try:
@@ -406,7 +406,7 @@ try:
try:
chf = trg_cache.validation_chf
- metadata["_%s_" % chf] = getattr(ebuild_hash, chf)
+ metadata[f"_{chf}_"] = getattr(ebuild_hash, chf)
try:
trg_cache[cpv] = metadata
except StatCollision as sc:
@@ -434,7 +434,7 @@ try:
except OSError as e:
self.returncode |= 1
writemsg_level(
- "{} writing target: {}\n".format(cpv, e),
+ f"{cpv} writing target: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -447,7 +447,7 @@ try:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "{} writing target: {}\n".format(cpv, ce),
+ f"{cpv} writing target: {ce}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -472,7 +472,7 @@ try:
if cp is None:
self.returncode |= 1
writemsg_level(
- "Unable to parse cp for '{}'\n".format(cpv),
+ f"Unable to parse cp for '{cpv}'\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -482,7 +482,7 @@ try:
self.returncode |= 1
writemsg_level(
"Error listing cache entries for "
- + "'{}': {}, continuing...\n".format(trg_cache.location, ce),
+ + f"'{trg_cache.location}': {ce}, continuing...\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -495,7 +495,7 @@ try:
if cp is None:
self.returncode |= 1
writemsg_level(
- "Unable to parse cp for '{}'\n".format(cpv),
+ f"Unable to parse cp for '{cpv}'\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -507,7 +507,7 @@ try:
self.returncode |= 1
writemsg_level(
"Error listing cache entries for "
- + "'{}': {}, continuing...\n".format(trg_cache.location, ce),
+ + f"'{trg_cache.location}': {ce}, continuing...\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -516,7 +516,7 @@ try:
self.returncode |= 1
for cp in sorted(cp_missing):
writemsg_level(
- "No ebuilds or cache entries found for '{}'\n".format(cp),
+ f"No ebuilds or cache entries found for '{cp}'\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -531,7 +531,7 @@ try:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "{} deleting stale cache: {}\n".format(k, ce),
+ f"{k} deleting stale cache: {ce}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -542,7 +542,7 @@ try:
except CacheError as ce:
self.returncode |= 1
writemsg_level(
- "committing target: {}\n".format(ce),
+ f"committing target: {ce}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -604,9 +604,7 @@ try:
version = self._portdb.xmatch(
"bestmatch-visible",
Atom(
- "{}{}{}".format(
- pkg_desc.cp, _repo_separator, self._repo_config.name
- )
+ f"{pkg_desc.cp}{_repo_separator}{self._repo_config.name}"
),
)
if not version:
@@ -694,9 +692,7 @@ try:
except OSError as e:
if not self._preserve_comments or os.path.isfile(desc_path):
writemsg_level(
- "ERROR: failed to open output file {}: {}\n".format(
- desc_path, e
- ),
+ f"ERROR: failed to open output file {desc_path}: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -708,7 +704,7 @@ try:
# preserve_comments mode now.
writemsg_level(
"WARNING: --preserve-comments enabled, but "
- + "output file not found: {}\n".format(desc_path),
+ + f"output file not found: {desc_path}\n",
level=logging.WARNING,
noiselevel=-1,
)
@@ -724,9 +720,7 @@ try:
)
except OSError as e:
writemsg_level(
- "ERROR: failed to open output file {}: {}\n".format(
- desc_path, e
- ),
+ f"ERROR: failed to open output file {desc_path}: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -804,7 +798,7 @@ try:
pass
except (ExpatError, OSError) as e:
writemsg_level(
- "ERROR: failed parsing {}/metadata.xml: {}\n".format(cp, e),
+ f"ERROR: failed parsing {cp}/metadata.xml: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -814,7 +808,7 @@ try:
usedict = parse_metadata_use(metadata)
except portage.exception.ParseError as e:
writemsg_level(
- "ERROR: failed parsing {}/metadata.xml: {}\n".format(cp, e),
+ f"ERROR: failed parsing {cp}/metadata.xml: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -865,7 +859,7 @@ try:
)
resdesc = resdict[reskeys[resatoms[-1]]]
- output.write("{}:{} - {}\n".format(cp, flag, resdesc))
+ output.write(f"{cp}:{flag} - {resdesc}\n")
output.close()
if prev_mtime is not None and prev_md5 == portage.checksum.perform_md5(
@@ -907,7 +901,7 @@ try:
# If work-tree is not passed, Git tries to use the shared
# parent of the current directory and the ${GIT_DIR}, which can
# be outside the root of the checkout.
- self._work_tree = "--work-tree=%s" % self._repo_path
+ self._work_tree = f"--work-tree={self._repo_path}"
@staticmethod
def grab(cmd):
@@ -947,11 +941,7 @@ try:
)
except OSError as e:
writemsg_level(
- "ERROR: failed to open ChangeLog for %s: %s\n"
- % (
- cp,
- e,
- ),
+ f"ERROR: failed to open ChangeLog for {cp}: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -997,7 +987,7 @@ try:
"--no-renames",
"--format=%ct %cN <%cE>%n%B",
"--root",
- "--relative={}".format(cp),
+ f"--relative={cp}",
"-r",
c,
"--",
@@ -1037,11 +1027,7 @@ try:
changed.append(ChangeLogTypeSort("", f[1]))
else:
writemsg_level(
- "ERROR: unexpected git file status for %s: %s\n"
- % (
- cp,
- f,
- ),
+ f"ERROR: unexpected git file status for {cp}: {f}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -1059,7 +1045,7 @@ try:
# Reverse the sort order for headers.
for c in reversed(changed):
if c.startswith("+") and c.endswith(".ebuild"):
- output.write("*{} ({})\n".format(c[1:-7], date))
+ output.write(f"*{c[1:-7]} ({date})\n")
wroteheader = True
if wroteheader:
output.write("\n")
@@ -1087,7 +1073,7 @@ try:
self._wrapper.break_on_hyphens = False
output.write(
self._wrapper.fill(
- "{}; {} {}:".format(date, author, ", ".join(changed))
+ f"{date}; {author} {', '.join(changed)}:"
)
)
# but feel free to break commit messages there
@@ -1191,7 +1177,7 @@ try:
repo_path = settings.repositories.treemap.get(options.repo)
if repo_path is None:
- parser.error("Unable to locate repository named '{}'".format(options.repo))
+ parser.error(f"Unable to locate repository named '{options.repo}'")
return 1
repo_config = settings.repositories.get_repo_for_location(repo_path)
@@ -1231,9 +1217,7 @@ try:
if not os.access(settings["PORTAGE_DEPCACHEDIR"], os.W_OK):
writemsg_level(
"ecachegen: error: "
- + "write access denied: {}\n".format(
- settings["PORTAGE_DEPCACHEDIR"]
- ),
+ + f"write access denied: {settings['PORTAGE_DEPCACHEDIR']}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -1351,12 +1335,8 @@ try:
)
if not options.external_cache_only:
msg = [
- "WARNING: Repository is not writable: {}".format(
- repo_config.location
- ),
- " Using cache directory instead: {}".format(
- writable_location
- ),
+ f"WARNING: Repository is not writable: {repo_config.location}",
+ f" Using cache directory instead: {writable_location}",
]
msg = "".join(line + "\n" for line in msg)
writemsg_level(msg, level=logging.WARNING, noiselevel=-1)
@@ -1424,7 +1404,7 @@ try:
try:
portage.util.write_atomic(
timestamp_path,
- time.strftime("%s\n" % TIMESTAMP_FORMAT, time.gmtime()),
+ time.strftime(f"{TIMESTAMP_FORMAT}\n", time.gmtime()),
)
except (OSError, portage.exception.PortageException):
ret.append(os.EX_IOERR)
diff --git a/bin/emerge b/bin/emerge
index a16c5039a..cb1c36eea 100755
--- a/bin/emerge
+++ b/bin/emerge
@@ -57,7 +57,7 @@ try:
try:
retval = emerge_main()
except PermissionDenied as e:
- sys.stderr.write("Permission denied: '%s'\n" % str(e))
+ sys.stderr.write(f"Permission denied: '{str(e)}'\n")
sys.exit(e.errno)
except IsADirectory as e:
sys.stderr.write(
@@ -67,7 +67,7 @@ try:
)
sys.exit(e.errno)
except ParseError as e:
- sys.stderr.write("%s\n" % str(e))
+ sys.stderr.write(f"{str(e)}\n")
sys.exit(1)
except (KeyboardInterrupt, SystemExit):
raise
diff --git a/bin/filter-bash-environment.py b/bin/filter-bash-environment.py
index 86cb22948..8114490b0 100755
--- a/bin/filter-bash-environment.py
+++ b/bin/filter-bash-environment.py
@@ -127,7 +127,7 @@ if __name__ == "__main__":
+ "intact. The PATTERN is a space separated list of variable names"
+ " and it supports python regular expression syntax."
)
- usage = "usage: %s PATTERN" % os.path.basename(sys.argv[0])
+ usage = f"usage: {os.path.basename(sys.argv[0])} PATTERN"
args = sys.argv[1:]
if "-h" in args or "--help" in args:
diff --git a/bin/fixpackages b/bin/fixpackages
index 6f78b174d..6f88bea7c 100755
--- a/bin/fixpackages
+++ b/bin/fixpackages
@@ -48,9 +48,9 @@ try:
os.nice(int(mysettings.get("PORTAGE_NICENESS", "0")))
except (OSError, ValueError) as e:
portage.writemsg(
- "!!! Failed to change nice value to '%s'\n" % mysettings["PORTAGE_NICENESS"]
+ f"!!! Failed to change nice value to '{mysettings['PORTAGE_NICENESS']}'\n"
)
- portage.writemsg("!!! %s\n" % str(e))
+ portage.writemsg(f"!!! {str(e)}\n")
del e
_global_updates(mytrees, mtimedb["updates"], if_mtime_changed=False)
diff --git a/bin/glsa-check b/bin/glsa-check
index 753c13891..436038702 100755
--- a/bin/glsa-check
+++ b/bin/glsa-check
@@ -211,7 +211,7 @@ if "affected" in params:
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- "invalid GLSA: {} (error message was: {})\n".format(x, e)
+ f"invalid GLSA: {x} (error message was: {e})\n"
)
continue
if myglsa.isVulnerable():
@@ -250,7 +250,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr, encoding="utf-8"):
myglsa = Glsa(myid, portage.settings, vardb, portdb)
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
- fd2.write("invalid GLSA: {} (error message was: {})\n".format(myid, e))
+ fd2.write(f"invalid GLSA: {myid} (error message was: {e})\n")
continue
if myglsa.isInjected():
status = "[A]"
@@ -320,7 +320,7 @@ if mode in ["dump", "fix", "inject", "pretend"]:
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- "invalid GLSA: {} (error message was: {})\n".format(myid, e)
+ f"invalid GLSA: {myid} (error message was: {e})\n"
)
continue
if mode == "dump":
@@ -414,7 +414,7 @@ if mode == "test":
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- "invalid GLSA: {} (error message was: {})\n".format(myid, e)
+ f"invalid GLSA: {myid} (error message was: {e})\n"
)
continue
if myglsa.isVulnerable():
@@ -450,13 +450,13 @@ if mode == "mail":
else:
myfrom = "glsa-check"
- mysubject = "[glsa-check] Summary for %s" % socket.getfqdn()
+ mysubject = f"[glsa-check] Summary for {socket.getfqdn()}"
# need a file object for summarylist()
myfd = BytesIO()
- line = "GLSA Summary report for host %s\n" % socket.getfqdn()
+ line = f"GLSA Summary report for host {socket.getfqdn()}\n"
myfd.write(line.encode("utf-8"))
- line = "(Command was: %s)\n\n" % " ".join(sys.argv)
+ line = f"(Command was: {' '.join(sys.argv)})\n\n"
myfd.write(line.encode("utf-8"))
summarylist(glsalist, fd1=myfd, fd2=myfd)
summary = myfd.getvalue().decode("utf-8")
@@ -469,7 +469,7 @@ if mode == "mail":
except (GlsaTypeException, GlsaFormatException) as e:
if verbose:
sys.stderr.write(
- "invalid GLSA: {} (error message was: {})\n".format(myid, e)
+ f"invalid GLSA: {myid} (error message was: {e})\n"
)
continue
myfd = BytesIO()
diff --git a/bin/gpkg-helper.py b/bin/gpkg-helper.py
index 4481f4b93..ddb13e454 100755
--- a/bin/gpkg-helper.py
+++ b/bin/gpkg-helper.py
@@ -18,19 +18,19 @@ def command_compose(args):
if len(args) != 4:
sys.stderr.write(usage)
- sys.stderr.write("4 arguments are required, got %s\n" % len(args))
+ sys.stderr.write(f"4 arguments are required, got {len(args)}\n")
return 1
basename, binpkg_path, metadata_dir, image_dir = args
if not os.path.isdir(metadata_dir):
sys.stderr.write(usage)
- sys.stderr.write("Argument 3 is not a directory: '%s'\n" % metadata_dir)
+ sys.stderr.write(f"Argument 3 is not a directory: '{metadata_dir}'\n")
return 1
if not os.path.isdir(image_dir):
sys.stderr.write(usage)
- sys.stderr.write("Argument 4 is not a directory: '%s'\n" % image_dir)
+ sys.stderr.write(f"Argument 4 is not a directory: '{image_dir}'\n")
return 1
try:
@@ -51,7 +51,7 @@ def main(argv):
valid_commands = ("compress",)
description = "Perform metadata operations on a binary package."
- usage = "usage: %s COMMAND [args]" % os.path.basename(argv[0])
+ usage = f"usage: {os.path.basename(argv[0])} COMMAND [args]"
parser = argparse.ArgumentParser(description=description, usage=usage)
options, args = parser.parse_known_args(argv[1:])
@@ -62,12 +62,12 @@ def main(argv):
command = args[0]
if command not in valid_commands:
- parser.error("invalid command: '%s'" % command)
+ parser.error(f"invalid command: '{command}'")
if command == "compress":
rval = command_compose(args[1:])
else:
- raise AssertionError("invalid command: '%s'" % command)
+ raise AssertionError(f"invalid command: '{command}'")
return rval
diff --git a/bin/portageq b/bin/portageq
index 20a2f6646..97eb849a5 100755
--- a/bin/portageq
+++ b/bin/portageq
@@ -138,7 +138,7 @@ try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo)
except portage.exception.InvalidAtom:
if atom_validate_strict:
- portage.writemsg("ERROR: Invalid atom: '%s'\n" % argv[1], noiselevel=-1)
+ portage.writemsg(f"ERROR: Invalid atom: '{argv[1]}'\n", noiselevel=-1)
return 2
else:
atom = argv[1]
@@ -147,7 +147,7 @@ try:
try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
except portage.exception.InvalidAtom as e:
- warnings.append("QA Notice: {}: {}".format("has_version", e))
+ warnings.append(f"QA Notice: has_version: {e}")
atom = eval_atom_use(atom)
if warnings:
@@ -162,7 +162,7 @@ try:
except KeyError:
return 1
except portage.exception.InvalidAtom:
- portage.writemsg("ERROR: Invalid atom: '%s'\n" % argv[1], noiselevel=-1)
+ portage.writemsg(f"ERROR: Invalid atom: '{argv[1]}'\n", noiselevel=-1)
return 2
docstrings[
@@ -185,7 +185,7 @@ try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo)
except portage.exception.InvalidAtom:
if atom_validate_strict:
- portage.writemsg("ERROR: Invalid atom: '%s'\n" % argv[1], noiselevel=-1)
+ portage.writemsg(f"ERROR: Invalid atom: '{argv[1]}'\n", noiselevel=-1)
return 2
else:
atom = argv[1]
@@ -194,7 +194,7 @@ try:
try:
atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
except portage.exception.InvalidAtom as e:
- warnings.append("QA Notice: {}: {}".format("best_version", e))
+ warnings.append(f"QA Notice: best_version: {e}")
atom = eval_atom_use(atom)
if warnings:
@@ -221,7 +221,7 @@ try:
try:
for pack in argv[1:]:
mylist = portage.db[argv[0]]["vartree"].dbapi.match(pack)
- print("{}:{}".format(pack, portage.best(mylist)))
+ print(f"{pack}:{portage.best(mylist)}")
except KeyError:
return 1
@@ -242,7 +242,7 @@ try:
metakeys = argv[3:]
type_map = {"ebuild": "porttree", "binary": "bintree", "installed": "vartree"}
if pkgtype not in type_map:
- print("Unrecognized package type: '%s'" % pkgtype, file=sys.stderr)
+ print(f"Unrecognized package type: '{pkgtype}'", file=sys.stderr)
return 1
trees = portage.db
repo = portage.dep.dep_getrepo(pkgspec)
@@ -251,39 +251,37 @@ try:
values = trees[eroot][type_map[pkgtype]].dbapi.aux_get(
pkgspec, metakeys, myrepo=repo
)
- writemsg_stdout("".join("%s\n" % x for x in values), noiselevel=-1)
+ writemsg_stdout("".join(f"{x}\n" for x in values), noiselevel=-1)
except KeyError:
- print("Package not found: '%s'" % pkgspec, file=sys.stderr)
+ print(f"Package not found: '{pkgspec}'", file=sys.stderr)
return 1
docstrings[
"metadata"
- ] = """
- <eroot> <pkgtype> <category/package> [<key>]+
- Returns metadata values for the specified package.
- Available keys: %s
- """ % ",".join(
- sorted(x for x in portage.auxdbkeys)
- )
+ ] = f"""
+\t<eroot> <pkgtype> <category/package> [<key>]+
+\tReturns metadata values for the specified package.
+\tAvailable keys: {','.join(sorted(x for x in portage.auxdbkeys))}
+\t"""
metadata.__doc__ = docstrings["metadata"]
@uses_eroot
def contents(argv):
if len(argv) != 2:
- print("ERROR: expected 2 parameters, got %d!" % len(argv))
+ print(f"ERROR: expected 2 parameters, got {len(argv)}!")
return 2
root, cpv = argv
vartree = portage.db[root]["vartree"]
if not vartree.dbapi.cpv_exists(cpv):
- sys.stderr.write("Package not found: '%s'\n" % cpv)
+ sys.stderr.write(f"Package not found: '{cpv}'\n")
return 1
cat, pkg = portage.catsplit(cpv)
db = portage.dblink(
cat, pkg, root, vartree.settings, treetype="vartree", vartree=vartree
)
writemsg_stdout(
- "".join("%s\n" % x for x in sorted(db.getcontents())), noiselevel=-1
+ "".join(f"{x}\n" for x in sorted(db.getcontents())), noiselevel=-1
)
docstrings[
@@ -341,10 +339,10 @@ try:
msg = []
for pkg, owned_files in owners.items():
cpv = pkg.mycpv
- msg.append("%s\n" % cpv)
+ msg.append(f"{cpv}\n")
for f in sorted(owned_files):
f_abs = os.path.join(root, f.lstrip(os.path.sep))
- msg.append("\t{}\n".format(f_abs))
+ msg.append(f"\t{f_abs}\n")
orphan_abs_paths.discard(f_abs)
if orphan_basenames:
orphan_basenames.discard(os.path.basename(f_abs))
@@ -359,7 +357,7 @@ try:
msg = []
msg.append("None of the installed packages claim these files:\n")
for f in orphans:
- msg.append("\t{}\n".format(f))
+ msg.append(f"\t{f}\n")
sys.stderr.write("".join(msg))
sys.stderr.flush()
@@ -381,7 +379,7 @@ try:
@uses_eroot
def is_protected(argv):
if len(argv) != 2:
- sys.stderr.write("ERROR: expected 2 parameters, got %d!\n" % len(argv))
+ sys.stderr.write(f"ERROR: expected 2 parameters, got {len(argv)}!\n")
sys.stderr.flush()
return 2
@@ -434,7 +432,7 @@ try:
@uses_eroot
def filter_protected(argv):
if len(argv) != 1:
- sys.stderr.write("ERROR: expected 1 parameter, got %d!\n" % len(argv))
+ sys.stderr.write(f"ERROR: expected 1 parameter, got {len(argv)}!\n")
sys.stderr.flush()
return 2
@@ -480,7 +478,7 @@ try:
continue
if protect_obj.isprotected(f):
- out.write("%s\n" % filename)
+ out.write(f"{filename}\n")
out.flush()
if errors:
@@ -512,7 +510,7 @@ try:
type_map = {"ebuild": "porttree", "binary": "bintree", "installed": "vartree"}
if pkgtype not in type_map:
- writemsg("Unrecognized package type: '%s'\n" % pkgtype, noiselevel=-1)
+ writemsg(f"Unrecognized package type: '{pkgtype}'\n", noiselevel=-1)
return 2
eroot = argv[0]
@@ -521,7 +519,7 @@ try:
try:
atom = portage.dep_expand(atom, mydb=db, settings=portage.settings)
except portage.exception.InvalidAtom:
- writemsg("ERROR: Invalid atom: '%s'\n" % atom, noiselevel=-1)
+ writemsg(f"ERROR: Invalid atom: '{atom}'\n", noiselevel=-1)
return 2
root_config = RootConfig(portage.settings, portage.db[eroot], None)
@@ -568,7 +566,7 @@ try:
continue
if pkg.visible:
- writemsg_stdout("{}\n".format(pkg.cpv), noiselevel=-1)
+ writemsg_stdout(f"{pkg.cpv}\n", noiselevel=-1)
return os.EX_OK
# No package found, write out an empty line.
@@ -598,7 +596,7 @@ try:
if argv[0] in type_map:
pkgtype = argv.pop(0)
for pack in argv:
- writemsg_stdout("%s:" % pack, noiselevel=-1)
+ writemsg_stdout(f"{pack}:", noiselevel=-1)
best_visible([root, pkgtype, pack])
except KeyError:
return 1
@@ -635,7 +633,7 @@ try:
@uses_eroot
def match(argv):
if len(argv) != 2:
- print("ERROR: expected 2 parameters, got %d!" % len(argv))
+ print(f"ERROR: expected 2 parameters, got {len(argv)}!")
return 2
root, atom = argv
if not atom:
@@ -688,7 +686,7 @@ try:
def expand_virtual(argv):
if len(argv) != 2:
writemsg(
- "ERROR: expected 2 parameters, got %d!\n" % len(argv), noiselevel=-1
+ f"ERROR: expected 2 parameters, got {len(argv)}!\n", noiselevel=-1
)
return 2
@@ -697,13 +695,13 @@ try:
try:
results = list(expand_new_virt(portage.db[root]["vartree"].dbapi, atom))
except portage.exception.InvalidAtom:
- writemsg("ERROR: Invalid atom: '%s'\n" % atom, noiselevel=-1)
+ writemsg(f"ERROR: Invalid atom: '{atom}'\n", noiselevel=-1)
return 2
results.sort()
for x in results:
if not x.blocker:
- writemsg_stdout("{}\n".format(x))
+ writemsg_stdout(f"{x}\n")
return os.EX_OK
@@ -929,7 +927,7 @@ try:
return 3
for arg in argv[1:]:
if portage.dep._repo_name_re.match(arg) is None:
- print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
+ print(f"ERROR: invalid repository: {arg}", file=sys.stderr)
return 2
try:
repo = portage.db[argv[0]]["vartree"].settings.repositories[arg]
@@ -967,7 +965,7 @@ try:
return 3
for arg in argv[1:]:
if portage.dep._repo_name_re.match(arg) is None:
- print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
+ print(f"ERROR: invalid repository: {arg}", file=sys.stderr)
return 2
path = portage.db[argv[0]]["vartree"].settings.repositories.treemap.get(arg)
if path is None:
@@ -989,7 +987,7 @@ try:
return 3
for arg in argv[1:]:
if portage.dep._repo_name_re.match(arg) is None:
- print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
+ print(f"ERROR: invalid repository: {arg}", file=sys.stderr)
return 2
try:
repo = portage.db[argv[0]]["vartree"].settings.repositories[arg]
@@ -1012,7 +1010,7 @@ try:
print("ERROR: insufficient parameters!", file=sys.stderr)
return 3
if portage.dep._repo_name_re.match(argv[1]) is None:
- print("ERROR: invalid repository: %s" % argv[1], file=sys.stderr)
+ print(f"ERROR: invalid repository: {argv[1]}", file=sys.stderr)
return 2
try:
repo = portage.db[argv[0]]["vartree"].settings.repositories[argv[1]]
@@ -1044,7 +1042,7 @@ try:
print("ERROR: insufficient parameters!", file=sys.stderr)
return 3
if portage.dep._repo_name_re.match(argv[1]) is None:
- print("ERROR: invalid repository: %s" % argv[1], file=sys.stderr)
+ print(f"ERROR: invalid repository: {argv[1]}", file=sys.stderr)
return 2
try:
repo = portage.db[argv[0]]["vartree"].settings.repositories[argv[1]]
@@ -1105,7 +1103,7 @@ try:
class MaintainerEmailMatcher:
def __init__(self, maintainer_emails):
- self._re = re.compile("^(%s)$" % "|".join(maintainer_emails), re.I)
+ self._re = re.compile(f"^({'|'.join(maintainer_emails)})$", re.I)
def __call__(self, metadata_xml):
match = False
@@ -1152,7 +1150,7 @@ try:
if "/" not in arg.split(":")[0]:
atom = insert_category_into_atom(arg, "*")
if atom is None:
- writemsg("ERROR: Invalid atom: '%s'\n" % arg, noiselevel=-1)
+ writemsg(f"ERROR: Invalid atom: '{arg}'\n", noiselevel=-1)
return 2
else:
atom = arg
@@ -1160,7 +1158,7 @@ try:
try:
atom = portage.dep.Atom(atom, allow_wildcard=True, allow_repo=True)
except portage.exception.InvalidAtom:
- writemsg("ERROR: Invalid atom: '%s'\n" % arg, noiselevel=-1)
+ writemsg(f"ERROR: Invalid atom: '{arg}'\n", noiselevel=-1)
return 2
if atom.slot is not None:
@@ -1290,12 +1288,12 @@ try:
continue
if no_version:
- writemsg_stdout("{}\n".format(cp), noiselevel=-1)
+ writemsg_stdout(f"{cp}\n", noiselevel=-1)
else:
matches = list(set(matches))
portdb._cpv_sort_ascending(matches)
for cpv in matches:
- writemsg_stdout("{}\n".format(cpv), noiselevel=-1)
+ writemsg_stdout(f"{cpv}\n", noiselevel=-1)
return os.EX_OK
@@ -1400,7 +1398,7 @@ try:
def usage(argv):
print(">>> Portage information query tool")
- print(">>> %s" % portage.VERSION)
+ print(f">>> {portage.VERSION}")
print(">>> Usage: portageq <command> [<option> ...]")
print("")
print("Available commands:")
@@ -1445,10 +1443,10 @@ try:
def elog(elog_funcname, lines):
cmd = (
- "source '%s/isolated-functions.sh' ; " % os.environ["PORTAGE_BIN_PATH"]
+ f"source '{os.environ['PORTAGE_BIN_PATH']}/isolated-functions.sh' ; "
)
for line in lines:
- cmd += "{} {} ; ".format(elog_funcname, portage._shell_quote(line))
+ cmd += f"{elog_funcname} {portage._shell_quote(line)} ; "
subprocess.call([portage.const.BASH_BINARY, "-c", cmd])
else:
@@ -1509,7 +1507,7 @@ try:
uses_eroot = getattr(function, "uses_eroot", False) and len(argv) > 2
if uses_eroot:
if not os.path.isdir(argv[2]):
- sys.stderr.write("Not a directory: '%s'\n" % argv[2])
+ sys.stderr.write(f"Not a directory: '{argv[2]}'\n")
sys.stderr.write("Run portageq with --help for info\n")
sys.stderr.flush()
sys.exit(os.EX_USAGE)
@@ -1554,10 +1552,10 @@ try:
if retval:
sys.exit(retval)
except portage.exception.PermissionDenied as e:
- sys.stderr.write("Permission denied: '%s'\n" % str(e))
+ sys.stderr.write(f"Permission denied: '{str(e)}'\n")
sys.exit(e.errno)
except portage.exception.ParseError as e:
- sys.stderr.write("%s\n" % str(e))
+ sys.stderr.write(f"{str(e)}\n")
sys.exit(1)
except portage.exception.AmbiguousPackageName as e:
# Multiple matches thrown from cpv_expand
@@ -1568,7 +1566,7 @@ try:
noiselevel=-1,
)
for pkg in pkgs:
- portage.writemsg("* %s\n" % pkg, noiselevel=-1)
+ portage.writemsg(f"* {pkg}\n", noiselevel=-1)
portage.writemsg("\nPlease use a more specific atom.\n", noiselevel=-1)
sys.exit(1)
diff --git a/bin/quickpkg b/bin/quickpkg
index 6aeabba93..247dee375 100755
--- a/bin/quickpkg
+++ b/bin/quickpkg
@@ -62,17 +62,17 @@ def quickpkg_atom(options, infos, arg, eout):
atom = dep_expand(arg, mydb=vardb, settings=vartree.settings)
except AmbiguousPackageName as e:
# Multiple matches thrown from cpv_expand
- eout.eerror("Please use a more specific atom: %s" % " ".join(e.args[0]))
+ eout.eerror(f"Please use a more specific atom: {' '.join(e.args[0])}")
del e
infos["missing"].append(arg)
return 1
except (InvalidAtom, InvalidData):
- eout.eerror("Invalid atom: {}".format(arg))
+ eout.eerror(f"Invalid atom: {arg}")
infos["missing"].append(arg)
return 1
if atom[:1] == "=" and arg[:1] != "=":
# dep_expand() allows missing '=' but it's really invalid
- eout.eerror("Invalid atom: {}".format(arg))
+ eout.eerror(f"Invalid atom: {arg}")
infos["missing"].append(arg)
return 1
@@ -103,17 +103,17 @@ def quickpkg_atom(options, infos, arg, eout):
except InvalidDependString as e:
eout.eerror(
"Invalid RESTRICT metadata "
- + "for '{}': {}; skipping".format(cpv, str(e))
+ + f"for '{cpv}': {str(e)}; skipping"
)
del e
continue
if "bindist" in iuse and "bindist" not in use:
- eout.ewarn("%s: package was emerged with USE=-bindist!" % cpv)
- eout.ewarn("%s: it might not be legal to redistribute this." % cpv)
+ eout.ewarn(f"{cpv}: package was emerged with USE=-bindist!")
+ eout.ewarn(f"{cpv}: it might not be legal to redistribute this.")
elif "bindist" in restrict:
- eout.ewarn("%s: package has RESTRICT=bindist!" % cpv)
- eout.ewarn("%s: it might not be legal to redistribute this." % cpv)
- eout.ebegin("Building package for %s" % cpv)
+ eout.ewarn(f"{cpv}: package has RESTRICT=bindist!")
+ eout.ewarn(f"{cpv}: it might not be legal to redistribute this.")
+ eout.ebegin(f"Building package for {cpv}")
pkgs_for_arg += 1
existing_metadata = dict(
zip(fix_metadata_keys, vardb.aux_get(cpv, fix_metadata_keys))
@@ -144,7 +144,7 @@ def quickpkg_atom(options, infos, arg, eout):
except KeyError as e:
if binpkg_compression:
eout.eerror(
- "Invalid or unsupported compression method: %s" % e.args[0]
+ f"Invalid or unsupported compression method: {e.args[0]}"
)
return 1
# Empty BINPKG_COMPRESS disables compression.
@@ -171,7 +171,7 @@ def quickpkg_atom(options, infos, arg, eout):
)[0]
except IndexError as e:
eout.eerror(
- "Invalid or unsupported compression method: %s" % e.args[0]
+ f"Invalid or unsupported compression method: {e.args[0]}"
)
return 1
if find_binary(compression_binary) is None:
@@ -199,7 +199,7 @@ def quickpkg_atom(options, infos, arg, eout):
proc.stdin.close()
if proc.wait() != os.EX_OK:
eout.eend(1)
- eout.eerror("Compressor failed for package %s" % cpv)
+ eout.eerror(f"Compressor failed for package {cpv}")
retval |= 1
try:
os.unlink(binpkg_tmpfile)
@@ -251,9 +251,9 @@ def quickpkg_atom(options, infos, arg, eout):
infos["successes"].append((cpv, s.st_size))
infos["config_files_excluded"] += len(excluded_config_files)
for filename in excluded_config_files:
- eout.ewarn("Excluded config: '%s'" % filename)
+ eout.ewarn(f"Excluded config: '{filename}'")
if not pkgs_for_arg:
- eout.eerror("Could not find anything " + "to match '%s'; skipping" % arg)
+ eout.eerror("Could not find anything " + f"to match '{arg}'; skipping")
infos["missing"].append(arg)
retval |= 1
return retval
@@ -271,7 +271,7 @@ def quickpkg_set(options, infos, arg, eout):
set_name = arg[1:]
if not set_name in sets:
- eout.eerror("Package set not found: '{}'; skipping".format(arg))
+ eout.eerror(f"Package set not found: '{arg}'; skipping")
infos["missing"].append(arg)
return 1
@@ -279,8 +279,8 @@ def quickpkg_set(options, infos, arg, eout):
atoms = setconfig.getSetAtoms(set_name)
except PackageSetNotFound as e:
eout.eerror(
- "Failed to process package set '%s' because " % set_name
- + "it contains the non-existent package set '%s'; skipping" % e
+ f"Failed to process package set '{set_name}' because "
+ + f"it contains the non-existent package set '{e}'; skipping"
)
infos["missing"].append(arg)
return 1
@@ -299,7 +299,7 @@ def quickpkg_extended_atom(options, infos, atom, eout):
require_metadata = atom.slot or atom.repo
atoms = []
for cpv in vardb.cpv_all():
- cpv_atom = Atom("=%s" % cpv)
+ cpv_atom = Atom(f"={cpv}")
if atom == "*/*":
atoms.append(cpv_atom)
@@ -332,7 +332,7 @@ def quickpkg_main(options, args, eout):
except portage.exception.PortageException:
pass
if not os.access(bintree.pkgdir, os.W_OK):
- eout.eerror("No write access to '%s'" % bintree.pkgdir)
+ eout.eerror(f"No write access to '{bintree.pkgdir}'")
return errno.EACCES
if "xattr" in portage.settings.features and not _xattr.XATTRS_WORKS:
@@ -370,7 +370,7 @@ def quickpkg_main(options, args, eout):
eout.eerror("No packages found")
return 1
print()
- eout.einfo("Packages now in '%s':" % bintree.pkgdir)
+ eout.einfo(f"Packages now in '{bintree.pkgdir}':")
units = {10: "K", 20: "M", 30: "G", 40: "T", 50: "P", 60: "E", 70: "Z", 80: "Y"}
for cpv, size in infos["successes"]:
if not size:
@@ -382,14 +382,14 @@ def quickpkg_main(options, args, eout):
unit = units.get(power_of_2)
if unit:
size = float(size) / (2**power_of_2)
- size_str = "%.1f" % size
+ size_str = f"{size:.1f}"
if len(size_str) > 4:
# emulate `du -h`, don't show too many sig figs
size_str = str(int(size))
size_str += unit
else:
size_str = str(size)
- eout.einfo("{}: {}".format(cpv, size_str))
+ eout.einfo(f"{cpv}: {size_str}")
if infos["config_files_excluded"]:
print()
eout.ewarn("Excluded config files: %d" % infos["config_files_excluded"])
@@ -438,7 +438,7 @@ if __name__ == "__main__":
try:
umask = int(options.umask, 8)
except ValueError:
- parser.error("invalid umask: %s" % options.umask)
+ parser.error(f"invalid umask: {options.umask}")
# We need to ensure a sane umask for the packages that will be created.
old_umask = os.umask(umask)
eout = portage.output.EOutput()
diff --git a/bin/regenworld b/bin/regenworld
index f07cfffe8..9b58fbe2e 100755
--- a/bin/regenworld
+++ b/bin/regenworld
@@ -67,7 +67,7 @@ if len(sys.argv) >= 2 and sys.argv[1] in ["-h", "--help"]:
print("This script regenerates the portage world file by checking the portage")
print("logfile for all actions that you've done in the past. It ignores any")
print("arguments except --help. It is recommended that you make a backup of")
- print("your existing world file (%s) before using this tool." % world_file)
+ print(f"your existing world file ({world_file}) before using this tool.")
sys.exit(0)
worldlist = portage.grabfile(world_file)
@@ -106,17 +106,15 @@ for mykey in biglist:
mylist = portage.db[eroot]["vartree"].dbapi.match(mykey)
except (portage.exception.InvalidAtom, KeyError):
if "--debug" in sys.argv:
- print("* ignoring broken log entry for %s (likely injected)" % mykey)
+ print(f"* ignoring broken log entry for {mykey} (likely injected)")
except ValueError as e:
try:
print(
- "* {} is an ambiguous package name, candidates are:\n{}".format(
- mykey, e
- )
+ f"* {mykey} is an ambiguous package name, candidates are:\n{e}"
)
except AttributeError:
# FIXME: Find out what causes this (bug #344845).
- print("* {} is an ambiguous package name".format(mykey))
+ print(f"* {mykey} is an ambiguous package name")
continue
if mylist:
# print "mylist:",mylist
@@ -157,7 +155,7 @@ else:
portage.util.writemsg_stdout("\n", noiselevel=-1)
for line in textwrap.wrap(msg, 65):
- portage.util.writemsg_stdout("%s\n" % line, noiselevel=-1)
+ portage.util.writemsg_stdout(f"{line}\n", noiselevel=-1)
portage.util.writemsg_stdout("\n", noiselevel=-1)
- portage.util.writemsg_stdout(" old: %s\n\n" % old_world, noiselevel=-1)
- portage.util.writemsg_stdout(" new: %s\n\n" % tmp_filename, noiselevel=-1)
+ portage.util.writemsg_stdout(f" old: {old_world}\n\n", noiselevel=-1)
+ portage.util.writemsg_stdout(f" new: {tmp_filename}\n\n", noiselevel=-1)
diff --git a/bin/socks5-server.py b/bin/socks5-server.py
index 442e96942..e898835ff 100644
--- a/bin/socks5-server.py
+++ b/bin/socks5-server.py
@@ -220,7 +220,7 @@ class Socks5Server:
if __name__ == "__main__":
if len(sys.argv) != 2:
- print("Usage: %s <socket-path>" % sys.argv[0])
+ print(f"Usage: {sys.argv[0]} <socket-path>")
sys.exit(1)
loop = asyncio.new_event_loop()
diff --git a/bin/xattr-helper.py b/bin/xattr-helper.py
index 6e50ac487..f14f7fc9d 100755
--- a/bin/xattr-helper.py
+++ b/bin/xattr-helper.py
@@ -27,7 +27,7 @@ _FS_ENCODING = sys.getfilesystemencoding()
def octal_quote_byte(b):
- return ("\\%03o" % ord(b)).encode("ascii")
+ return (f"\\{ord(b):03o}").encode("ascii")
def unicode_encode(s):
diff --git a/bin/xpak-helper.py b/bin/xpak-helper.py
index ac29995e7..6e874025c 100755
--- a/bin/xpak-helper.py
+++ b/bin/xpak-helper.py
@@ -16,19 +16,19 @@ def command_recompose(args):
if len(args) != 2:
sys.stderr.write(usage)
- sys.stderr.write("2 arguments are required, got %s\n" % len(args))
+ sys.stderr.write(f"2 arguments are required, got {len(args)}\n")
return 1
binpkg_path, metadata_dir = args
if not os.path.isfile(binpkg_path):
sys.stderr.write(usage)
- sys.stderr.write("Argument 1 is not a regular file: '%s'\n" % binpkg_path)
+ sys.stderr.write(f"Argument 1 is not a regular file: '{binpkg_path}'\n")
return 1
if not os.path.isdir(metadata_dir):
sys.stderr.write(usage)
- sys.stderr.write("Argument 2 is not a directory: '%s'\n" % metadata_dir)
+ sys.stderr.write(f"Argument 2 is not a directory: '{metadata_dir}'\n")
return 1
t = portage.xpak.tbz2(binpkg_path)
@@ -44,7 +44,7 @@ def main(argv):
valid_commands = ("recompose",)
description = "Perform metadata operations on a binary package."
- usage = "usage: %s COMMAND [args]" % os.path.basename(argv[0])
+ usage = f"usage: {os.path.basename(argv[0])} COMMAND [args]"
parser = argparse.ArgumentParser(description=description, usage=usage)
options, args = parser.parse_known_args(argv[1:])
@@ -55,12 +55,12 @@ def main(argv):
command = args[0]
if command not in valid_commands:
- parser.error("invalid command: '%s'" % command)
+ parser.error(f"invalid command: '{command}'")
if command == "recompose":
rval = command_recompose(args[1:])
else:
- raise AssertionError("invalid command: '%s'" % command)
+ raise AssertionError(f"invalid command: '{command}'")
return rval
diff --git a/lib/_emerge/AbstractEbuildProcess.py b/lib/_emerge/AbstractEbuildProcess.py
index 5b8b7a3b8..d74d8225b 100644
--- a/lib/_emerge/AbstractEbuildProcess.py
+++ b/lib/_emerge/AbstractEbuildProcess.py
@@ -159,9 +159,7 @@ class AbstractEbuildProcess(SpawnProcess):
cgroup_path = tempfile.mkdtemp(
dir=cgroup_portage,
- prefix="{}:{}.".format(
- self.settings["CATEGORY"], self.settings["PF"]
- ),
+ prefix=f"{self.settings['CATEGORY']}:{self.settings['PF']}.",
)
except (subprocess.CalledProcessError, OSError):
pass
@@ -454,7 +452,7 @@ class AbstractEbuildProcess(SpawnProcess):
elif self._build_dir_unlock is None:
if self.returncode is None:
raise asyncio.InvalidStateError(
- "Result is not ready for {}".format(self)
+ f"Result is not ready for {self}"
)
self._async_unlock_builddir(returncode=self.returncode)
diff --git a/lib/_emerge/AsynchronousLock.py b/lib/_emerge/AsynchronousLock.py
index ad8570559..aaa5a0c14 100644
--- a/lib/_emerge/AsynchronousLock.py
+++ b/lib/_emerge/AsynchronousLock.py
@@ -262,7 +262,7 @@ class _LockProcess(AbstractPollTask):
# only safe to ignore if either the cancel() or
# unlock() methods have been previously called.
raise AssertionError(
- "lock process failed with returncode {}".format(proc.returncode)
+ f"lock process failed with returncode {proc.returncode}"
)
if self._unlock_future is not None:
@@ -306,7 +306,7 @@ class _LockProcess(AbstractPollTask):
raise AssertionError("lock not acquired yet")
if self.returncode != os.EX_OK:
raise AssertionError(
- "lock process failed with returncode {}".format(self.returncode)
+ f"lock process failed with returncode {self.returncode}"
)
if self._unlock_future is not None:
raise AssertionError("already unlocked")
diff --git a/lib/_emerge/AsynchronousTask.py b/lib/_emerge/AsynchronousTask.py
index 2d1af8b7d..fcc80fd0c 100644
--- a/lib/_emerge/AsynchronousTask.py
+++ b/lib/_emerge/AsynchronousTask.py
@@ -89,7 +89,7 @@ class AsynchronousTask(SlotObject):
if self.returncode is None:
if self.scheduler.is_running():
raise asyncio.InvalidStateError(
- "Result is not ready for {}".format(self)
+ f"Result is not ready for {self}"
)
self.scheduler.run_until_complete(self.async_wait())
self._wait_hook()
diff --git a/lib/_emerge/Binpkg.py b/lib/_emerge/Binpkg.py
index 006e1a497..8d4cf5041 100644
--- a/lib/_emerge/Binpkg.py
+++ b/lib/_emerge/Binpkg.py
@@ -86,7 +86,7 @@ class Binpkg(CompositeTask):
)
if dir_path != self.settings["PORTAGE_BUILDDIR"]:
raise AssertionError(
- "'{}' != '{}'".format(dir_path, self.settings["PORTAGE_BUILDDIR"])
+ f"'{dir_path}' != '{self.settings['PORTAGE_BUILDDIR']}'"
)
self._build_dir = EbuildBuildDir(scheduler=self.scheduler, settings=settings)
settings.configdict["pkg"]["EMERGE_FROM"] = "binary"
@@ -124,7 +124,7 @@ class Binpkg(CompositeTask):
"Fetching in the background:",
prefetcher.pkg_path,
"To view fetch progress, run in another terminal:",
- "tail -f %s" % fetch_log,
+ f"tail -f {fetch_log}",
)
out = portage.output.EOutput()
for l in msg:
@@ -377,7 +377,7 @@ class Binpkg(CompositeTask):
encoding=_encodings["content"],
errors="strict",
) as f:
- f.write(_unicode_decode("{}\n".format(md5sum)))
+ f.write(_unicode_decode(f"{md5sum}\n"))
env_extractor = BinpkgEnvExtractor(
background=self.background, scheduler=self.scheduler, settings=self.settings
@@ -386,7 +386,7 @@ class Binpkg(CompositeTask):
await env_extractor.async_wait()
if env_extractor.returncode != os.EX_OK:
raise portage.exception.PortageException(
- "failed to extract environment for {}".format(self.pkg.cpv)
+ f"failed to extract environment for {self.pkg.cpv}"
)
def _unpack_metadata_exit(self, unpack_metadata):
@@ -421,7 +421,7 @@ class Binpkg(CompositeTask):
self._async_unlock_builddir(returncode=self.returncode)
return
- self._writemsg_level(">>> Extracting %s\n" % self.pkg.cpv)
+ self._writemsg_level(f">>> Extracting {self.pkg.cpv}\n")
self._start_task(
AsyncTaskFuture(
future=self._bintree.dbapi.unpack_contents(
@@ -495,13 +495,13 @@ class Binpkg(CompositeTask):
scheduler=self.scheduler,
logfile=self.settings.get("PORTAGE_LOG_FILE"),
)
- self._writemsg_level(">>> Adjusting Prefix to %s\n" % self.settings["EPREFIX"])
+ self._writemsg_level(f">>> Adjusting Prefix to {self.settings['EPREFIX']}\n")
self._start_task(chpathtool, self._chpathtool_exit)
def _chpathtool_exit(self, chpathtool):
if self._final_exit(chpathtool) != os.EX_OK:
self._writemsg_level(
- "!!! Error Adjusting Prefix to {}\n".format(self.settings["EPREFIX"]),
+ f"!!! Error Adjusting Prefix to {self.settings['EPREFIX']}\n",
noiselevel=-1,
level=logging.ERROR,
)
diff --git a/lib/_emerge/BinpkgExtractorAsync.py b/lib/_emerge/BinpkgExtractorAsync.py
index ca7ccf73a..9245fd00a 100644
--- a/lib/_emerge/BinpkgExtractorAsync.py
+++ b/lib/_emerge/BinpkgExtractorAsync.py
@@ -50,7 +50,7 @@ class BinpkgExtractorAsync(SpawnProcess):
for x in portage.util.shlex_split(
self.env.get("PORTAGE_XATTR_EXCLUDE", "")
):
- tar_options.append(portage._shell_quote("--xattrs-exclude=%s" % x))
+ tar_options.append(portage._shell_quote(f"--xattrs-exclude={x}"))
tar_options = " ".join(tar_options)
decomp = _compressors.get(compression_probe(self.pkg_path))
diff --git a/lib/_emerge/BinpkgFetcher.py b/lib/_emerge/BinpkgFetcher.py
index 9018d0ee2..aeeff8c4a 100644
--- a/lib/_emerge/BinpkgFetcher.py
+++ b/lib/_emerge/BinpkgFetcher.py
@@ -131,7 +131,7 @@ class _BinpkgFetcherProcess(SpawnProcess):
raise FileNotFound("Binary packages index not found")
if pretend:
- portage.writemsg_stdout("\n%s\n" % uri, noiselevel=-1)
+ portage.writemsg_stdout(f"\n{uri}\n", noiselevel=-1)
self.returncode = os.EX_OK
self._async_wait()
return
diff --git a/lib/_emerge/BinpkgVerifier.py b/lib/_emerge/BinpkgVerifier.py
index 830b0c000..0477ea304 100644
--- a/lib/_emerge/BinpkgVerifier.py
+++ b/lib/_emerge/BinpkgVerifier.py
@@ -105,9 +105,7 @@ class BinpkgVerifier(CompositeTask):
path = path[: -len(".partial")]
eout = EOutput()
eout.ebegin(
- "{} {} ;-)".format(
- os.path.basename(path), " ".join(sorted(self._digests))
- )
+ f"{os.path.basename(path)} {' '.join(sorted(self._digests))} ;-)"
)
eout.eend(0)
diff --git a/lib/_emerge/BlockerCache.py b/lib/_emerge/BlockerCache.py
index f0d1a1563..ccc8d416c 100644
--- a/lib/_emerge/BlockerCache.py
+++ b/lib/_emerge/BlockerCache.py
@@ -62,7 +62,7 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
pass
else:
writemsg(
- "!!! Error loading '{}': {}\n".format(self._cache_filename, str(e)),
+ f"!!! Error loading '{self._cache_filename}': {str(e)}\n",
noiselevel=-1,
)
del e
diff --git a/lib/_emerge/BlockerDB.py b/lib/_emerge/BlockerDB.py
index 340076b99..e0dd3c8fe 100644
--- a/lib/_emerge/BlockerDB.py
+++ b/lib/_emerge/BlockerDB.py
@@ -71,7 +71,7 @@ class BlockerDB:
inst_pkg.root, portage.VDB_PATH, inst_pkg.category, inst_pkg.pf
)
portage.writemsg(
- "!!! {}/*DEPEND: {}\n".format(pkg_location, atoms),
+ f"!!! {pkg_location}/*DEPEND: {atoms}\n",
noiselevel=-1,
)
continue
@@ -129,7 +129,7 @@ class BlockerDB:
"""Discard a package from the list of potential blockers.
This will match any package(s) with identical cpv or cp:slot."""
for cpv_match in self._fake_vartree.dbapi.match_pkgs(
- Atom("={}".format(pkg.cpv))
+ Atom(f"={pkg.cpv}")
):
if cpv_match.cp == pkg.cp:
self._fake_vartree.cpv_discard(cpv_match)
diff --git a/lib/_emerge/CompositeTask.py b/lib/_emerge/CompositeTask.py
index ca2fab1fb..c15659ae3 100644
--- a/lib/_emerge/CompositeTask.py
+++ b/lib/_emerge/CompositeTask.py
@@ -54,7 +54,7 @@ class CompositeTask(AsynchronousTask):
for detecting bugs.
"""
if task is not self._current_task:
- raise AssertionError("Unrecognized task: {}".format(task))
+ raise AssertionError(f"Unrecognized task: {task}")
def _default_exit(self, task):
"""
diff --git a/lib/_emerge/DependencyArg.py b/lib/_emerge/DependencyArg.py
index 32e0369b8..5f487b344 100644
--- a/lib/_emerge/DependencyArg.py
+++ b/lib/_emerge/DependencyArg.py
@@ -33,4 +33,4 @@ class DependencyArg:
return hash((self.arg, self.root_config.root))
def __str__(self):
- return "{}".format(self.arg)
+ return f"{self.arg}"
diff --git a/lib/_emerge/EbuildBuild.py b/lib/_emerge/EbuildBuild.py
index f1488d200..2623b718c 100644
--- a/lib/_emerge/EbuildBuild.py
+++ b/lib/_emerge/EbuildBuild.py
@@ -82,7 +82,7 @@ class EbuildBuild(CompositeTask):
settings.configdict["pkg"]["MERGE_TYPE"] = "source"
ebuild_path = portdb.findname(pkg.cpv, myrepo=pkg.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % pkg.cpv)
+ raise AssertionError(f"ebuild not found for '{pkg.cpv}'")
self._ebuild_path = ebuild_path
portage.doebuild_environment(
ebuild_path, "setup", settings=self.settings, db=portdb
@@ -108,7 +108,7 @@ class EbuildBuild(CompositeTask):
msg = (
"Fetching files in the background.",
"To view fetch progress, run in another terminal:",
- "tail -f %s" % fetch_log,
+ f"tail -f {fetch_log}",
)
out = portage.output.EOutput()
for l in msg:
@@ -297,9 +297,7 @@ class EbuildBuild(CompositeTask):
already_fetched = already_fetched_task.future.result()
except portage.exception.InvalidDependString as e:
msg_lines = []
- msg = "Fetch failed for '{}' due to invalid SRC_URI: {}".format(
- self.pkg.cpv, e
- )
+ msg = f"Fetch failed for '{self.pkg.cpv}' due to invalid SRC_URI: {e}"
msg_lines.append(msg)
fetcher._eerror(msg_lines)
portage.elog.elog_process(self.pkg.cpv, self.settings)
@@ -559,10 +557,10 @@ class EbuildBuild(CompositeTask):
pkg = task.get_binpkg_info()
infoloc = os.path.join(self.settings["PORTAGE_BUILDDIR"], "build-info")
info = {
- "BINPKGMD5": "%s\n" % pkg._metadata["MD5"],
+ "BINPKGMD5": f"{pkg._metadata['MD5']}\n",
}
if pkg.build_id is not None:
- info["BUILD_ID"] = "%s\n" % pkg.build_id
+ info["BUILD_ID"] = f"{pkg.build_id}\n"
for k, v in info.items():
with open(
_unicode_encode(
diff --git a/lib/_emerge/EbuildBuildDir.py b/lib/_emerge/EbuildBuildDir.py
index b35ea5e10..136737204 100644
--- a/lib/_emerge/EbuildBuildDir.py
+++ b/lib/_emerge/EbuildBuildDir.py
@@ -23,9 +23,7 @@ class EbuildBuildDir(SlotObject):
if async_lock.returncode != os.EX_OK:
# TODO: create a better way to propagate this error to the caller
raise AssertionError(
- "AsynchronousLock failed with returncode {}".format(
- async_lock.returncode
- )
+ f"AsynchronousLock failed with returncode {async_lock.returncode}"
)
def clean_log(self):
diff --git a/lib/_emerge/EbuildFetcher.py b/lib/_emerge/EbuildFetcher.py
index 5becde176..8cb8ec440 100644
--- a/lib/_emerge/EbuildFetcher.py
+++ b/lib/_emerge/EbuildFetcher.py
@@ -69,9 +69,7 @@ class EbuildFetcher(CompositeTask):
uri_map = uri_map_task.future.result()
except portage.exception.InvalidDependString as e:
msg_lines = []
- msg = "Fetch failed for '{}' due to invalid SRC_URI: {}".format(
- self.pkg.cpv, e
- )
+ msg = f"Fetch failed for '{self.pkg.cpv}' due to invalid SRC_URI: {e}"
msg_lines.append(msg)
self._fetcher_proc._eerror(msg_lines)
self._current_task = None
@@ -289,7 +287,7 @@ class _EbuildFetcherProcess(ForkProcess):
portdb = self.pkg.root_config.trees["porttree"].dbapi
self.ebuild_path = portdb.findname(self.pkg.cpv, myrepo=self.pkg.repo)
if self.ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % self.pkg.cpv)
+ raise AssertionError(f"ebuild not found for '{self.pkg.cpv}'")
return self.ebuild_path
def _get_manifest(self):
@@ -375,7 +373,7 @@ class _EbuildFetcherProcess(ForkProcess):
for filename in uri_map:
f.write(
_unicode_decode(
- (" * %s size ;-) ..." % filename).ljust(73) + "[ ok ]\n"
+ f" * {filename} size ;-) ...".ljust(73) + "[ ok ]\n"
)
)
f.close()
@@ -409,11 +407,11 @@ class _EbuildFetcherProcess(ForkProcess):
"""
if not self.prefetch and not future.cancelled() and proc.exitcode != os.EX_OK:
msg_lines = []
- msg = "Fetch failed for '{}'".format(self.pkg.cpv)
+ msg = f"Fetch failed for '{self.pkg.cpv}'"
if self.logfile is not None:
msg += ", Log file:"
msg_lines.append(msg)
if self.logfile is not None:
- msg_lines.append(" '{}'".format(self.logfile))
+ msg_lines.append(f" '{self.logfile}'")
self._eerror(msg_lines)
super()._proc_join_done(proc, future)
diff --git a/lib/_emerge/EbuildFetchonly.py b/lib/_emerge/EbuildFetchonly.py
index 380c19e70..0b7c49f96 100644
--- a/lib/_emerge/EbuildFetchonly.py
+++ b/lib/_emerge/EbuildFetchonly.py
@@ -17,7 +17,7 @@ class EbuildFetchonly(SlotObject):
portdb = pkg.root_config.trees["porttree"].dbapi
ebuild_path = portdb.findname(pkg.cpv, myrepo=pkg.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % pkg.cpv)
+ raise AssertionError(f"ebuild not found for '{pkg.cpv}'")
settings.setcpv(pkg)
debug = settings.get("PORTAGE_DEBUG") == "1"
@@ -37,7 +37,7 @@ class EbuildFetchonly(SlotObject):
# and the unsuccessful return value is used to trigger
# a call to the pkg_nofetch phase.
if rval != os.EX_OK and not self.pretend:
- msg = "Fetch failed for '{}'".format(pkg.cpv)
+ msg = f"Fetch failed for '{pkg.cpv}'"
eerror(msg, phase="unpack", key=pkg.cpv)
return rval
diff --git a/lib/_emerge/EbuildIpcDaemon.py b/lib/_emerge/EbuildIpcDaemon.py
index 8e5e747d9..75ecb6c2f 100644
--- a/lib/_emerge/EbuildIpcDaemon.py
+++ b/lib/_emerge/EbuildIpcDaemon.py
@@ -111,7 +111,7 @@ class EbuildIpcDaemon(FifoIpcDaemon):
# This probably means that the client has been killed,
# which causes open to fail with ENXIO.
writemsg_level(
- "!!! EbuildIpcDaemon {}: {}\n".format(_("failed to send reply"), e),
+ f"!!! EbuildIpcDaemon {_('failed to send reply')}: {e}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/_emerge/EbuildPhase.py b/lib/_emerge/EbuildPhase.py
index 3be322f3e..8e408dfc2 100644
--- a/lib/_emerge/EbuildPhase.py
+++ b/lib/_emerge/EbuildPhase.py
@@ -100,7 +100,7 @@ class EbuildPhase(CompositeTask):
if need_builddir:
phase_completed_file = os.path.join(
- self.settings["PORTAGE_BUILDDIR"], ".%sed" % self.phase.rstrip("e")
+ self.settings["PORTAGE_BUILDDIR"], f".{self.phase.rstrip('e')}ed"
)
if not os.path.exists(phase_completed_file):
# If the phase is really going to run then we want
@@ -136,23 +136,23 @@ class EbuildPhase(CompositeTask):
msg = []
msg.append(
- "Package: {}:{}".format(self.settings.mycpv, self.settings["SLOT"])
+ f"Package: {self.settings.mycpv}:{self.settings['SLOT']}"
)
if self.settings.get("PORTAGE_REPO_NAME"):
- msg.append("Repository: %s" % self.settings["PORTAGE_REPO_NAME"])
+ msg.append(f"Repository: {self.settings['PORTAGE_REPO_NAME']}")
if maint_str:
- msg.append("Maintainer: %s" % maint_str)
+ msg.append(f"Maintainer: {maint_str}")
if upstr_str:
- msg.append("Upstream: %s" % upstr_str)
+ msg.append(f"Upstream: {upstr_str}")
- msg.append("USE: %s" % use)
+ msg.append(f"USE: {use}")
relevant_features = []
enabled_features = self.settings.features
for x in self._features_display:
if x in enabled_features:
relevant_features.append(x)
if relevant_features:
- msg.append("FEATURES: %s" % " ".join(relevant_features))
+ msg.append(f"FEATURES: {' '.join(relevant_features)}")
# Force background=True for this header since it's intended
# for the log and it doesn't necessarily need to be visible
@@ -408,7 +408,7 @@ class EbuildPhase(CompositeTask):
self._append_temp_log(post_phase.logfile, log_path)
if self._final_exit(post_phase) != os.EX_OK:
- writemsg("!!! post %s failed; exiting.\n" % self.phase, noiselevel=-1)
+ writemsg(f"!!! post {self.phase} failed; exiting.\n", noiselevel=-1)
self._die_hooks()
return
@@ -625,7 +625,7 @@ class _PostPhaseCommands(CompositeTask):
qa_msg = ["QA Notice: Unresolved soname dependencies:"]
qa_msg.append("")
qa_msg.extend(
- "\t{}: {}".format(filename, " ".join(sorted(soname_deps)))
+ f"\t{filename}: {' '.join(sorted(soname_deps))}"
for filename, soname_deps in unresolved
)
qa_msg.append("")
diff --git a/lib/_emerge/JobStatusDisplay.py b/lib/_emerge/JobStatusDisplay.py
index 02600c052..33e955167 100644
--- a/lib/_emerge/JobStatusDisplay.py
+++ b/lib/_emerge/JobStatusDisplay.py
@@ -127,7 +127,7 @@ class JobStatusDisplay:
return True
def _format_msg(self, msg):
- return ">>> %s" % msg
+ return f">>> {msg}"
def _erase(self):
self._write(self._term_codes["carriage_return"] + self._term_codes["clr_eol"])
@@ -228,10 +228,10 @@ class JobStatusDisplay:
def _display_status(self):
# Don't use len(self._completed_tasks) here since that also
# can include uninstall tasks.
- curval_str = "{}".format(self.curval)
- maxval_str = "{}".format(self.maxval)
- running_str = "{}".format(self.running)
- failed_str = "{}".format(self.failed)
+ curval_str = f"{self.curval}"
+ maxval_str = f"{self.maxval}"
+ running_str = f"{self.running}"
+ failed_str = f"{self.failed}"
load_avg_str = self._load_avg_str()
color_output = io.StringIO()
@@ -292,5 +292,5 @@ class JobStatusDisplay:
title_str = " ".join(plain_output.split())
hostname = os.environ.get("HOSTNAME")
if hostname is not None:
- title_str = "{}: {}".format(hostname, title_str)
+ title_str = f"{hostname}: {title_str}"
xtermTitle(title_str)
diff --git a/lib/_emerge/MergeListItem.py b/lib/_emerge/MergeListItem.py
index b80d1cb1d..d96dfc11d 100644
--- a/lib/_emerge/MergeListItem.py
+++ b/lib/_emerge/MergeListItem.py
@@ -76,7 +76,7 @@ class MergeListItem(CompositeTask):
)
if pkg.root_config.settings["ROOT"] != "/":
- msg += " {} {}".format(preposition, pkg.root)
+ msg += f" {preposition} {pkg.root}"
if not build_opts.pretend:
self.statusMessage(msg)
diff --git a/lib/_emerge/MetadataRegen.py b/lib/_emerge/MetadataRegen.py
index b272009b9..c2ab6722e 100644
--- a/lib/_emerge/MetadataRegen.py
+++ b/lib/_emerge/MetadataRegen.py
@@ -50,7 +50,7 @@ class MetadataRegen(AsyncScheduler):
if self._terminated.is_set():
break
cp_set.add(cp)
- portage.writemsg_stdout("Processing %s\n" % cp)
+ portage.writemsg_stdout(f"Processing {cp}\n")
# We iterate over portdb.porttrees, since it's common to
# tweak this attribute in order to adjust repo selection.
for mytree in portdb.porttrees:
@@ -63,8 +63,7 @@ class MetadataRegen(AsyncScheduler):
ebuild_path, repo_path = portdb.findname2(cpv, myrepo=repo.name)
if ebuild_path is None:
raise AssertionError(
- "ebuild not found for '%s%s%s'"
- % (cpv, _repo_separator, repo.name)
+ f"ebuild not found for '{cpv}{_repo_separator}{repo.name}'"
)
metadata, ebuild_hash = portdb._pull_valid_cache(
cpv, ebuild_path, repo_path
@@ -100,7 +99,7 @@ class MetadataRegen(AsyncScheduler):
except CacheError as e:
portage.writemsg(
"Error listing cache entries for "
- + "'{}': {}, continuing...\n".format(mytree, e),
+ + f"'{mytree}': {e}, continuing...\n",
noiselevel=-1,
)
del e
@@ -117,7 +116,7 @@ class MetadataRegen(AsyncScheduler):
except CacheError as e:
portage.writemsg(
"Error listing cache entries for "
- + "'{}': {}, continuing...\n".format(mytree, e),
+ + f"'{mytree}': {e}, continuing...\n",
noiselevel=-1,
)
del e
@@ -146,7 +145,7 @@ class MetadataRegen(AsyncScheduler):
self._valid_pkgs.discard(metadata_process.cpv)
if not self._terminated_tasks:
portage.writemsg(
- "Error processing {}, continuing...\n".format(metadata_process.cpv),
+ f"Error processing {metadata_process.cpv}, continuing...\n",
noiselevel=-1,
)
diff --git a/lib/_emerge/Package.py b/lib/_emerge/Package.py
index 8e741592d..599884b80 100644
--- a/lib/_emerge/Package.py
+++ b/lib/_emerge/Package.py
@@ -123,7 +123,7 @@ class Package(Task):
)
if hasattr(self.cpv, "slot_invalid"):
self._invalid_metadata(
- "SLOT.invalid", "SLOT: invalid value: '%s'" % self._metadata["SLOT"]
+ "SLOT.invalid", f"SLOT: invalid value: '{self._metadata['SLOT']}'"
)
self.cpv_split = self.cpv.cpv_split
self.category, self.pf = portage.catsplit(self.cpv)
@@ -131,7 +131,7 @@ class Package(Task):
self.version = self.cpv.version
self.slot = self.cpv.slot
self.sub_slot = self.cpv.sub_slot
- self.slot_atom = Atom("{}{}{}".format(self.cp, _slot_separator, self.slot))
+ self.slot_atom = Atom(f"{self.cp}{_slot_separator}{self.slot}")
# sync metadata with validated repo (may be UNKNOWN_REPO)
self._metadata["repository"] = self.cpv.repo
@@ -375,13 +375,13 @@ class Package(Task):
if not _get_eapi_attrs(eapi).required_use:
self._invalid_metadata(
"EAPI.incompatible",
- "REQUIRED_USE set, but EAPI='%s' doesn't allow it" % eapi,
+ f"REQUIRED_USE set, but EAPI='{eapi}' doesn't allow it",
)
else:
try:
check_required_use(v, (), self.iuse.is_valid_flag, eapi=eapi)
except InvalidDependString as e:
- self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
+ self._invalid_metadata(k + ".syntax", f"{k}: {e}")
k = "SRC_URI"
v = self._metadata.get(k)
@@ -403,13 +403,13 @@ class Package(Task):
try:
self._provides = frozenset(parse_soname_deps(self._metadata[k]))
except InvalidData as e:
- self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
+ self._invalid_metadata(k + ".syntax", f"{k}: {e}")
k = "REQUIRES"
try:
self._requires = frozenset(parse_soname_deps(self._metadata[k]))
except InvalidData as e:
- self._invalid_metadata(k + ".syntax", "{}: {}".format(k, e))
+ self._invalid_metadata(k + ".syntax", f"{k}: {e}")
def copy(self):
return Package(
@@ -546,17 +546,17 @@ class Package(Task):
if getattr(error, "category", None) is None:
continue
categorized_error = True
- self._invalid_metadata(error.category, "{}: {}".format(k, error))
+ self._invalid_metadata(error.category, f"{k}: {error}")
if not categorized_error:
- self._invalid_metadata(qacat, "{}: {}".format(k, e))
+ self._invalid_metadata(qacat, f"{k}: {e}")
else:
# For installed packages, show the path of the file
# containing the invalid metadata, since the user may
# want to fix the deps by hand.
vardb = self.root_config.trees["vartree"].dbapi
path = vardb.getpath(self.cpv, filename=k)
- self._invalid_metadata(qacat, "{}: {} in '{}'".format(k, e, path))
+ self._invalid_metadata(qacat, f"{k}: {e} in '{path}'")
def _invalid_metadata(self, msg_type, msg):
if self._invalid is None:
@@ -580,7 +580,7 @@ class Package(Task):
build_id_str = ""
if isinstance(self.cpv.build_id, int) and self.cpv.build_id > 0:
- build_id_str = "-%s" % self.cpv.build_id
+ build_id_str = f"-{self.cpv.build_id}"
s = "({}, {}".format(
portage.output.colorize(
@@ -599,14 +599,14 @@ class Package(Task):
if self.type_name == "installed":
if self.root_config.settings["ROOT"] != "/":
- s += " in '%s'" % self.root_config.settings["ROOT"]
+ s += f" in '{self.root_config.settings['ROOT']}'"
if self.operation == "uninstall":
s += " scheduled for uninstall"
else:
if self.operation == "merge":
s += " scheduled for merge"
if self.root_config.settings["ROOT"] != "/":
- s += " to '%s'" % self.root_config.settings["ROOT"]
+ s += f" to '{self.root_config.settings['ROOT']}'"
s += ")"
return s
diff --git a/lib/_emerge/PackageMerge.py b/lib/_emerge/PackageMerge.py
index 0fd02be7b..3e03ea137 100644
--- a/lib/_emerge/PackageMerge.py
+++ b/lib/_emerge/PackageMerge.py
@@ -37,7 +37,7 @@ class PackageMerge(CompositeTask):
)
if pkg.root_config.settings["ROOT"] != "/":
- msg += " {} {}".format(preposition, pkg.root)
+ msg += f" {preposition} {pkg.root}"
if (
not self.merge.build_opts.fetchonly
diff --git a/lib/_emerge/PackageUninstall.py b/lib/_emerge/PackageUninstall.py
index 1bd70b8ca..929e134f1 100644
--- a/lib/_emerge/PackageUninstall.py
+++ b/lib/_emerge/PackageUninstall.py
@@ -94,9 +94,9 @@ class PackageUninstall(CompositeTask):
return
self._writemsg_level(
- ">>> Unmerging {}...\n".format(self.pkg.cpv), noiselevel=-1
+ f">>> Unmerging {self.pkg.cpv}...\n", noiselevel=-1
)
- self._emergelog("=== Unmerging... ({})".format(self.pkg.cpv))
+ self._emergelog(f"=== Unmerging... ({self.pkg.cpv})")
cat, pf = portage.catsplit(self.pkg.cpv)
unmerge_task = MergeProcess(
@@ -117,9 +117,9 @@ class PackageUninstall(CompositeTask):
def _unmerge_exit(self, unmerge_task):
if self._final_exit(unmerge_task) != os.EX_OK:
- self._emergelog(" !!! unmerge FAILURE: {}".format(self.pkg.cpv))
+ self._emergelog(f" !!! unmerge FAILURE: {self.pkg.cpv}")
else:
- self._emergelog(" >>> unmerge success: {}".format(self.pkg.cpv))
+ self._emergelog(f" >>> unmerge success: {self.pkg.cpv}")
self.world_atom(self.pkg)
self._async_unlock_builddir(returncode=self.returncode)
diff --git a/lib/_emerge/Scheduler.py b/lib/_emerge/Scheduler.py
index f92573bb7..139022ac0 100644
--- a/lib/_emerge/Scheduler.py
+++ b/lib/_emerge/Scheduler.py
@@ -455,7 +455,7 @@ class Scheduler(PollScheduler):
msg.append(pkg_str)
msg.append("")
writemsg_level(
- "".join("{}\n".format(l) for l in msg),
+ "".join(f"{l}\n" for l in msg),
level=logging.INFO,
noiselevel=-1,
)
@@ -728,11 +728,11 @@ class Scheduler(PollScheduler):
portdb = x.root_config.trees["porttree"].dbapi
ebuild_path = portdb.findname(x.cpv, myrepo=x.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % x.cpv)
+ raise AssertionError(f"ebuild not found for '{x.cpv}'")
pkgsettings["O"] = os.path.dirname(ebuild_path)
if not digestgen(mysettings=pkgsettings, myportdb=portdb):
writemsg_level(
- "!!! Unable to generate manifest for '%s'.\n" % x.cpv,
+ f"!!! Unable to generate manifest for '{x.cpv}'.\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -777,7 +777,7 @@ class Scheduler(PollScheduler):
quiet_config = quiet_settings[root_config.root]
ebuild_path = portdb.findname(x.cpv, myrepo=x.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % x.cpv)
+ raise AssertionError(f"ebuild not found for '{x.cpv}'")
quiet_config["O"] = os.path.dirname(ebuild_path)
if not digestcheck([], quiet_config, strict=True):
failures |= 1
@@ -921,7 +921,7 @@ class Scheduler(PollScheduler):
portdb = root_config.trees["porttree"].dbapi
ebuild_path = portdb.findname(x.cpv, myrepo=x.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % x.cpv)
+ raise AssertionError(f"ebuild not found for '{x.cpv}'")
portage.package.ebuild.doebuild.doebuild_environment(
ebuild_path,
"clean",
@@ -993,7 +993,7 @@ class Scheduler(PollScheduler):
portdb = root_config.trees["porttree"].dbapi
ebuild_path = portdb.findname(x.cpv, myrepo=x.repo)
if ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % x.cpv)
+ raise AssertionError(f"ebuild not found for '{x.cpv}'")
settings.configdict["pkg"]["EMERGE_FROM"] = "ebuild"
if self._build_opts.buildpkgonly:
settings.configdict["pkg"]["MERGE_TYPE"] = "buildonly"
@@ -1133,7 +1133,7 @@ class Scheduler(PollScheduler):
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_IGN)
portage.util.writemsg(
- "\n\nExiting on signal {signal}\n".format(signal=signum)
+ f"\n\nExiting on signal {signum}\n"
)
self.terminate()
received_signal.append(128 + signum)
@@ -1251,7 +1251,7 @@ class Scheduler(PollScheduler):
for line in log_file:
writemsg_level(line, noiselevel=-1)
except zlib.error as e:
- writemsg_level("{}\n".format(e), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"{e}\n", level=logging.ERROR, noiselevel=-1)
finally:
log_file.close()
if log_file_real is not None:
@@ -1274,11 +1274,10 @@ class Scheduler(PollScheduler):
for mysettings, key, logentries in self._failed_pkgs_die_msgs:
root_msg = ""
if mysettings["ROOT"] != "/":
- root_msg = " merged to %s" % mysettings["ROOT"]
+ root_msg = f" merged to {mysettings['ROOT']}"
print()
printer.einfo(
- "Error messages for package %s%s:"
- % (colorize("INFORM", key), root_msg)
+ f"Error messages for package {colorize('INFORM', key)}{root_msg}:"
)
print()
for phase in portage.const.EBUILD_PHASES:
@@ -1297,7 +1296,7 @@ class Scheduler(PollScheduler):
if len(self._failed_pkgs_all) > 1 or (self._failed_pkgs_all and keep_going):
if len(self._failed_pkgs_all) > 1:
msg = (
- "The following %d packages have " % len(self._failed_pkgs_all)
+ f"The following {len(self._failed_pkgs_all)} packages have "
+ "failed to build, install, or execute postinst:"
)
else:
@@ -1311,7 +1310,7 @@ class Scheduler(PollScheduler):
printer.eerror(line)
printer.eerror("")
for failed_pkg in self._failed_pkgs_all:
- msg = " {}".format(failed_pkg.pkg)
+ msg = f" {failed_pkg.pkg}"
if failed_pkg.postinst_failure:
msg += " (postinst failed)"
log_path = self._locate_failure_log(failed_pkg)
@@ -1319,7 +1318,7 @@ class Scheduler(PollScheduler):
msg += ", Log file:"
printer.eerror(msg)
if log_path is not None:
- printer.eerror(" '%s'" % colorize("INFORM", log_path))
+ printer.eerror(f" '{colorize('INFORM', log_path)}'")
printer.eerror("")
if self._failed_pkgs_all:
@@ -2024,9 +2023,9 @@ class Scheduler(PollScheduler):
def _failed_pkg_msg(self, failed_pkg, action, preposition):
pkg = failed_pkg.pkg
- msg = "{} to {} {}".format(bad("Failed"), action, colorize("INFORM", pkg.cpv))
+ msg = f"{bad('Failed')} to {action} {colorize('INFORM', pkg.cpv)}"
if pkg.root_config.settings["ROOT"] != "/":
- msg += " {} {}".format(preposition, pkg.root)
+ msg += f" {preposition} {pkg.root}"
log_path = self._locate_failure_log(failed_pkg)
if log_path is not None:
@@ -2034,7 +2033,7 @@ class Scheduler(PollScheduler):
self._status_msg(msg)
if log_path is not None:
- self._status_msg(" '{}'".format(colorize("INFORM", log_path)))
+ self._status_msg(f" '{colorize('INFORM', log_path)}'")
def _status_msg(self, msg):
"""
@@ -2162,13 +2161,13 @@ class Scheduler(PollScheduler):
if not (isinstance(task, Package) and task.operation == "merge"):
continue
pkg = task
- msg = "emerge --keep-going:" + " {}".format(pkg.cpv)
+ msg = "emerge --keep-going:" + f" {pkg.cpv}"
if pkg.root_config.settings["ROOT"] != "/":
- msg += " for {}".format(pkg.root)
+ msg += f" for {pkg.root}"
if not atoms:
msg += " dropped because it is masked or unavailable"
else:
- msg += " dropped because it requires %s" % ", ".join(set(atoms))
+ msg += f" dropped because it requires {', '.join(set(atoms))}"
for line in textwrap.wrap(msg, msg_width):
eerror(line, phase="other", key=pkg.cpv)
settings = self.pkgsettings[pkg.root]
@@ -2252,7 +2251,7 @@ class Scheduler(PollScheduler):
world_set.add(atom)
else:
writemsg_level(
- '\n!!! Unable to record {} in "world"\n'.format(atom),
+ f'\n!!! Unable to record {atom} in "world"\n',
level=logging.WARN,
noiselevel=-1,
)
diff --git a/lib/_emerge/SubProcess.py b/lib/_emerge/SubProcess.py
index 39b5ece0b..2399b00d1 100644
--- a/lib/_emerge/SubProcess.py
+++ b/lib/_emerge/SubProcess.py
@@ -40,7 +40,7 @@ class SubProcess(AbstractPollTask):
def _async_wait(self):
if self.returncode is None:
- raise asyncio.InvalidStateError("Result is not ready for {}".format(self))
+ raise asyncio.InvalidStateError(f"Result is not ready for {self}")
else:
# This calls _unregister, so don't call it until pid status
# is available.
@@ -66,7 +66,7 @@ class SubProcess(AbstractPollTask):
def _async_waitpid_cb(self, pid, returncode):
if pid != self.pid:
- raise AssertionError("expected pid {}, got {}".format(self.pid, pid))
+ raise AssertionError(f"expected pid {self.pid}, got {pid}")
self.returncode = returncode
self._async_wait()
diff --git a/lib/_emerge/UseFlagDisplay.py b/lib/_emerge/UseFlagDisplay.py
index e79bbc83c..403df95a2 100644
--- a/lib/_emerge/UseFlagDisplay.py
+++ b/lib/_emerge/UseFlagDisplay.py
@@ -26,7 +26,7 @@ class UseFlagDisplay:
s = "-" + s
s = blue(s)
if self.forced:
- s = "(%s)" % s
+ s = f"({s})"
return s
def _cmp_combined(a, b):
@@ -111,7 +111,7 @@ def pkg_use_display(pkg, opts, modified_use=None):
else:
flags.sort(key=UseFlagDisplay.sort_separated)
flag_displays.append(
- '{}="{}"'.format(varname, " ".join("{}".format(f) for f in flags))
+ f"{varname}=\"{' '.join('{}'.format(f) for f in flags)}\""
)
return " ".join(flag_displays)
diff --git a/lib/_emerge/UserQuery.py b/lib/_emerge/UserQuery.py
index 4dcb704fa..c304a82cb 100644
--- a/lib/_emerge/UserQuery.py
+++ b/lib/_emerge/UserQuery.py
@@ -67,7 +67,7 @@ class UserQuery:
# first value in responses.
if response.upper() == key[: len(response)].upper():
return key
- print("Sorry, response '%s' not understood." % response, end=" ")
+ print(f"Sorry, response '{response}' not understood.", end=" ")
except (EOFError, KeyboardInterrupt):
print("Interrupted.")
sys.exit(128 + signal.SIGINT)
diff --git a/lib/_emerge/actions.py b/lib/_emerge/actions.py
index 047948f56..ca1cafde2 100644
--- a/lib/_emerge/actions.py
+++ b/lib/_emerge/actions.py
@@ -179,7 +179,7 @@ def action_build(
getbinpkgs="--getbinpkg" in emerge_config.opts, **kwargs
)
except ParseError as e:
- writemsg("\n\n!!!%s.\nSee make.conf(5) for more info.\n" % e, noiselevel=-1)
+ writemsg(f"\n\n!!!{e}.\nSee make.conf(5) for more info.\n", noiselevel=-1)
return 1
# validate the state of the resume data
@@ -236,7 +236,7 @@ def action_build(
myopts.update(resume_opts)
if "--debug" in myopts:
- writemsg_level("myopts {}\n".format(myopts))
+ writemsg_level(f"myopts {myopts}\n")
# Adjust config according to options of the command being resumed.
for myroot in trees:
@@ -285,7 +285,7 @@ def action_build(
prefix = bad(" * ")
writemsg(prefix + "\n")
for line in textwrap.wrap(msg, 72):
- writemsg("{}{}\n".format(prefix, line))
+ writemsg(f"{prefix}{line}\n")
writemsg(prefix + "\n")
if resume:
@@ -357,7 +357,7 @@ def action_build(
for line in textwrap.wrap(msg, 72):
out.eerror(line)
elif isinstance(e, portage.exception.PackageNotFound):
- out.eerror("An expected package is " + "not available: %s" % str(e))
+ out.eerror("An expected package is " + f"not available: {str(e)}")
out.eerror("")
msg = (
"The resume list contains one or more "
@@ -379,12 +379,12 @@ def action_build(
for task, atoms in dropped_tasks.items():
if not atoms:
writemsg(
- " {} is masked or unavailable\n".format(task),
+ f" {task} is masked or unavailable\n",
noiselevel=-1,
)
else:
writemsg(
- " {} requires {}\n".format(task, ", ".join(atoms)),
+ f" {task} requires {', '.join(atoms)}\n",
noiselevel=-1,
)
@@ -438,7 +438,7 @@ def action_build(
)
except ParseError as e:
writemsg(
- "\n\n!!!%s.\nSee make.conf(5) for more info.\n" % e,
+ f"\n\n!!!{e}.\nSee make.conf(5) for more info.\n",
noiselevel=-1,
)
return 1
@@ -644,7 +644,7 @@ def action_build(
gpg.unlock()
except GPGException as e:
writemsg_level(
- colorize("BAD", "!!! %s\n" % e),
+ colorize("BAD", f"!!! {e}\n"),
level=logging.ERROR,
noiselevel=-1,
)
@@ -706,7 +706,7 @@ def action_config(settings, trees, myopts, myfiles):
sys.exit(1)
if not is_valid_package_atom(myfiles[0], allow_repo=True):
portage.writemsg(
- "!!! '%s' is not a valid package atom.\n" % myfiles[0], noiselevel=-1
+ f"!!! '{myfiles[0]}' is not a valid package atom.\n", noiselevel=-1
)
portage.writemsg("!!! Please check ebuild(5) for full details.\n")
portage.writemsg(
@@ -748,7 +748,7 @@ def action_config(settings, trees, myopts, myfiles):
print()
if "--ask" in myopts:
- if uq.query("Ready to configure %s?" % pkg, enter_invalid) == "No":
+ if uq.query(f"Ready to configure {pkg}?", enter_invalid) == "No":
sys.exit(128 + signal.SIGINT)
else:
print("Configuring pkg...")
@@ -823,7 +823,7 @@ def action_depclean(
msg.append("unless *all* required dependencies have been resolved. As a\n")
msg.append("consequence of this, it often becomes necessary to run \n")
msg.append(
- "%s" % good("`emerge --update --newuse --deep @world`")
+ f"{good('`emerge --update --newuse --deep @world`')}"
+ " prior to depclean.\n"
)
@@ -844,13 +844,12 @@ def action_depclean(
matched_packages = True
else:
writemsg_level(
- "--- Couldn't find '%s' to %s.\n"
- % (x.replace("null/", ""), action),
+ f"--- Couldn't find '{x.replace('null/', '')}' to {action}.\n",
level=logging.WARN,
noiselevel=-1,
)
if not matched_packages:
- writemsg_level(">>> No packages selected for removal by %s\n" % action)
+ writemsg_level(f">>> No packages selected for removal by {action}\n")
return 0
# The calculation is done in a separate function so that depgraph
@@ -891,10 +890,10 @@ def action_depclean(
set_atoms[k] = root_config.sets[k].getAtoms()
print("Packages installed: " + str(len(vardb.cpv_all())))
- print("Packages in world: %d" % len(set_atoms["selected"]))
- print("Packages in system: %d" % len(set_atoms["system"]))
+ print(f"Packages in world: {len(set_atoms['selected'])}")
+ print(f"Packages in system: {len(set_atoms['system'])}")
if set_atoms["profile"]:
- print("Packages in profile: %d" % len(set_atoms["profile"]))
+ print(f"Packages in profile: {len(set_atoms['profile'])}")
print("Required packages: " + str(req_pkg_count))
if "--pretend" in myopts:
print("Number to remove: " + str(len(cleanlist)))
@@ -1134,12 +1133,12 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
msg.append("Broken soname dependencies found:")
msg.append("")
for atom, parent in soname_deps:
- msg.append(" {} required by:".format(atom))
- msg.append(" {}".format(parent))
+ msg.append(f" {atom} required by:")
+ msg.append(f" {parent}")
msg.append("")
writemsg_level(
- "".join("{}{}\n".format(prefix, line) for line in msg),
+ "".join(f"{prefix}{line}\n" for line in msg),
level=logging.WARNING,
noiselevel=-1,
)
@@ -1181,11 +1180,11 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
and vardb.match(Atom(str(atom)))
):
msg.append(
- " {} ({}) pulled in by:".format(atom.unevaluated_atom, atom)
+ f" {atom.unevaluated_atom} ({atom}) pulled in by:"
)
else:
- msg.append(" {} pulled in by:".format(atom))
- msg.append(" {}".format(parent))
+ msg.append(f" {atom} pulled in by:")
+ msg.append(f" {parent}")
msg.append("")
msg.extend(
textwrap.wrap(
@@ -1222,10 +1221,10 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
msg.append("")
msg.append(
"If you would like to ignore "
- + "dependencies then use %s." % good("--nodeps")
+ + f"dependencies then use {good('--nodeps')}."
)
writemsg_level(
- "".join("{}{}\n".format(prefix, line) for line in msg),
+ "".join(f"{prefix}{line}\n" for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -1277,9 +1276,9 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
)
parent_strs.sort()
msg = []
- msg.append(" {} pulled in by:\n".format(child_node.cpv))
+ msg.append(f" {child_node.cpv} pulled in by:\n")
for parent_str in parent_strs:
- msg.append(" {}\n".format(parent_str))
+ msg.append(f" {parent_str}\n")
msg.append("\n")
portage.writemsg_stdout("".join(msg), noiselevel=-1)
@@ -1334,14 +1333,14 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
show_parents(pkg)
if not pkgs_to_remove:
- writemsg_level(">>> No packages selected for removal by %s\n" % action)
+ writemsg_level(f">>> No packages selected for removal by {action}\n")
if "--verbose" not in myopts:
writemsg_level(
- ">>> To see reverse dependencies, use %s\n" % good("--verbose")
+ f">>> To see reverse dependencies, use {good('--verbose')}\n"
)
if action == "prune":
writemsg_level(
- ">>> To ignore dependencies, use %s\n" % good("--nodeps")
+ f">>> To ignore dependencies, use {good('--nodeps')}\n"
)
return pkgs_to_remove
@@ -1515,7 +1514,7 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
prefix = bad(" * ")
writemsg_level(
- "".join(prefix + "%s\n" % line for line in textwrap.wrap(msg, 70)),
+ "".join(prefix + f"{line}\n" for line in textwrap.wrap(msg, 70)),
level=logging.WARNING,
noiselevel=-1,
)
@@ -1534,15 +1533,15 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
consumer.mycpv for consumer in unique_consumers
)
msg.append("")
- msg.append(" {} pulled in by:".format(pkg.cpv))
+ msg.append(f" {pkg.cpv} pulled in by:")
for consumer in unique_consumers:
libs = consumer_libs[consumer]
msg.append(
- " {} needs {}".format(consumer, ", ".join(sorted(libs)))
+ f" {consumer} needs {', '.join(sorted(libs))}"
)
msg.append("")
writemsg_level(
- "".join(prefix + "%s\n" % line for line in msg),
+ "".join(prefix + f"{line}\n" for line in msg),
level=logging.WARNING,
noiselevel=-1,
)
@@ -1621,17 +1620,17 @@ def _calc_depclean(settings, trees, ldpath_mtimes, myopts, action, args_set, spi
if debug:
writemsg_level(
- "\nParent: {}\n".format(node),
+ f"\nParent: {node}\n",
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Depstring: {}\n".format(depstr),
+ f"Depstring: {depstr}\n",
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Priority: {}\n".format(priority),
+ f"Priority: {priority}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -1773,7 +1772,7 @@ def action_deselect(settings, trees, opts, atoms):
for cpv in vardb.match(atom):
pkg = vardb._pkg_str(cpv, None)
- expanded_atoms.add(Atom("{}:{}".format(pkg.cp, pkg.slot)))
+ expanded_atoms.add(Atom(f"{pkg.cp}:{pkg.slot}"))
discard_atoms = set()
for atom in world_set:
@@ -1897,11 +1896,11 @@ def action_info(settings, trees, myopts, myfiles):
break
if not cp_exists:
- xinfo = '"%s"' % x.unevaluated_atom
+ xinfo = f'"{x.unevaluated_atom}"'
# Discard null/ from failed cpv_expand category expansion.
xinfo = xinfo.replace("null/", "")
if settings["ROOT"] != "/":
- xinfo = "{} for {}".format(xinfo, eroot)
+ xinfo = f"{xinfo} for {eroot}"
writemsg(
"\nemerge: there are no ebuilds to satisfy %s.\n"
% colorize("INFORM", xinfo),
@@ -1959,7 +1958,7 @@ def action_info(settings, trees, myopts, myfiles):
append(header_width * "=")
append(header_title.rjust(int(header_width / 2 + len(header_title) / 2)))
append(header_width * "=")
- append("System uname: {}".format(platform.platform(aliased=1)))
+ append(f"System uname: {platform.platform(aliased=1)}")
vm_info = get_vm_info()
if "ram.total" in vm_info:
@@ -1979,7 +1978,7 @@ def action_info(settings, trees, myopts, myfiles):
)
head_commit = None
if last_sync:
- append("Timestamp of repository {}: {}".format(repo.name, last_sync[0]))
+ append(f"Timestamp of repository {repo.name}: {last_sync[0]}")
if repo.sync_type:
sync = portage.sync.module_controller.get_class(repo.sync_type)()
options = {"repo": repo}
@@ -1988,7 +1987,7 @@ def action_info(settings, trees, myopts, myfiles):
except NotImplementedError:
head_commit = (1, False)
if head_commit and head_commit[0] == os.EX_OK:
- append("Head commit of repository {}: {}".format(repo.name, head_commit[1]))
+ append(f"Head commit of repository {repo.name}: {head_commit[1]}")
# Searching contents for the /bin/sh provider is somewhat
# slow. Therefore, use the basename of the symlink target
@@ -2004,7 +2003,7 @@ def action_info(settings, trees, myopts, myfiles):
os.path.realpath(os.path.join(os.sep, portage.const.EPREFIX, "bin", "sh"))
)
try:
- Atom("null/%s" % basename)
+ Atom(f"null/{basename}")
except InvalidAtom:
matches = None
else:
@@ -2016,7 +2015,7 @@ def action_info(settings, trees, myopts, myfiles):
# If the name is ambiguous, then restrict our match
# to the app-shells category.
matches = trees[trees._running_eroot]["vartree"].dbapi.match(
- "app-shells/%s" % basename
+ f"app-shells/{basename}"
)
if matches:
@@ -2026,11 +2025,11 @@ def action_info(settings, trees, myopts, myfiles):
# Omit app-shells category from the output.
if name.startswith("app-shells/"):
name = name[len("app-shells/") :]
- sh_str = "{} {}".format(name, version)
+ sh_str = f"{name} {version}"
else:
sh_str = basename
- append("sh %s" % sh_str)
+ append(f"sh {sh_str}")
ld_names = []
if chost:
@@ -2047,7 +2046,7 @@ def action_info(settings, trees, myopts, myfiles):
output = _unicode_decode(proc.communicate()[0]).splitlines()
proc.wait()
if proc.wait() == os.EX_OK and output:
- append("ld %s" % (output[0]))
+ append(f"ld {output[0]}")
break
try:
@@ -2135,14 +2134,14 @@ def action_info(settings, trees, myopts, myfiles):
if matched_cp == orig_atom.cp:
provide_suffix = ""
else:
- provide_suffix = " ({})".format(orig_atom)
+ provide_suffix = f" ({orig_atom})"
ver_map[ver] = _info_pkgs_ver(ver, repo_suffix, provide_suffix)
for cp in sorted(cp_map):
versions = sorted(cp_map[cp].values())
versions = ", ".join(ver.toString() for ver in versions)
- append("{} {}".format((cp + ":").ljust(cp_max_len + 1), versions))
+ append(f"{(cp + ':').ljust(cp_max_len + 1)} {versions}")
append("Repositories:\n")
for repo in repos:
@@ -2222,7 +2221,7 @@ def action_info(settings, trees, myopts, myfiles):
v = _hide_url_passwd(v)
- append('{}="{}"'.format(k, v))
+ append(f'{k}="{v}"')
else:
use = set(v.split())
for varname in use_expand:
@@ -2232,11 +2231,11 @@ def action_info(settings, trees, myopts, myfiles):
use.remove(f)
use = list(use)
use.sort()
- use = ['USE="%s"' % " ".join(use)]
+ use = [f"USE=\"{' '.join(use)}\""]
for varname in use_expand:
myval = settings.get(varname)
if myval:
- use.append('{}="{}"'.format(varname, myval))
+ use.append(f'{varname}="{myval}"')
append(" ".join(use))
else:
unset_vars.append(k)
@@ -2306,11 +2305,11 @@ def action_info(settings, trees, myopts, myfiles):
% colorize("INFORM", str(pkg.cpv + _repo_separator + pkg.repo))
)
- append("%s" % pkg_use_display(pkg, myopts))
+ append(f"{pkg_use_display(pkg, myopts)}")
if pkg_type == "installed":
for myvar in mydesiredvars:
if metadata[myvar].split() != settings.get(myvar, "").split():
- append('{}="{}"'.format(myvar, metadata[myvar]))
+ append(f'{myvar}="{metadata[myvar]}"')
append("")
append("")
writemsg_stdout("\n".join(output_buffer), noiselevel=-1)
@@ -2321,7 +2320,7 @@ def action_info(settings, trees, myopts, myfiles):
continue
writemsg_stdout(
- ">>> Attempting to run pkg_info() for '%s'\n" % pkg.cpv, noiselevel=-1
+ f">>> Attempting to run pkg_info() for '{pkg.cpv}'\n", noiselevel=-1
)
if pkg_type == "installed":
@@ -2354,7 +2353,7 @@ def action_info(settings, trees, myopts, myfiles):
file.close()
if not ebuildpath or not os.path.exists(ebuildpath):
- out.ewarn("No ebuild found for '%s'" % pkg.cpv)
+ out.ewarn(f"No ebuild found for '{pkg.cpv}'")
continue
if pkg_type == "installed":
@@ -2424,9 +2423,7 @@ def action_search(root_config, myopts, myfiles, spinner):
searchinstance.execute(mysearch)
except re.error as comment:
print(
- '\n!!! Regular expression error in "{}": {}'.format(
- mysearch, comment
- )
+ f'\n!!! Regular expression error in "{mysearch}": {comment}'
)
sys.exit(1)
searchinstance.output()
@@ -2463,7 +2460,7 @@ def action_sync(
print_results(msgs)
elif msgs and not success:
writemsg_level(
- "".join("{}\n".format(line) for line in msgs),
+ "".join(f"{line}\n" for line in msgs),
level=logging.ERROR,
noiselevel=-1,
)
@@ -2499,11 +2496,11 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
)
for line in textwrap.wrap(msg, 70):
writemsg_level(
- "!!! {}\n".format(line), level=logging.ERROR, noiselevel=-1
+ f"!!! {line}\n", level=logging.ERROR, noiselevel=-1
)
for i in e.args[0]:
writemsg_level(
- " %s\n" % colorize("INFORM", i),
+ f" {colorize('INFORM', i)}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -2547,10 +2544,10 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
ext_atom = Atom(x, allow_repo=True, allow_wildcard=True)
except InvalidAtom:
msg = []
- msg.append("'{}' is not a valid package atom.".format(x))
+ msg.append(f"'{x}' is not a valid package atom.")
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
- "".join("!!! %s\n" % line for line in msg),
+ "".join(f"!!! {line}\n" for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -2582,10 +2579,10 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
else:
msg = []
- msg.append("'{}' is not a valid package atom.".format(x))
+ msg.append(f"'{x}' is not a valid package atom.")
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
- "".join("!!! %s\n" % line for line in msg),
+ "".join(f"!!! {line}\n" for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -2611,7 +2608,7 @@ def action_uninstall(settings, trees, ldpath_mtimes, opts, action, files, spinne
if owners:
for cpv in owners:
pkg = vardb._pkg_str(cpv, None)
- atom = "{}:{}".format(pkg.cp, pkg.slot)
+ atom = f"{pkg.cp}:{pkg.slot}"
valid_atoms.append(portage.dep.Atom(atom))
else:
writemsg_level(
@@ -2730,9 +2727,9 @@ def adjust_config(myopts, settings):
try:
CLEAN_DELAY = int(settings.get("CLEAN_DELAY", str(CLEAN_DELAY)))
except ValueError as e:
- portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ portage.writemsg(f"!!! {str(e)}\n", noiselevel=-1)
portage.writemsg(
- "!!! Unable to parse integer: CLEAN_DELAY='%s'\n" % settings["CLEAN_DELAY"],
+ f"!!! Unable to parse integer: CLEAN_DELAY='{settings['CLEAN_DELAY']}'\n",
noiselevel=-1,
)
settings["CLEAN_DELAY"] = str(CLEAN_DELAY)
@@ -2744,7 +2741,7 @@ def adjust_config(myopts, settings):
settings.get("EMERGE_WARNING_DELAY", str(EMERGE_WARNING_DELAY))
)
except ValueError as e:
- portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ portage.writemsg(f"!!! {str(e)}\n", noiselevel=-1)
portage.writemsg(
"!!! Unable to parse integer: EMERGE_WARNING_DELAY='%s'\n"
% settings["EMERGE_WARNING_DELAY"],
@@ -2783,7 +2780,7 @@ def adjust_config(myopts, settings):
portage.writemsg("!!! PORTAGE_DEBUG must be either 0 or 1\n", noiselevel=-1)
PORTAGE_DEBUG = 0
except ValueError as e:
- portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ portage.writemsg(f"!!! {str(e)}\n", noiselevel=-1)
portage.writemsg(
"!!! Unable to parse integer: PORTAGE_DEBUG='%s'\n"
% settings["PORTAGE_DEBUG"],
@@ -2828,10 +2825,10 @@ def display_missing_pkg_set(root_config, set_name):
msg.append("")
for s in sorted(root_config.sets):
- msg.append(" %s" % s)
+ msg.append(f" {s}")
msg.append("")
- writemsg_level("".join("%s\n" % l for l in msg), level=logging.ERROR, noiselevel=-1)
+ writemsg_level("".join(f"{l}\n" for l in msg), level=logging.ERROR, noiselevel=-1)
def relative_profile_path(portdir, abs_profile):
@@ -3050,7 +3047,7 @@ def validate_ebuild_environment(trees):
msg = (
"WARNING: The FEATURES variable contains one "
+ "or more values that should be disabled under "
- + "normal circumstances: %s" % " ".join(features_warn)
+ + f"normal circumstances: {' '.join(features_warn)}"
)
out = portage.output.EOutput()
for line in textwrap.wrap(msg, 65):
@@ -3063,9 +3060,9 @@ def check_procfs():
procfs_path = "/proc"
if platform.system() not in ("Linux",) or os.path.ismount(procfs_path):
return os.EX_OK
- msg = "It seems that %s is not mounted. You have been warned." % procfs_path
+ msg = f"It seems that {procfs_path} is not mounted. You have been warned."
writemsg_level(
- "".join("!!! %s\n" % l for l in textwrap.wrap(msg, 70)),
+ "".join(f"!!! {l}\n" for l in textwrap.wrap(msg, 70)),
level=logging.ERROR,
noiselevel=-1,
)
@@ -3078,7 +3075,7 @@ def config_protect_check(trees):
if not settings.get("CONFIG_PROTECT"):
msg = "!!! CONFIG_PROTECT is empty"
if settings["ROOT"] != "/":
- msg += " for '%s'" % root
+ msg += f" for '{root}'"
msg += "\n"
writemsg_level(msg, level=logging.WARN, noiselevel=-1)
@@ -3095,10 +3092,9 @@ def nice(settings):
except (OSError, ValueError) as e:
out = portage.output.EOutput()
out.eerror(
- "Failed to change nice value to '%s'"
- % settings.get("PORTAGE_NICENESS", "0")
+ f"Failed to change nice value to '{settings.get('PORTAGE_NICENESS', '0')}'"
)
- out.eerror("%s\n" % str(e))
+ out.eerror(f"{str(e)}\n")
def ionice(settings):
@@ -3195,18 +3191,18 @@ def get_missing_sets(root_config):
def missing_sets_warning(root_config, missing_sets):
if len(missing_sets) > 2:
- missing_sets_str = ", ".join('"%s"' % s for s in missing_sets[:-1])
- missing_sets_str += ', and "%s"' % missing_sets[-1]
+ missing_sets_str = ", ".join(f'"{s}"' for s in missing_sets[:-1])
+ missing_sets_str += f', and "{missing_sets[-1]}"'
elif len(missing_sets) == 2:
missing_sets_str = '"%s" and "%s"' % tuple(missing_sets)
else:
- missing_sets_str = '"%s"' % missing_sets[-1]
+ missing_sets_str = f'"{missing_sets[-1]}"'
msg = [
"emerge: incomplete set configuration, "
- + "missing set(s): %s" % missing_sets_str
+ + f"missing set(s): {missing_sets_str}"
]
if root_config.sets:
- msg.append(" sets defined: %s" % ", ".join(root_config.sets))
+ msg.append(f" sets defined: {', '.join(root_config.sets)}")
global_config_path = portage.const.GLOBAL_CONFIG_PATH
if portage.const.EPREFIX:
global_config_path = os.path.join(
@@ -3292,7 +3288,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
# display errors that occurred while loading the SetConfig instance
for e in setconfig.errors:
- print(colorize("BAD", "Error during set creation: %s" % e))
+ print(colorize("BAD", f"Error during set creation: {e}"))
unmerge_actions = ("unmerge", "prune", "clean", "depclean", "rage-clean")
@@ -3352,7 +3348,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
return (None, 1)
if myaction in unmerge_actions and not sets[s].supportsOperation("unmerge"):
writemsg_level(
- "emerge: the given set '%s' does " % s
+ f"emerge: the given set '{s}' does "
+ "not support unmerge operations\n",
level=logging.ERROR,
noiselevel=-1,
@@ -3360,7 +3356,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
retval = 1
elif not set_atoms:
writemsg_level(
- "emerge: '%s' is an empty set\n" % s,
+ f"emerge: '{s}' is an empty set\n",
level=logging.INFO,
noiselevel=-1,
)
@@ -3368,7 +3364,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
newargs.extend(set_atoms)
for error_msg in sets[s].errors:
writemsg_level(
- "{}\n".format(error_msg), level=logging.ERROR, noiselevel=-1
+ f"{error_msg}\n", level=logging.ERROR, noiselevel=-1
)
else:
newargs.append(a)
@@ -3397,7 +3393,7 @@ def repo_name_check(trees):
)
msg.append("")
for p in missing_repo_names:
- msg.append("\t{}/profiles/repo_name".format(p))
+ msg.append(f"\t{p}/profiles/repo_name")
msg.append("")
msg.extend(
textwrap.wrap(
@@ -3409,7 +3405,7 @@ def repo_name_check(trees):
)
msg.append("\n")
writemsg_level(
- "".join("%s\n" % l for l in msg), level=logging.WARNING, noiselevel=-1
+ "".join(f"{l}\n" for l in msg), level=logging.WARNING, noiselevel=-1
)
return bool(missing_repo_names)
@@ -3433,9 +3429,9 @@ def repo_name_duplicate_check(trees):
msg.append(" profiles/repo_name entries:")
msg.append("")
for k in sorted(ignored_repos):
- msg.append(" %s overrides" % ", ".join(k))
+ msg.append(f" {', '.join(k)} overrides")
for path in ignored_repos[k]:
- msg.append(" {}".format(path))
+ msg.append(f" {path}")
msg.append("")
msg.extend(
" " + x
@@ -3448,7 +3444,7 @@ def repo_name_duplicate_check(trees):
)
msg.append("\n")
writemsg_level(
- "".join("%s\n" % l for l in msg), level=logging.WARNING, noiselevel=-1
+ "".join(f"{l}\n" for l in msg), level=logging.WARNING, noiselevel=-1
)
return bool(ignored_repos)
@@ -3532,7 +3528,7 @@ def run_action(emerge_config):
)
except ParseError as e:
writemsg(
- "\n\n!!!{}.\nSee make.conf(5) for more info.\n".format(e),
+ f"\n\n!!!{e}.\nSee make.conf(5) for more info.\n",
noiselevel=-1,
)
return 1
@@ -3549,8 +3545,7 @@ def run_action(emerge_config):
and emerge_config.opts.get("--autounmask") == "n"
):
writemsg_level(
- " %s --autounmask-continue has been disabled by --autounmask=n\n"
- % warn("*"),
+ f" {warn('*')} --autounmask-continue has been disabled by --autounmask=n\n",
level=logging.WARNING,
noiselevel=-1,
)
@@ -3622,7 +3617,7 @@ def run_action(emerge_config):
if emerge_config.action == "list-sets":
writemsg_stdout(
- "".join("%s\n" % s for s in sorted(emerge_config.target_config.sets))
+ "".join(f"{s}\n" for s in sorted(emerge_config.target_config.sets))
)
return os.EX_OK
if emerge_config.action == "check-news":
@@ -3770,7 +3765,7 @@ def run_action(emerge_config):
# access is required but the user is not in the portage group.
if "--ask" in emerge_config.opts:
writemsg_stdout(
- "This action requires {} access...\n".format(access_desc),
+ f"This action requires {access_desc} access...\n",
noiselevel=-1,
)
if portage.data.secpass < 1 and not need_superuser:
@@ -3839,7 +3834,7 @@ def run_action(emerge_config):
except portage.exception.PortageException as e:
writemsg_level(
"!!! Error creating directory for "
- + "EMERGE_LOG_DIR='{}':\n!!! {}\n".format(emerge_log_dir, e),
+ + f"EMERGE_LOG_DIR='{emerge_log_dir}':\n!!! {e}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -3858,7 +3853,7 @@ def run_action(emerge_config):
time_str = _unicode_decode(
time_str, encoding=_encodings["content"], errors="replace"
)
- emergelog(xterm_titles, "Started emerge on: %s" % time_str)
+ emergelog(xterm_titles, f"Started emerge on: {time_str}")
myelogstr = ""
if emerge_config.opts:
opt_list = []
@@ -3868,9 +3863,9 @@ def run_action(emerge_config):
elif isinstance(arg, list):
# arguments like --exclude that use 'append' action
for x in arg:
- opt_list.append("{}={}".format(opt, x))
+ opt_list.append(f"{opt}={x}")
else:
- opt_list.append("{}={}".format(opt, arg))
+ opt_list.append(f"{opt}={arg}")
myelogstr = " ".join(opt_list)
if emerge_config.action:
myelogstr += " --" + emerge_config.action
@@ -3882,7 +3877,7 @@ def run_action(emerge_config):
def emergeexitsig(signum, frame):
signal.signal(signal.SIGTERM, signal.SIG_IGN)
- portage.util.writemsg("\n\nExiting on signal {signal}\n".format(signal=signum))
+ portage.util.writemsg(f"\n\nExiting on signal {signum}\n")
sys.exit(128 + signum)
signal.signal(signal.SIGTERM, emergeexitsig)
@@ -4004,11 +3999,11 @@ def run_action(emerge_config):
)
for line in textwrap.wrap(msg, 70):
writemsg_level(
- "!!! {}\n".format(line), level=logging.ERROR, noiselevel=-1
+ f"!!! {line}\n", level=logging.ERROR, noiselevel=-1
)
for i in e.args[0]:
writemsg_level(
- " %s\n" % colorize("INFORM", i),
+ f" {colorize('INFORM', i)}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -4016,10 +4011,10 @@ def run_action(emerge_config):
return 1
continue
msg = []
- msg.append("'{}' is not a valid package atom.".format(x))
+ msg.append(f"'{x}' is not a valid package atom.")
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
- "".join("!!! %s\n" % line for line in msg),
+ "".join(f"!!! {line}\n" for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
@@ -4047,10 +4042,10 @@ def run_action(emerge_config):
except OSError:
pass
msg = []
- msg.append("'{}' is not a valid package atom.".format(x))
+ msg.append(f"'{x}' is not a valid package atom.")
msg.append("Please check ebuild(5) for full details.")
writemsg_level(
- "".join("!!! %s\n" % line for line in msg),
+ "".join(f"!!! {line}\n" for line in msg),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/_emerge/chk_updated_cfg_files.py b/lib/_emerge/chk_updated_cfg_files.py
index 29f107804..a92475601 100644
--- a/lib/_emerge/chk_updated_cfg_files.py
+++ b/lib/_emerge/chk_updated_cfg_files.py
@@ -16,7 +16,7 @@ def chk_updated_cfg_files(eroot, config_protect):
for x in result:
writemsg_level(
- "\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
+ f"\n {colorize('WARN', '* ' + _('IMPORTANT:'))} ",
level=logging.INFO,
noiselevel=-1,
)
diff --git a/lib/_emerge/create_depgraph_params.py b/lib/_emerge/create_depgraph_params.py
index 11c3e3736..531230402 100644
--- a/lib/_emerge/create_depgraph_params.py
+++ b/lib/_emerge/create_depgraph_params.py
@@ -209,7 +209,7 @@ def create_depgraph_params(myopts, myaction):
if "--debug" in myopts:
writemsg_level(
- "\n\nmyparams %s\n\n" % myparams, noiselevel=-1, level=logging.DEBUG
+ f"\n\nmyparams {myparams}\n\n", noiselevel=-1, level=logging.DEBUG
)
return myparams
diff --git a/lib/_emerge/depgraph.py b/lib/_emerge/depgraph.py
index 15dcfbc84..bdac29bb1 100644
--- a/lib/_emerge/depgraph.py
+++ b/lib/_emerge/depgraph.py
@@ -801,11 +801,11 @@ class depgraph:
for root in atoms:
writemsg_level(
- " root: %s\n" % root, level=logging.DEBUG, noiselevel=-1
+ f" root: {root}\n", level=logging.DEBUG, noiselevel=-1
)
for atom in atoms[root]:
writemsg_level(
- " atom: %s\n" % atom, level=logging.DEBUG, noiselevel=-1
+ f" atom: {atom}\n", level=logging.DEBUG, noiselevel=-1
)
writemsg_level("\n\n", level=logging.DEBUG, noiselevel=-1)
@@ -962,18 +962,18 @@ class depgraph:
slot_atom,
), deps in self._dynamic_config._slot_operator_deps.items():
writemsg_level(
- " ({}, {})\n".format(root, slot_atom),
+ f" ({root}, {slot_atom})\n",
level=logging.DEBUG,
noiselevel=-1,
)
for dep in deps:
writemsg_level(
- " parent: %s\n" % dep.parent,
+ f" parent: {dep.parent}\n",
level=logging.DEBUG,
noiselevel=-1,
)
writemsg_level(
- " child: {} ({})\n".format(dep.child, dep.priority),
+ f" child: {dep.child} ({dep.priority})\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -984,15 +984,15 @@ class depgraph:
for root in forced_rebuilds:
writemsg_level(
- " root: %s\n" % root, level=logging.DEBUG, noiselevel=-1
+ f" root: {root}\n", level=logging.DEBUG, noiselevel=-1
)
for child in forced_rebuilds[root]:
writemsg_level(
- " child: %s\n" % child, level=logging.DEBUG, noiselevel=-1
+ f" child: {child}\n", level=logging.DEBUG, noiselevel=-1
)
for parent in forced_rebuilds[root][child]:
writemsg_level(
- " parent: %s\n" % parent,
+ f" parent: {parent}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1012,10 +1012,10 @@ class depgraph:
for root in self._forced_rebuilds:
for child in self._forced_rebuilds[root]:
writemsg_stdout(
- " {} causes rebuilds for:\n".format(child), noiselevel=-1
+ f" {child} causes rebuilds for:\n", noiselevel=-1
)
for parent in self._forced_rebuilds[root][child]:
- writemsg_stdout(" {}\n".format(parent), noiselevel=-1)
+ writemsg_stdout(f" {parent}\n", noiselevel=-1)
def _eliminate_ignored_binaries(self):
"""
@@ -1156,9 +1156,9 @@ class depgraph:
)
for pkg, ebuild in report_pkgs:
- writemsg(" {}::{}".format(pkg.cpv, pkg.repo), noiselevel=-1)
+ writemsg(f" {pkg.cpv}::{pkg.repo}", noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for {}".format(pkg.root), noiselevel=-1)
+ writemsg(f" for {pkg.root}", noiselevel=-1)
writemsg("\n", noiselevel=-1)
msg = []
@@ -1246,9 +1246,9 @@ class depgraph:
flag_display.append(flag)
flag_display = " ".join(flag_display)
# The user can paste this line into package.use
- writemsg(" ={} {}".format(pkg.cpv, flag_display), noiselevel=-1)
+ writemsg(f" ={pkg.cpv} {flag_display}", noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" # for {}".format(pkg.root), noiselevel=-1)
+ writemsg(f" # for {pkg.root}", noiselevel=-1)
writemsg("\n", noiselevel=-1)
msg = [
@@ -1272,10 +1272,10 @@ class depgraph:
)
for pkg in changed_deps:
- msg = " {}{}{}".format(pkg.cpv, _repo_separator, pkg.repo)
+ msg = f" {pkg.cpv}{_repo_separator}{pkg.repo}"
if pkg.root_config.settings["ROOT"] != "/":
- msg += " for %s" % pkg.root
- writemsg("%s\n" % msg, noiselevel=-1)
+ msg += f" for {pkg.root}"
+ writemsg(f"{msg}\n", noiselevel=-1)
msg = [
"",
@@ -1381,7 +1381,7 @@ class depgraph:
writemsg(str(pkg.slot_atom), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for {}".format(pkg.root), noiselevel=-1)
+ writemsg(f" for {pkg.root}", noiselevel=-1)
writemsg("\n\n", noiselevel=-1)
selected_pkg = next(
@@ -1389,9 +1389,9 @@ class depgraph:
None,
)
- writemsg(" selected: {}\n".format(selected_pkg), noiselevel=-1)
+ writemsg(f" selected: {selected_pkg}\n", noiselevel=-1)
writemsg(
- " skipped: {} (see unsatisfied dependency below)\n".format(pkg),
+ f" skipped: {pkg} (see unsatisfied dependency below)\n",
noiselevel=-1,
)
@@ -1411,7 +1411,7 @@ class depgraph:
for pkg, parent_atoms in backtrack_masked:
writemsg(str(pkg.slot_atom), noiselevel=-1)
if pkg.root_config.settings["ROOT"] != "/":
- writemsg(" for {}".format(pkg.root), noiselevel=-1)
+ writemsg(f" for {pkg.root}", noiselevel=-1)
writemsg("\n", noiselevel=-1)
def _show_missed_update_slot_conflicts(self, missed_updates):
@@ -1429,7 +1429,7 @@ class depgraph:
for pkg, parent_atoms in missed_updates:
msg.append(str(pkg.slot_atom))
if pkg.root_config.settings["ROOT"] != "/":
- msg.append(" for {}".format(pkg.root))
+ msg.append(f" for {pkg.root}")
msg.append("\n\n")
msg.append(indent)
@@ -1471,7 +1471,7 @@ class depgraph:
msg.append(2 * indent)
msg.append(
- "{} required by {} {}\n".format(atom, parent, use_display)
+ f"{atom} required by {parent} {use_display}\n"
)
msg.append(2 * indent)
msg.append(marker)
@@ -1625,25 +1625,25 @@ class depgraph:
"""
def __str__(self):
- return "(%s)" % ",".join(str(pkg) for pkg in self)
+ return f"({','.join(str(pkg) for pkg in self)})"
non_matching_forced = set()
for conflict in conflicts:
if debug:
writemsg_level(" conflict:\n", level=logging.DEBUG, noiselevel=-1)
writemsg_level(
- " root: %s\n" % conflict.root,
+ f" root: {conflict.root}\n",
level=logging.DEBUG,
noiselevel=-1,
)
writemsg_level(
- " atom: %s\n" % conflict.atom,
+ f" atom: {conflict.atom}\n",
level=logging.DEBUG,
noiselevel=-1,
)
for pkg in conflict:
writemsg_level(
- " pkg: %s\n" % pkg, level=logging.DEBUG, noiselevel=-1
+ f" pkg: {pkg}\n", level=logging.DEBUG, noiselevel=-1
)
all_parent_atoms = set()
@@ -1666,7 +1666,7 @@ class depgraph:
if debug:
writemsg_level(
- " parent: %s\n" % parent,
+ f" parent: {parent}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1677,7 +1677,7 @@ class depgraph:
noiselevel=-1,
)
writemsg_level(
- " atom: %s\n" % atom, level=logging.DEBUG, noiselevel=-1
+ f" atom: {atom}\n", level=logging.DEBUG, noiselevel=-1
)
if is_non_conflict_parent:
@@ -1693,7 +1693,7 @@ class depgraph:
if debug:
for match in matched:
writemsg_level(
- " match: %s\n" % match,
+ f" match: {match}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1717,7 +1717,7 @@ class depgraph:
if debug:
for pkg in conflict:
writemsg_level(
- " non-match: %s\n" % pkg,
+ f" non-match: {pkg}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -1809,20 +1809,20 @@ class depgraph:
)
for conflict in conflicts:
writemsg_level(
- " Conflict: ({}, {})\n".format(conflict.root, conflict.atom),
+ f" Conflict: ({conflict.root}, {conflict.atom})\n",
level=logging.DEBUG,
noiselevel=-1,
)
for pkg in conflict:
if pkg in forced:
writemsg_level(
- " keep: %s\n" % pkg,
+ f" keep: {pkg}\n",
level=logging.DEBUG,
noiselevel=-1,
)
else:
writemsg_level(
- " remove: %s\n" % pkg,
+ f" remove: {pkg}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -2041,9 +2041,9 @@ class depgraph:
"",
"",
"backtracking due to slot conflict:",
- " first package: {}".format(existing_node),
- " package(s) to mask: {}".format(str(to_be_masked)),
- " slot: {}".format(slot_atom),
+ f" first package: {existing_node}",
+ f" package(s) to mask: {str(to_be_masked)}",
+ f" slot: {slot_atom}",
" parents: {}".format(
", ".join(
"({}, '{}')".format(ppkg, atom) for ppkg, atom in all_parents
@@ -2052,7 +2052,7 @@ class depgraph:
"",
]
writemsg_level(
- "".join("%s\n" % l for l in msg), noiselevel=-1, level=logging.DEBUG
+ "".join(f"{l}\n" for l in msg), noiselevel=-1, level=logging.DEBUG
)
def _slot_conflict_backtrack_abi(self, pkg, slot_nodes, conflict_atoms):
@@ -2113,7 +2113,7 @@ class depgraph:
for unbuilt_child in chain(
matches,
self._iter_match_pkgs(
- root_config, "ebuild", Atom("={}".format(dep.child.cpv))
+ root_config, "ebuild", Atom(f"={dep.child.cpv}")
),
):
if unbuilt_child in self._dynamic_config._runtime_pkg_mask:
@@ -2143,13 +2143,12 @@ class depgraph:
"",
"",
"backtracking due to slot/sub-slot change:",
- " child package: %s" % child,
- " child slot: {}/{}".format(child.slot, child.sub_slot),
- " new child: %s" % new_child_slot,
- " new child slot: %s/%s"
- % (new_child_slot.slot, new_child_slot.sub_slot),
- " parent package: %s" % dep.parent,
- " atom: %s" % dep.atom,
+ f" child package: {child}",
+ f" child slot: {child.slot}/{child.sub_slot}",
+ f" new child: {new_child_slot}",
+ f" new child slot: {new_child_slot.slot}/{new_child_slot.sub_slot}",
+ f" parent package: {dep.parent}",
+ f" atom: {dep.atom}",
"",
]
writemsg_level("\n".join(msg), noiselevel=-1, level=logging.DEBUG)
@@ -2186,14 +2185,14 @@ class depgraph:
"",
"",
"backtracking due to missed slot abi update:",
- " child package: %s" % child,
+ f" child package: {child}",
]
if new_child_slot is not None:
- msg.append(" new child slot package: %s" % new_child_slot)
- msg.append(" parent package: %s" % dep.parent)
+ msg.append(f" new child slot package: {new_child_slot}")
+ msg.append(f" parent package: {dep.parent}")
if new_dep is not None:
- msg.append(" new parent pkg: %s" % new_dep.parent)
- msg.append(" atom: %s" % dep.atom)
+ msg.append(f" new parent pkg: {new_dep.parent}")
+ msg.append(f" atom: {dep.atom}")
msg.append("")
writemsg_level("\n".join(msg), noiselevel=-1, level=logging.DEBUG)
backtrack_infos = self._dynamic_config._backtrack_infos
@@ -2346,8 +2345,8 @@ class depgraph:
"_slot_operator_check_reverse_dependencies:",
" candidate package does not match atom '%s': %s"
% (atom, candidate_pkg),
- " parent: %s" % parent,
- " parent atoms: %s" % " ".join(parent_atoms),
+ f" parent: {parent}",
+ f" parent atoms: {' '.join(parent_atoms)}",
"",
)
writemsg_level("\n".join(msg), noiselevel=-1, level=logging.DEBUG)
@@ -2571,10 +2570,10 @@ class depgraph:
"",
"",
"slot_operator_update_probe:",
- " existing child package: %s" % dep.child,
- " existing parent package: %s" % dep.parent,
- " new child package: %s" % selected[0],
- " new parent package: %s" % replacement_parent,
+ f" existing child package: {dep.child}",
+ f" existing parent package: {dep.parent}",
+ f" new child package: {selected[0]}",
+ f" new parent package: {replacement_parent}",
"",
)
writemsg_level("\n".join(msg), noiselevel=-1, level=logging.DEBUG)
@@ -2588,8 +2587,8 @@ class depgraph:
"",
"",
"slot_operator_update_probe:",
- " existing child package: %s" % dep.child,
- " existing parent package: %s" % dep.parent,
+ f" existing child package: {dep.child}",
+ f" existing parent package: {dep.parent}",
" new child package: %s" % None,
" new parent package: %s" % None,
"",
@@ -2638,10 +2637,10 @@ class depgraph:
"",
"",
"slot_operator_unsatisfied_probe:",
- " existing parent package: %s" % dep.parent,
- " existing parent atom: %s" % dep.atom,
- " new parent package: %s" % replacement_parent,
- " new child package: %s" % pkg,
+ f" existing parent package: {dep.parent}",
+ f" existing parent atom: {dep.atom}",
+ f" new parent package: {replacement_parent}",
+ f" new child package: {pkg}",
"",
)
writemsg_level(
@@ -2655,8 +2654,8 @@ class depgraph:
"",
"",
"slot_operator_unsatisfied_probe:",
- " existing parent package: %s" % dep.parent,
- " existing parent atom: %s" % dep.atom,
+ f" existing parent package: {dep.parent}",
+ f" existing parent atom: {dep.atom}",
" new parent package: %s" % None,
" new child package: %s" % None,
"",
@@ -2674,8 +2673,8 @@ class depgraph:
"",
"",
"backtracking due to unsatisfied built slot-operator dep:",
- " parent package: %s" % parent,
- " atom: %s" % dep.atom,
+ f" parent package: {parent}",
+ f" atom: {dep.atom}",
"",
)
writemsg_level("\n".join(msg), noiselevel=-1, level=logging.DEBUG)
@@ -2851,7 +2850,7 @@ class depgraph:
SLOT may differ from the installed SLOT, so first search by cpv.
"""
built_pkgs = []
- for pkg in self._iter_similar_available(inst_pkg, Atom("=%s" % inst_pkg.cpv)):
+ for pkg in self._iter_similar_available(inst_pkg, Atom(f"={inst_pkg.cpv}")):
if not pkg.built:
return pkg.slot_atom
if not pkg.installed:
@@ -3276,13 +3275,13 @@ class depgraph:
msg.append("")
msg.append("")
msg.append("backtracking due to unsatisfied dep:")
- msg.append(" parent: %s" % dep.parent)
- msg.append(" priority: %s" % dep.priority)
- msg.append(" root: %s" % dep.root)
- msg.append(" atom: %s" % dep.atom)
+ msg.append(f" parent: {dep.parent}")
+ msg.append(f" priority: {dep.priority}")
+ msg.append(f" root: {dep.root}")
+ msg.append(f" atom: {dep.atom}")
msg.append("")
writemsg_level(
- "".join("%s\n" % l for l in msg),
+ "".join(f"{l}\n" for l in msg),
noiselevel=-1,
level=logging.DEBUG,
)
@@ -3354,7 +3353,7 @@ class depgraph:
# For PackageArg and AtomArg types, it's
# redundant to display the atom attribute.
writemsg_level(
- "{}{}\n".format("Parent Dep:".ljust(15), myparent),
+ f"{'Parent Dep:'.ljust(15)}{myparent}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -3367,7 +3366,7 @@ class depgraph:
and dep.atom.package
and dep.atom is not dep.atom.unevaluated_atom
):
- uneval = " ({})".format(dep.atom.unevaluated_atom)
+ uneval = f" ({dep.atom.unevaluated_atom})"
writemsg_level(
"%s%s%s required by %s\n"
% ("Parent Dep:".ljust(15), dep.atom, uneval, myparent),
@@ -3642,7 +3641,7 @@ class depgraph:
debug = "--debug" in self._frozen_config.myopts
if debug:
writemsg_level(
- "Removing package: %s\n" % pkg, level=logging.DEBUG, noiselevel=-1
+ f"Removing package: {pkg}\n", level=logging.DEBUG, noiselevel=-1
)
try:
@@ -3855,15 +3854,15 @@ class depgraph:
continue
if debug:
writemsg_level(
- "\nParent: {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
+ f"\nParent: {pkg}\n", noiselevel=-1, level=logging.DEBUG
)
writemsg_level(
- "Depstring: {}\n".format(dep_string),
+ f"Depstring: {dep_string}\n",
noiselevel=-1,
level=logging.DEBUG,
)
writemsg_level(
- "Priority: {}\n".format(dep_priority),
+ f"Priority: {dep_priority}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4031,16 +4030,16 @@ class depgraph:
if debug:
writemsg_level(
- "\nParent: {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
+ f"\nParent: {pkg}\n", noiselevel=-1, level=logging.DEBUG
)
dep_repr = portage.dep.paren_enclose(
dep_string, unevaluated_atom=True, opconvert=True
)
writemsg_level(
- "Depstring: {}\n".format(dep_repr), noiselevel=-1, level=logging.DEBUG
+ f"Depstring: {dep_repr}\n", noiselevel=-1, level=logging.DEBUG
)
writemsg_level(
- "Priority: {}\n".format(dep_priority),
+ f"Priority: {dep_priority}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4064,7 +4063,7 @@ class depgraph:
if debug:
writemsg_level(
- "Candidates: {}\n".format([str(x) for x in selected_atoms[pkg]]),
+ f"Candidates: {[str(x) for x in selected_atoms[pkg]]}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4180,9 +4179,7 @@ class depgraph:
if debug:
writemsg_level(
- "\nCandidates: {}: {}\n".format(
- virt_pkg.cpv, [str(x) for x in atoms]
- ),
+ f"\nCandidates: {virt_pkg.cpv}: {[str(x) for x in atoms]}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -4287,7 +4284,7 @@ class depgraph:
if debug:
writemsg_level(
- "\nExiting... {}\n".format(pkg), noiselevel=-1, level=logging.DEBUG
+ f"\nExiting... {pkg}\n", noiselevel=-1, level=logging.DEBUG
)
return 1
@@ -4480,7 +4477,7 @@ class depgraph:
categories = set()
for db, pkg_type, built, installed, db_keys in dbs:
for cat in db.categories:
- if db.cp_list("{}/{}".format(cat, atom_pn)):
+ if db.cp_list(f"{cat}/{atom_pn}"):
categories.add(cat)
deps = []
@@ -4633,7 +4630,7 @@ class depgraph:
writemsg(
colorize(
"BAD",
- "\n*** Package is missing CATEGORY metadata: %s.\n\n" % x,
+ f"\n*** Package is missing CATEGORY metadata: {x}.\n\n",
),
noiselevel=-1,
)
@@ -4642,7 +4639,7 @@ class depgraph:
x = os.path.realpath(x)
for pkg in self._iter_match_pkgs(
- root_config, "binary", Atom("=%s" % mykey)
+ root_config, "binary", Atom(f"={mykey}")
):
if x == os.path.realpath(bindb.bintree.getname(pkg.cpv)):
break
@@ -4774,7 +4771,7 @@ class depgraph:
for pset in list(depgraph_sets.sets.values()) + [sets[s]]:
for error_msg in pset.errors:
writemsg_level(
- "{}\n".format(error_msg),
+ f"{error_msg}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -4813,7 +4810,7 @@ class depgraph:
continue
if not is_valid_package_atom(x, allow_repo=True):
portage.writemsg(
- "\n\n!!! '%s' is not a valid package atom.\n" % x, noiselevel=-1
+ f"\n\n!!! '{x}' is not a valid package atom.\n", noiselevel=-1
)
portage.writemsg("!!! Please check ebuild(5) for full details.\n")
portage.writemsg(
@@ -4935,7 +4932,7 @@ class depgraph:
for cpv in owners:
pkg = vardb._pkg_str(cpv, None)
- atom = Atom("{}:{}".format(pkg.cp, pkg.slot))
+ atom = Atom(f"{pkg.cp}:{pkg.slot}")
args.append(AtomArg(arg=atom, atom=atom, root_config=root_config))
if "--update" in self._frozen_config.myopts:
@@ -5072,7 +5069,7 @@ class depgraph:
continue
if debug:
writemsg_level(
- "\n Arg: {}\n Atom: {}\n".format(arg, atom),
+ f"\n Arg: {arg}\n Atom: {atom}\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -5181,12 +5178,10 @@ class depgraph:
raise # Needed else can't exit
except Exception as e:
writemsg(
- "\n\n!!! Problem in '%s' dependencies.\n" % atom, noiselevel=-1
+ f"\n\n!!! Problem in '{atom}' dependencies.\n", noiselevel=-1
)
writemsg(
- "!!! {} {}\n".format(
- str(e), str(getattr(e, "__module__", None))
- )
+ f"!!! {str(e)} {str(getattr(e, '__module__', None))}\n"
)
raise
@@ -5448,7 +5443,7 @@ class depgraph:
slots.remove(highest_pkg.slot)
while slots:
slot = slots.pop()
- slot_atom = portage.dep.Atom("{}:{}".format(highest_pkg.cp, slot))
+ slot_atom = portage.dep.Atom(f"{highest_pkg.cp}:{slot}")
pkg, in_graph = self._select_package(root_config.root, slot_atom)
if pkg is not None and pkg.cp == highest_pkg.cp and pkg < highest_pkg:
greedy_pkgs.append(pkg)
@@ -5713,7 +5708,7 @@ class depgraph:
except InvalidDependString as e:
writemsg_level(
"!!! Invalid RDEPEND in "
- + "'{}var/db/pkg/{}/RDEPEND': {}\n".format(pkg.root, pkg.cpv, e),
+ + f"'{pkg.root}var/db/pkg/{pkg.cpv}/RDEPEND': {e}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -5752,7 +5747,7 @@ class depgraph:
raise
writemsg_level(
"!!! Invalid RDEPEND in "
- + "'{}var/db/pkg/{}/RDEPEND': {}\n".format(pkg.root, pkg.cpv, e),
+ + f"'{pkg.root}var/db/pkg/{pkg.cpv}/RDEPEND': {e}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -5787,7 +5782,7 @@ class depgraph:
graph = self._dynamic_config.digraph
def format_pkg(pkg):
- pkg_name = "{}{}{}".format(pkg.cpv, _repo_separator, pkg.repo)
+ pkg_name = f"{pkg.cpv}{_repo_separator}{pkg.repo}"
return pkg_name
if target_atom is not None and isinstance(node, Package):
@@ -5812,7 +5807,7 @@ class depgraph:
usedep.append(flag)
else:
usedep.append("-" + flag)
- pkg_name += "[%s]" % ",".join(usedep)
+ pkg_name += f"[{','.join(usedep)}]"
dep_chain.append((pkg_name, node.type_name))
@@ -5866,7 +5861,7 @@ class depgraph:
node_type = "set"
else:
node_type = "argument"
- dep_chain.append(("{}".format(node), node_type))
+ dep_chain.append((f"{node}", node_type))
elif node is not start_node:
for ppkg, patom in all_parents[child]:
@@ -5920,7 +5915,7 @@ class depgraph:
usedep.append(flag)
else:
usedep.append("-" + flag)
- pkg_name += "[%s]" % ",".join(usedep)
+ pkg_name += f"[{','.join(usedep)}]"
dep_chain.append((pkg_name, node.type_name))
@@ -5962,7 +5957,7 @@ class depgraph:
if self._dynamic_config.digraph.parent_nodes(parent_arg):
selected_parent = parent_arg
else:
- dep_chain.append(("{}".format(parent_arg), "argument"))
+ dep_chain.append((f"{parent_arg}", "argument"))
selected_parent = None
node = selected_parent
@@ -5975,9 +5970,9 @@ class depgraph:
display_list = []
for node, node_type in dep_chain:
if node_type == "argument":
- display_list.append("required by %s (argument)" % node)
+ display_list.append(f"required by {node} (argument)")
else:
- display_list.append("required by %s" % node)
+ display_list.append(f"required by {node}")
msg = "# " + "\n# ".join(display_list) + "\n"
return msg
@@ -6001,20 +5996,20 @@ class depgraph:
backtrack_mask = False
autounmask_broke_use_dep = False
if atom.package:
- xinfo = '"%s"' % atom.unevaluated_atom
+ xinfo = f'"{atom.unevaluated_atom}"'
atom_without_use = atom.without_use
else:
- xinfo = '"%s"' % atom
+ xinfo = f'"{atom}"'
atom_without_use = None
if arg:
- xinfo = '"%s"' % arg
+ xinfo = f'"{arg}"'
if isinstance(myparent, AtomArg):
- xinfo = '"{}"'.format(myparent)
+ xinfo = f'"{myparent}"'
# Discard null/ from failed cpv_expand category expansion.
xinfo = xinfo.replace("null/", "")
if root != self._frozen_config._running_root.root:
- xinfo = "{} for {}".format(xinfo, root)
+ xinfo = f"{xinfo} for {root}"
masked_packages = []
missing_use = []
missing_use_adjustable = set()
@@ -6107,8 +6102,7 @@ class depgraph:
pkg
]
mreasons.append(
- "backtracking: %s"
- % ", ".join(sorted(backtrack_reasons))
+ f"backtracking: {', '.join(sorted(backtrack_reasons))}"
)
backtrack_mask = True
if (
@@ -6139,9 +6133,7 @@ class depgraph:
except InvalidAtom:
writemsg(
"violated_conditionals raised "
- + "InvalidAtom: '{}' parent: {}".format(
- atom, myparent
- ),
+ + f"InvalidAtom: '{atom}' parent: {myparent}",
noiselevel=-1,
)
raise
@@ -6232,7 +6224,7 @@ class depgraph:
mreasons = []
if missing_iuse:
- mreasons.append("Missing IUSE: %s" % " ".join(missing_iuse))
+ mreasons.append(f"Missing IUSE: {' '.join(missing_iuse)}")
missing_iuse_reasons.append((pkg, mreasons))
else:
need_enable = sorted((atom.use.enabled - use) & pkg.iuse.all)
@@ -6270,7 +6262,7 @@ class depgraph:
changes.extend(colorize("red", "+" + x) for x in need_enable)
changes.extend(colorize("blue", "-" + x) for x in need_disable)
mreasons.append(
- "Change USE: %s" % " ".join(changes) + required_use_warning
+ f"Change USE: {' '.join(changes)}" + required_use_warning
)
missing_use_reasons.append((pkg, mreasons))
@@ -6364,7 +6356,7 @@ class depgraph:
return True
mreasons.append(
- "Change USE: %s" % " ".join(changes) + required_use_warning
+ f"Change USE: {' '.join(changes)}" + required_use_warning
)
if (myparent, mreasons) not in missing_use_reasons:
missing_use_reasons.append((myparent, mreasons))
@@ -6451,7 +6443,7 @@ class depgraph:
noiselevel=-1,
)
use_display = pkg_use_display(pkg, self._frozen_config.myopts)
- writemsg("- {} {}\n".format(output_cpv, use_display), noiselevel=-1)
+ writemsg(f"- {output_cpv} {use_display}\n", noiselevel=-1)
writemsg(
"\n The following REQUIRED_USE flag constraints "
+ "are unsatisfied:\n",
@@ -6464,7 +6456,7 @@ class depgraph:
eapi=pkg.eapi,
).tounicode()
writemsg(
- " %s\n" % human_readable_required_use(reduced_noise), noiselevel=-1
+ f" {human_readable_required_use(reduced_noise)}\n", noiselevel=-1
)
normalized_required_use = " ".join(pkg._metadata["REQUIRED_USE"].split())
if reduced_noise != normalized_required_use:
@@ -6474,7 +6466,7 @@ class depgraph:
noiselevel=-1,
)
writemsg(
- " %s\n" % human_readable_required_use(normalized_required_use),
+ f" {human_readable_required_use(normalized_required_use)}\n",
noiselevel=-1,
)
writemsg("\n", noiselevel=-1)
@@ -6912,7 +6904,7 @@ class depgraph:
except portage.exception.PackageNotFound:
return next(
self._iter_match_pkgs(
- pkg.root_config, "ebuild", Atom("={}".format(pkg.cpv))
+ pkg.root_config, "ebuild", Atom(f"={pkg.cpv}")
),
None,
)
@@ -6923,7 +6915,7 @@ class depgraph:
except portage.exception.PackageNotFound:
pkg_eb_visible = False
for pkg_eb in self._iter_match_pkgs(
- pkg.root_config, "ebuild", Atom("={}".format(pkg.cpv))
+ pkg.root_config, "ebuild", Atom(f"={pkg.cpv}")
):
if self._pkg_visibility_check(pkg_eb, autounmask_level):
pkg_eb_visible = True
@@ -8495,7 +8487,7 @@ class depgraph:
# is thrown from cpv_expand due to multiple
# matches (this can happen if an atom lacks a
# category).
- show_invalid_depstring_notice(pkg, "{}".format(e))
+ show_invalid_depstring_notice(pkg, f"{e}")
del e
raise
if not success:
@@ -8533,7 +8525,7 @@ class depgraph:
except portage.exception.InvalidAtom as e:
depstr = " ".join(vardb.aux_get(pkg.cpv, dep_keys))
show_invalid_depstring_notice(
- pkg, "Invalid Atom: {}".format(e)
+ pkg, f"Invalid Atom: {e}"
)
return False
for cpv in stale_cache:
@@ -9324,7 +9316,7 @@ class depgraph:
if leaves:
writemsg(
- "runtime cycle leaf: {}\n\n".format(selected_nodes[0]),
+ f"runtime cycle leaf: {selected_nodes[0]}\n\n",
noiselevel=-1,
)
@@ -9437,8 +9429,7 @@ class depgraph:
except portage.exception.InvalidDependString as e:
portage.writemsg(
"!!! Invalid PROVIDE in "
- + "'%svar/db/pkg/%s/PROVIDE': %s\n"
- % (task.root, task.cpv, e),
+ + f"'{task.root}var/db/pkg/{task.cpv}/PROVIDE': {e}\n",
noiselevel=-1,
)
del e
@@ -9475,8 +9466,7 @@ class depgraph:
except portage.exception.InvalidDependString as e:
portage.writemsg(
"!!! Invalid PROVIDE in "
- + "'%svar/db/pkg/%s/PROVIDE': %s\n"
- % (task.root, task.cpv, e),
+ + f"'{task.root}var/db/pkg/{task.cpv}/PROVIDE': {e}\n",
noiselevel=-1,
)
del e
@@ -9517,8 +9507,7 @@ class depgraph:
except portage.exception.InvalidDependString as e:
portage.writemsg(
"!!! Invalid PROVIDE in "
- + "'%svar/db/pkg/%s/PROVIDE': %s\n"
- % (task.root, task.cpv, e),
+ + f"'{task.root}var/db/pkg/{task.cpv}/PROVIDE': {e}\n",
noiselevel=-1,
)
del e
@@ -9766,7 +9755,7 @@ class depgraph:
]
for node in retlist:
if isinstance(node, Package) and node.operation == "uninstall":
- msg.append("\t{}".format(node))
+ msg.append(f"\t{node}")
writemsg_level(
"\n%s\n" % "".join("%s\n" % line for line in msg),
level=logging.DEBUG,
@@ -9961,7 +9950,7 @@ class depgraph:
if parent_atom not in preferred_parents:
ordered_list.append(parent_atom)
- msg.append(indent + "%s pulled in by\n" % pkg)
+ msg.append(indent + f"{pkg} pulled in by\n")
for parent_atom in ordered_list:
parent, atom = parent_atom
@@ -9991,7 +9980,7 @@ class depgraph:
)
else:
msg.append(
- "{} required by {} {}".format(atom, parent, use_display)
+ f"{atom} required by {parent} {use_display}"
)
msg.append("\n")
@@ -10102,19 +10091,19 @@ class depgraph:
if autounmask_unrestricted_atoms:
if is_latest:
unstable_keyword_msg[root].append(
- ">={} {}\n".format(pkg.cpv, keyword)
+ f">={pkg.cpv} {keyword}\n"
)
elif is_latest_in_slot:
unstable_keyword_msg[root].append(
- ">={}:{} {}\n".format(pkg.cpv, pkg.slot, keyword)
+ f">={pkg.cpv}:{pkg.slot} {keyword}\n"
)
else:
unstable_keyword_msg[root].append(
- "={} {}\n".format(pkg.cpv, keyword)
+ f"={pkg.cpv} {keyword}\n"
)
else:
unstable_keyword_msg[root].append(
- "={} {}\n".format(pkg.cpv, keyword)
+ f"={pkg.cpv} {keyword}\n"
)
p_mask_change_msg = {}
@@ -10145,22 +10134,22 @@ class depgraph:
self._get_dep_chain_as_comment(pkg)
)
if filename:
- p_mask_change_msg[root].append("# %s:\n" % filename)
+                        p_mask_change_msg[root].append(f"# {filename}:\n")
if comment:
comment = [line for line in comment.splitlines() if line]
for line in comment:
- p_mask_change_msg[root].append("%s\n" % line)
+ p_mask_change_msg[root].append(f"{line}\n")
if autounmask_unrestricted_atoms:
if is_latest:
- p_mask_change_msg[root].append(">=%s\n" % pkg.cpv)
+ p_mask_change_msg[root].append(f">={pkg.cpv}\n")
elif is_latest_in_slot:
p_mask_change_msg[root].append(
- ">={}:{}\n".format(pkg.cpv, pkg.slot)
+ f">={pkg.cpv}:{pkg.slot}\n"
)
else:
- p_mask_change_msg[root].append("=%s\n" % pkg.cpv)
+ p_mask_change_msg[root].append(f"={pkg.cpv}\n")
else:
- p_mask_change_msg[root].append("=%s\n" % pkg.cpv)
+ p_mask_change_msg[root].append(f"={pkg.cpv}\n")
use_changes_msg = {}
for (
@@ -10192,15 +10181,15 @@ class depgraph:
)
if is_latest:
use_changes_msg[root].append(
- ">={} {}\n".format(pkg.cpv, " ".join(adjustments))
+ f">={pkg.cpv} {' '.join(adjustments)}\n"
)
elif is_latest_in_slot:
use_changes_msg[root].append(
- ">={}:{} {}\n".format(pkg.cpv, pkg.slot, " ".join(adjustments))
+ f">={pkg.cpv}:{pkg.slot} {' '.join(adjustments)}\n"
)
else:
use_changes_msg[root].append(
- "={} {}\n".format(pkg.cpv, " ".join(adjustments))
+ f"={pkg.cpv} {' '.join(adjustments)}\n"
)
license_msg = {}
@@ -10218,16 +10207,15 @@ class depgraph:
license_msg[root].append(self._get_dep_chain_as_comment(pkg))
if is_latest:
license_msg[root].append(
- ">={} {}\n".format(pkg.cpv, " ".join(sorted(missing_licenses)))
+ f">={pkg.cpv} {' '.join(sorted(missing_licenses))}\n"
)
elif is_latest_in_slot:
license_msg[root].append(
- ">=%s:%s %s\n"
- % (pkg.cpv, pkg.slot, " ".join(sorted(missing_licenses)))
+ f">={pkg.cpv}:{pkg.slot} {' '.join(sorted(missing_licenses))}\n"
)
else:
license_msg[root].append(
- "={} {}\n".format(pkg.cpv, " ".join(sorted(missing_licenses)))
+ f"={pkg.cpv} {' '.join(sorted(missing_licenses))}\n"
)
def find_config_file(abs_user_config, file_name):
@@ -10348,7 +10336,7 @@ class depgraph:
)
if len(roots) > 1:
- writemsg("\nFor %s:\n" % abs_user_config, noiselevel=-1)
+ writemsg(f"\nFor {abs_user_config}:\n", noiselevel=-1)
def _writemsg(reason, file):
writemsg(
@@ -10403,7 +10391,7 @@ class depgraph:
file_contents = []
else:
problems.append(
- "!!! Failed to read '{}': {}\n".format(file_to_write_to, e)
+ f"!!! Failed to read '{file_to_write_to}': {e}\n"
)
if file_contents is not None:
file_contents.extend(changes)
@@ -10420,7 +10408,7 @@ class depgraph:
try:
write_atomic(file_to_write_to, "".join(file_contents))
except PortageException:
- problems.append("!!! Failed to write '%s'\n" % file_to_write_to)
+ problems.append(f"!!! Failed to write '{file_to_write_to}'\n")
if not quiet and (p_mask_change_msg or masked_by_missing_keywords):
msg = [
@@ -10504,7 +10492,7 @@ class depgraph:
]
writemsg("\n", noiselevel=-1)
for line in msg:
- writemsg(" {} {}\n".format(colorize("WARN", "*"), line), noiselevel=-1)
+ writemsg(f" {colorize('WARN', '*')} {line}\n", noiselevel=-1)
def display_problems(self):
"""
@@ -10548,7 +10536,7 @@ class depgraph:
for pset in depgraph_sets.sets.values():
for error_msg in pset.errors:
writemsg_level(
- "{}\n".format(error_msg), level=logging.ERROR, noiselevel=-1
+ f"{error_msg}\n", level=logging.ERROR, noiselevel=-1
)
# TODO: Add generic support for "set problem" handlers so that
@@ -10626,9 +10614,9 @@ class depgraph:
if refs:
problems_sets.update(refs)
refs.sort()
- ref_string = ", ".join(["'%s'" % name for name in refs])
+ ref_string = ", ".join([f"'{name}'" for name in refs])
ref_string = " pulled in by " + ref_string
- msg.append(" {}{}\n".format(colorize("INFORM", str(arg)), ref_string))
+ msg.append(f" {colorize('INFORM', str(arg))}{ref_string}\n")
msg.append("\n")
if "selected" in problems_sets or "world" in problems_sets:
msg.append(
@@ -10754,7 +10742,7 @@ class depgraph:
added_favorites.add(myfavkey)
except portage.exception.InvalidDependString as e:
writemsg(
- "\n\n!!! '{}' has invalid PROVIDE: {}\n".format(x.cpv, e),
+ f"\n\n!!! '{x.cpv}' has invalid PROVIDE: {e}\n",
noiselevel=-1,
)
writemsg(
@@ -10787,7 +10775,7 @@ class depgraph:
writemsg_stdout("\n", noiselevel=-1)
for a in all_added:
writemsg_stdout(
- " {} {}\n".format(colorize("GOOD", "*"), a), noiselevel=-1
+ f" {colorize('GOOD', '*')} {a}\n", noiselevel=-1
)
writemsg_stdout("\n", noiselevel=-1)
prompt = (
@@ -11272,7 +11260,7 @@ class _dep_check_composite_db(dbapi):
while sub_slots:
slot, sub_slot = sub_slots.pop()
- slot_atom = atom.with_slot("{}/{}".format(slot, sub_slot))
+ slot_atom = atom.with_slot(f"{slot}/{sub_slot}")
pkg, existing = self._depgraph._select_package(self._root, slot_atom)
if not pkg:
continue
@@ -11437,7 +11425,7 @@ def ambiguous_package_name(arg, atoms, root_config, spinner, myopts):
if "--quiet" in myopts:
writemsg(
- '!!! The short ebuild name "%s" is ambiguous. Please specify\n' % arg,
+ f'!!! The short ebuild name "{arg}" is ambiguous. Please specify\n',
noiselevel=-1,
)
writemsg(
@@ -11464,7 +11452,7 @@ def ambiguous_package_name(arg, atoms, root_config, spinner, myopts):
s.addCP(cp)
s.output()
writemsg(
- '!!! The short ebuild name "%s" is ambiguous. Please specify\n' % arg,
+ f'!!! The short ebuild name "{arg}" is ambiguous. Please specify\n',
noiselevel=-1,
)
writemsg(
@@ -11495,7 +11483,7 @@ def _spinner_start(spinner, myopts):
portage.writemsg_stdout(
"\n"
+ darkgreen(
- "These are the packages that " + "would be %s:" % action
+ "These are the packages that " + f"would be {action}:"
)
+ "\n\n"
)
@@ -11504,7 +11492,7 @@ def _spinner_start(spinner, myopts):
"\n"
+ darkgreen(
"These are the packages that "
- + "would be %s, in reverse order:" % action
+ + f"would be {action}, in reverse order:"
)
+ "\n\n"
)
@@ -11512,7 +11500,7 @@ def _spinner_start(spinner, myopts):
portage.writemsg_stdout(
"\n"
+ darkgreen(
- "These are the packages that " + "would be %s, in order:" % action
+ "These are the packages that " + f"would be {action}, in order:"
)
+ "\n\n"
)
@@ -11571,7 +11559,7 @@ def _backtrack_depgraph(settings, trees, myopts, myparams, myaction, myfiles, sp
if debug and mydepgraph is not None:
writemsg_level(
- "\n\nbacktracking try %s \n\n" % backtracked,
+ f"\n\nbacktracking try {backtracked} \n\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -11580,7 +11568,7 @@ def _backtrack_depgraph(settings, trees, myopts, myparams, myaction, myfiles, sp
backtrack_parameters = backtracker.get()
if debug and backtrack_parameters.runtime_pkg_mask:
writemsg_level(
- "\n\nruntime_pkg_mask: %s \n\n" % backtrack_parameters.runtime_pkg_mask,
+ f"\n\nruntime_pkg_mask: {backtrack_parameters.runtime_pkg_mask} \n\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -11613,7 +11601,7 @@ def _backtrack_depgraph(settings, trees, myopts, myparams, myaction, myfiles, sp
if debug:
writemsg_level(
- "\n\nbacktracking aborted after %s tries\n\n" % backtracked,
+ f"\n\nbacktracking aborted after {backtracked} tries\n\n",
noiselevel=-1,
level=logging.DEBUG,
)
@@ -11784,7 +11772,7 @@ def get_mask_info(
else:
eapi = metadata["EAPI"]
if not portage.eapi_is_supported(eapi):
- mreasons = ["EAPI %s" % eapi]
+ mreasons = [f"EAPI {eapi}"]
else:
pkg = Package(
type_name=pkg_type,
@@ -11914,12 +11902,12 @@ def _get_masking_status(pkg, pkgsettings, root_config, myrepo=None, use=None):
if not pkg.installed:
if not pkgsettings._accept_chost(pkg.cpv, pkg._metadata):
- mreasons.append(_MaskReason("CHOST", "CHOST: %s" % pkg._metadata["CHOST"]))
+ mreasons.append(_MaskReason("CHOST", f"CHOST: {pkg._metadata['CHOST']}"))
if pkg.invalid:
for msgs in pkg.invalid.values():
for msg in msgs:
- mreasons.append(_MaskReason("invalid", "invalid: {}".format(msg)))
+ mreasons.append(_MaskReason("invalid", f"invalid: {msg}"))
if not pkg._metadata["SLOT"]:
mreasons.append(_MaskReason("invalid", "SLOT: undefined"))
diff --git a/lib/_emerge/emergelog.py b/lib/_emerge/emergelog.py
index f82abdbad..50c1fddf0 100644
--- a/lib/_emerge/emergelog.py
+++ b/lib/_emerge/emergelog.py
@@ -46,10 +46,10 @@ def emergelog(xterm_titles, mystr, short_msg=None):
)
mylock = portage.locks.lockfile(file_path)
try:
- mylogfile.write("{:.0f}: {}\n".format(time.time(), mystr))
+ mylogfile.write(f"{time.time():.0f}: {mystr}\n")
mylogfile.close()
finally:
portage.locks.unlockfile(mylock)
except (OSError, portage.exception.PortageException) as e:
if secpass >= 1:
- portage.util.writemsg("emergelog(): {}\n".format(e), noiselevel=-1)
+ portage.util.writemsg(f"emergelog(): {e}\n", noiselevel=-1)
diff --git a/lib/_emerge/is_valid_package_atom.py b/lib/_emerge/is_valid_package_atom.py
index d05d35e17..0db8275c7 100644
--- a/lib/_emerge/is_valid_package_atom.py
+++ b/lib/_emerge/is_valid_package_atom.py
@@ -9,7 +9,7 @@ def insert_category_into_atom(atom, category):
# Handle '*' character for "extended syntax" wildcard support.
alphanum = re.search(r"[\*\w]", atom, re.UNICODE)
if alphanum:
- ret = atom[: alphanum.start()] + "%s/" % category + atom[alphanum.start() :]
+ ret = atom[: alphanum.start()] + f"{category}/" + atom[alphanum.start() :]
else:
ret = None
return ret
diff --git a/lib/_emerge/post_emerge.py b/lib/_emerge/post_emerge.py
index 019167251..922d07f17 100644
--- a/lib/_emerge/post_emerge.py
+++ b/lib/_emerge/post_emerge.py
@@ -104,7 +104,7 @@ def post_emerge(myaction, myopts, myfiles, target_root, trees, mtimedb, retval):
if retval == os.EX_OK:
exit_msg = " *** exiting successfully."
else:
- exit_msg = " *** exiting unsuccessfully with status '%s'." % retval
+ exit_msg = f" *** exiting unsuccessfully with status '{retval}'."
emergelog("notitles" not in settings.features, exit_msg)
_flush_elog_mod_echo()
@@ -161,11 +161,7 @@ def post_emerge(myaction, myopts, myfiles, target_root, trees, mtimedb, retval):
hook_retval = portage.process.spawn([postemerge], env=settings.environ())
if hook_retval != os.EX_OK:
portage.util.writemsg_level(
- " %s spawn failed of %s\n"
- % (
- colorize("BAD", "*"),
- postemerge,
- ),
+ f" {colorize('BAD', '*')} spawn failed of {postemerge}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/_emerge/resolver/circular_dependency.py b/lib/_emerge/resolver/circular_dependency.py
index 1f5242346..db599a94b 100644
--- a/lib/_emerge/resolver/circular_dependency.py
+++ b/lib/_emerge/resolver/circular_dependency.py
@@ -90,14 +90,10 @@ class circular_dependency_handler:
if pos > 0:
msg.append(
indent
- + "%s (%s)"
- % (
- pkg,
- priorities[-1],
- )
+ + f"{pkg} ({priorities[-1]})"
)
else:
- msg.append(indent + "%s depends on" % pkg)
+ msg.append(indent + f"{pkg} depends on")
indent += " "
pkg = self.shortest_cycle[0]
@@ -105,11 +101,7 @@ class circular_dependency_handler:
priorities = self.graph.nodes[parent][0][pkg]
msg.append(
indent
- + "%s (%s)"
- % (
- pkg,
- priorities[-1],
- )
+ + f"{pkg} ({priorities[-1]})"
)
return "\n".join(msg)
@@ -292,7 +284,7 @@ class circular_dependency_handler:
changes.append(colorize("red", "+" + flag))
else:
changes.append(colorize("blue", "-" + flag))
- msg = "- {} (Change USE: {})\n".format(parent.cpv, " ".join(changes))
+ msg = f"- {parent.cpv} (Change USE: {' '.join(changes)})\n"
if followup_change:
msg += (
" (This change might require USE changes on parent packages.)"
diff --git a/lib/_emerge/resolver/output.py b/lib/_emerge/resolver/output.py
index 30fb8ce43..24517e548 100644
--- a/lib/_emerge/resolver/output.py
+++ b/lib/_emerge/resolver/output.py
@@ -85,10 +85,10 @@ class Display:
"""
if blocker.satisfied:
self.blocker_style = "PKG_BLOCKER_SATISFIED"
- addl = "{} ".format(colorize(self.blocker_style, "b"))
+ addl = f"{colorize(self.blocker_style, 'b')} "
else:
self.blocker_style = "PKG_BLOCKER"
- addl = "{} ".format(colorize(self.blocker_style, "B"))
+ addl = f"{colorize(self.blocker_style, 'B')} "
addl += self.empty_space_in_brackets()
self.resolved = dep_expand(
str(blocker.atom).lstrip("!"), mydb=self.vardb, settings=self.pkgsettings
@@ -117,7 +117,7 @@ class Display:
)
else:
addl += colorize(
- self.blocker_style, " (is {} {})".format(blocking_desc, block_parents)
+ self.blocker_style, f" (is {blocking_desc} {block_parents})"
)
if blocker.satisfied:
if not self.conf.columns:
@@ -444,7 +444,7 @@ class Display:
@rtype string
"""
if pkg.type_name == "binary" and pkg.cpv.build_id is not None:
- pkg_str += "-%s" % pkg.cpv.build_id
+ pkg_str += f"-{pkg.cpv.build_id}"
return pkg_str
def _set_non_root_columns(self, pkg, pkg_info):
@@ -576,30 +576,30 @@ class Display:
"""
for msg in self.print_msg:
if isinstance(msg, str):
- writemsg_stdout("{}\n".format(msg), noiselevel=-1)
+ writemsg_stdout(f"{msg}\n", noiselevel=-1)
continue
myprint, self.verboseadd, repoadd = msg
if self.verboseadd:
myprint += " " + self.verboseadd
if show_repos and repoadd:
- myprint += " " + teal("[%s]" % repoadd)
- writemsg_stdout("{}\n".format(myprint), noiselevel=-1)
+ myprint += " " + teal(f"[{repoadd}]")
+ writemsg_stdout(f"{myprint}\n", noiselevel=-1)
def print_blockers(self):
"""Performs the actual output printing of the pre-formatted
blocker messages
"""
for pkg in self.blockers:
- writemsg_stdout("{}\n".format(pkg), noiselevel=-1)
+ writemsg_stdout(f"{pkg}\n", noiselevel=-1)
def print_verbose(self, show_repos):
"""Prints the verbose output to std_out
@param show_repos: bool.
"""
- writemsg_stdout("\n{}\n".format(self.counters), noiselevel=-1)
+ writemsg_stdout(f"\n{self.counters}\n", noiselevel=-1)
if show_repos:
- writemsg_stdout("{}".format(self.conf.repo_display), noiselevel=-1)
+ writemsg_stdout(f"{self.conf.repo_display}", noiselevel=-1)
def get_display_list(self, mylist):
"""Determines the display list to process
@@ -662,7 +662,7 @@ class Display:
pkg.cpv, myrepo=pkg_info.repo_name
)
if pkg_info.ebuild_path is None:
- raise AssertionError("ebuild not found for '%s'" % pkg.cpv)
+ raise AssertionError(f"ebuild not found for '{pkg.cpv}'")
pkg_info.repo_path_real = os.path.dirname(
os.path.dirname(os.path.dirname(pkg_info.ebuild_path))
)
@@ -925,7 +925,7 @@ class Display:
self.print_verbose(show_repos)
for pkg, pkg_info in self.restrict_fetch_list.items():
writemsg_stdout(
- "\nFetch instructions for {}:\n".format(pkg.cpv), noiselevel=-1
+ f"\nFetch instructions for {pkg.cpv}:\n", noiselevel=-1
)
spawn_nofetch(
self.conf.trees[pkg.root]["porttree"].dbapi, pkg_info.ebuild_path
@@ -951,7 +951,7 @@ def format_unmatched_atom(pkg, atom, pkg_use_enabled):
# 5. USE
if atom.soname:
- return "{}".format(atom), ""
+ return f"{atom}", ""
highlight = set()
@@ -1015,7 +1015,7 @@ def format_unmatched_atom(pkg, atom, pkg_use_enabled):
highlight_use = set()
if atom.use:
- use_atom = "{}[{}]".format(atom.cp, str(atom.use))
+ use_atom = f"{atom.cp}[{str(atom.use)}]"
use_atom_set = InternalPackageSet(initial_atoms=(use_atom,))
if not use_atom_set.findAtomForPackage(pkg, modified_use=pkg_use_enabled(pkg)):
missing_iuse = pkg.iuse.get_missing_iuse(atom.unevaluated_atom.use.required)
diff --git a/lib/_emerge/resolver/output_helpers.py b/lib/_emerge/resolver/output_helpers.py
index 366de2429..f949eba8f 100644
--- a/lib/_emerge/resolver/output_helpers.py
+++ b/lib/_emerge/resolver/output_helpers.py
@@ -75,7 +75,7 @@ class _RepoDisplay:
show_repo_paths[repo_index] = repo_path
if show_repo_paths:
for index, repo_path in enumerate(show_repo_paths):
- output.append(" " + teal("[" + str(index) + "]") + " %s\n" % repo_path)
+ output.append(" " + teal("[" + str(index) + "]") + f" {repo_path}\n")
if unknown_repo:
output.append(
" "
@@ -107,47 +107,47 @@ class _PackageCounters:
)
myoutput = []
details = []
- myoutput.append("Total: %s package" % total_installs)
+ myoutput.append(f"Total: {total_installs} package")
if total_installs != 1:
myoutput.append("s")
if total_installs != 0:
myoutput.append(" (")
if self.upgrades > 0:
- details.append("%s upgrade" % self.upgrades)
+ details.append(f"{self.upgrades} upgrade")
if self.upgrades > 1:
details[-1] += "s"
if self.downgrades > 0:
- details.append("%s downgrade" % self.downgrades)
+ details.append(f"{self.downgrades} downgrade")
if self.downgrades > 1:
details[-1] += "s"
if self.new > 0:
- details.append("%s new" % self.new)
+ details.append(f"{self.new} new")
if self.newslot > 0:
- details.append("%s in new slot" % self.newslot)
+ details.append(f"{self.newslot} in new slot")
if self.newslot > 1:
details[-1] += "s"
if self.reinst > 0:
- details.append("%s reinstall" % self.reinst)
+ details.append(f"{self.reinst} reinstall")
if self.reinst > 1:
details[-1] += "s"
if self.binary > 0:
- details.append("%s binary" % self.binary)
+ details.append(f"{self.binary} binary")
if self.binary > 1:
details[-1] = details[-1][:-1] + "ies"
if self.uninst > 0:
- details.append("%s uninstall" % self.uninst)
+ details.append(f"{self.uninst} uninstall")
if self.uninst > 1:
details[-1] += "s"
if self.interactive > 0:
details.append(
- "{} {}".format(self.interactive, colorize("WARN", "interactive"))
+ f"{self.interactive} {colorize('WARN', 'interactive')}"
)
myoutput.append(", ".join(details))
if total_installs != 0:
myoutput.append(")")
- myoutput.append(", Size of downloads: %s" % localized_size(self.totalsize))
+ myoutput.append(f", Size of downloads: {localized_size(self.totalsize)}")
if self.restrict_fetch:
- myoutput.append("\nFetch Restriction: %s package" % self.restrict_fetch)
+ myoutput.append(f"\nFetch Restriction: {self.restrict_fetch} package")
if self.restrict_fetch > 1:
myoutput.append("s")
if self.restrict_fetch_satisfied < self.restrict_fetch:
@@ -156,7 +156,7 @@ class _PackageCounters:
% (self.restrict_fetch - self.restrict_fetch_satisfied)
)
if self.blocks > 0:
- myoutput.append("\nConflict: %s block" % self.blocks)
+ myoutput.append(f"\nConflict: {self.blocks} block")
if self.blocks > 1:
myoutput.append("s")
if self.blocks_satisfied < self.blocks:
@@ -205,7 +205,7 @@ class _DisplayConfig:
try:
mywidth = int(frozen_config.settings["COLUMNWIDTH"])
except ValueError as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(
"!!! Unable to parse COLUMNWIDTH='%s'\n"
% frozen_config.settings["COLUMNWIDTH"],
@@ -339,7 +339,7 @@ def _create_use_string(
else:
ret = " ".join(enabled + disabled + removed)
if ret:
- ret = '{}="{}" '.format(name, ret)
+ ret = f'{name}="{ret}" '
return ret
diff --git a/lib/_emerge/resolver/slot_collision.py b/lib/_emerge/resolver/slot_collision.py
index 88c28464b..aad97014a 100644
--- a/lib/_emerge/resolver/slot_collision.py
+++ b/lib/_emerge/resolver/slot_collision.py
@@ -146,7 +146,7 @@ class slot_conflict_handler:
if self.debug:
writemsg("\nNew configuration:\n", noiselevel=-1)
for pkg in config:
- writemsg(" {}\n".format(pkg), noiselevel=-1)
+ writemsg(f" {pkg}\n", noiselevel=-1)
writemsg("\n", noiselevel=-1)
new_solutions = self._check_configuration(
@@ -258,9 +258,9 @@ class slot_conflict_handler:
)
for root, slot_atom, pkgs in self.all_conflicts:
- msg.append("{}".format(slot_atom))
+ msg.append(f"{slot_atom}")
if root != self.depgraph._frozen_config._running_root.root:
- msg.append(" for {}".format(root))
+ msg.append(f" for {root}")
msg.append("\n\n")
for pkg in pkgs:
@@ -523,7 +523,7 @@ class slot_conflict_handler:
def highlight_violations(atom, version, use, slot_violated):
"""Colorize parts of an atom"""
- atom_str = "{}".format(atom)
+ atom_str = f"{atom}"
colored_idx = set()
if version:
op = atom.operator
@@ -614,7 +614,7 @@ class slot_conflict_handler:
atom_str = (
atom_str[:use_part_start]
- + "[{}]".format(",".join(new_tokens))
+ + f"[{','.join(new_tokens)}]"
+ atom_str[use_part_end + 1 :]
)
@@ -639,17 +639,15 @@ class slot_conflict_handler:
use_display = ""
if atom.soname:
msg.append(
- "{} required by {} {}\n".format(
- atom, parent, use_display
- )
+ f"{atom} required by {parent} {use_display}\n"
)
elif isinstance(parent, PackageArg):
# For PackageArg it's
# redundant to display the atom attribute.
- msg.append("{}\n".format(parent))
+ msg.append(f"{parent}\n")
elif isinstance(parent, AtomArg):
msg.append(2 * indent)
- msg.append("{} (Argument)\n".format(atom))
+ msg.append(f"{atom} (Argument)\n")
else:
# Display the specific atom from SetArg or
# Package types.
@@ -738,7 +736,7 @@ class slot_conflict_handler:
)
msg.append("!!! package(s) cannot be rebuilt for the reason(s) shown:\n\n")
for ppkg, reason in need_rebuild.items():
- msg.append("{}{}: {}\n".format(indent, ppkg, reason))
+ msg.append(f"{indent}{ppkg}: {reason}\n")
msg.append("\n")
msg.append("\n")
@@ -786,7 +784,7 @@ class slot_conflict_handler:
indent
+ "- "
+ pkg.cpv
- + " (Change USE: %s" % " ".join(changes)
+ + f" (Change USE: {' '.join(changes)}"
+ ")\n"
)
mymsg += "\n"
@@ -977,7 +975,7 @@ class slot_conflict_handler:
if self.debug:
writemsg("All involved flags:\n", noiselevel=-1)
for idx, involved_flags in enumerate(all_involved_flags):
- writemsg(" {}\n".format(config[idx]), noiselevel=-1)
+ writemsg(f" {config[idx]}\n", noiselevel=-1)
for flag, state in involved_flags.items():
writemsg(" " + flag + ": " + state + "\n", noiselevel=-1)
@@ -1068,7 +1066,7 @@ class slot_conflict_handler:
inner_first = False
else:
msg += ", "
- msg += flag + ": {}".format(state)
+ msg += flag + f": {state}"
msg += "}"
msg += "]\n"
writemsg(msg, noiselevel=-1)
@@ -1283,7 +1281,7 @@ class _solution_candidate_generator:
return self.value == other.value
def __str__(self):
- return "{}".format(self.value)
+ return f"{self.value}"
def __init__(self, all_involved_flags):
# A copy of all_involved_flags with all "cond" values
diff --git a/lib/_emerge/search.py b/lib/_emerge/search.py
index a20d432ad..07b28ba94 100644
--- a/lib/_emerge/search.py
+++ b/lib/_emerge/search.py
@@ -107,7 +107,7 @@ class search:
def _aux_get_error(self, cpv):
portage.writemsg(
- "emerge: search: " "aux_get('%s') failed, skipping\n" % cpv, noiselevel=-1
+ f"emerge: search: aux_get('{cpv}') failed, skipping\n", noiselevel=-1
)
def _findname(self, *args, **kwargs):
@@ -475,7 +475,7 @@ class search:
try:
uri_map = _parse_uri_map(mycpv, metadata, use=pkg.use.enabled)
except portage.exception.InvalidDependString as e:
- file_size_str = "Unknown ({})".format(e)
+ file_size_str = f"Unknown ({e})"
del e
else:
try:
@@ -483,9 +483,7 @@ class search:
except KeyError as e:
file_size_str = (
"Unknown (missing "
- + "digest for {})".format(
- e,
- )
+ + f"digest for {e})"
)
del e
@@ -514,12 +512,11 @@ class search:
% (darkgreen("Latest version available:"), myversion)
)
msg.append(
- " %s\n" % self.getInstallationStatus(mycat + "/" + mypkg)
+ f" {self.getInstallationStatus(mycat + '/' + mypkg)}\n"
)
if myebuild:
msg.append(
- " %s %s\n"
- % (darkgreen("Size of files:"), file_size_str)
+ f" {darkgreen('Size of files:')} {file_size_str}\n"
)
msg.append(
" " + darkgreen("Homepage:") + " " + homepage + "\n"
diff --git a/lib/_emerge/show_invalid_depstring_notice.py b/lib/_emerge/show_invalid_depstring_notice.py
index 0ae788a93..c3c2dc8c7 100644
--- a/lib/_emerge/show_invalid_depstring_notice.py
+++ b/lib/_emerge/show_invalid_depstring_notice.py
@@ -12,7 +12,7 @@ def show_invalid_depstring_notice(parent_node, error_msg):
msg1 = (
"\n\n!!! Invalid or corrupt dependency specification: "
- + "\n\n{}\n\n{}\n\n".format(error_msg, parent_node)
+ + f"\n\n{error_msg}\n\n{parent_node}\n\n"
)
p_key = parent_node.cpv
p_status = parent_node.operation
@@ -23,17 +23,17 @@ def show_invalid_depstring_notice(parent_node, error_msg):
parent_node.root_config.settings["EROOT"], portage.VDB_PATH, category, pf
)
msg.append("Portage is unable to process the dependencies of the ")
- msg.append("'%s' package. " % p_key)
+ msg.append(f"'{p_key}' package. ")
msg.append("In order to correct this problem, the package ")
msg.append("should be uninstalled, reinstalled, or upgraded. ")
msg.append("As a temporary workaround, the --nodeps option can ")
msg.append("be used to ignore all dependencies. For reference, ")
msg.append("the problematic dependencies can be found in the ")
- msg.append("*DEPEND files located in '%s/'." % pkg_location)
+ msg.append(f"*DEPEND files located in '{pkg_location}/'.")
else:
msg.append("This package can not be installed. ")
- msg.append("Please notify the '%s' package maintainer " % p_key)
+ msg.append(f"Please notify the '{p_key}' package maintainer ")
msg.append("about this problem.")
- msg2 = "".join("%s\n" % line for line in textwrap.wrap("".join(msg), 72))
+ msg2 = "".join(f"{line}\n" for line in textwrap.wrap("".join(msg), 72))
writemsg_level(msg1 + msg2, level=logging.ERROR, noiselevel=-1)
diff --git a/lib/_emerge/unmerge.py b/lib/_emerge/unmerge.py
index 5466e8346..b49a8e8b6 100644
--- a/lib/_emerge/unmerge.py
+++ b/lib/_emerge/unmerge.py
@@ -104,7 +104,7 @@ def _unmerge_display(
if unmerge_action in ["rage-clean", "unmerge"]:
print()
print(
- bold("emerge %s" % unmerge_action)
+ bold(f"emerge {unmerge_action}")
+ " can only be used with specific package names"
)
print()
@@ -120,7 +120,7 @@ def _unmerge_display(
else:
# we've got command-line arguments
if not unmerge_files:
- print("\nNo packages to %s have been provided.\n" % unmerge_action)
+ print(f"\nNo packages to {unmerge_action} have been provided.\n")
return 1, {}
for x in unmerge_files:
arg_parts = x.split("/")
@@ -187,7 +187,7 @@ def _unmerge_display(
writemsg_level(
darkgreen(
newline
- + ">>> Using system located in ROOT tree %s\n" % settings["ROOT"]
+ + f">>> Using system located in ROOT tree {settings['ROOT']}\n"
)
)
@@ -445,11 +445,11 @@ def _unmerge_display(
parents.append(s)
break
if parents:
- print(colorize("WARN", "Package %s is going to be unmerged," % cpv))
+ print(colorize("WARN", f"Package {cpv} is going to be unmerged,"))
print(
colorize("WARN", "but still listed in the following package sets:")
)
- print(" %s\n" % ", ".join(parents))
+ print(f" {', '.join(parents)}\n")
del installed_sets
@@ -506,13 +506,13 @@ def _unmerge_display(
if not (pkgmap[x]["protected"] or pkgmap[x]["omitted"]) and cp in syslist:
virt_cp = sys_virt_map.get(cp)
if virt_cp is None:
- cp_info = "'{}'".format(cp)
+ cp_info = f"'{cp}'"
else:
- cp_info = "'{}' ({})".format(cp, virt_cp)
+ cp_info = f"'{cp}' ({virt_cp})"
writemsg_level(
colorize(
"BAD",
- "\n\n!!! " + "{} is part of your system profile.\n".format(cp_info),
+ "\n\n!!! " + f"{cp_info} is part of your system profile.\n",
),
level=logging.WARNING,
noiselevel=-1,
@@ -525,7 +525,7 @@ def _unmerge_display(
noiselevel=-1,
)
if not quiet:
- writemsg_level("\n {}\n".format(bold(cp)), noiselevel=-1)
+ writemsg_level(f"\n {bold(cp)}\n", noiselevel=-1)
else:
writemsg_level(bold(cp) + ": ", noiselevel=-1)
for mytype in ["selected", "protected", "omitted"]:
diff --git a/lib/portage/_compat_upgrade/binpkg_compression.py b/lib/portage/_compat_upgrade/binpkg_compression.py
index 6cb117e4e..d643fcaf3 100644
--- a/lib/portage/_compat_upgrade/binpkg_compression.py
+++ b/lib/portage/_compat_upgrade/binpkg_compression.py
@@ -34,7 +34,7 @@ def main():
)
with open(config_path) as f:
content = f.read()
- compat_setting = 'BINPKG_COMPRESS="{}"'.format(COMPAT_BINPKG_COMPRESS)
+ compat_setting = f'BINPKG_COMPRESS="{COMPAT_BINPKG_COMPRESS}"'
portage.output.EOutput().einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
diff --git a/lib/portage/_compat_upgrade/binpkg_multi_instance.py b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
index a99e4dc54..a5a412666 100644
--- a/lib/portage/_compat_upgrade/binpkg_multi_instance.py
+++ b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
@@ -24,15 +24,13 @@ def main():
"""
if "binpkg-multi-instance" not in portage.settings.features:
portage.output.EOutput().einfo(
- "Setting make.globals default {} for backward compatibility".format(
- COMPAT_FEATURES
- )
+ f"Setting make.globals default {COMPAT_FEATURES} for backward compatibility"
)
config_path = os.path.join(
os.environ["ED"], GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals"
)
with open(config_path, "a") as f:
- f.write("{}\n".format(COMPAT_FEATURES))
+ f.write(f"{COMPAT_FEATURES}\n")
if __name__ == "__main__":
diff --git a/lib/portage/_compat_upgrade/default_locations.py b/lib/portage/_compat_upgrade/default_locations.py
index d09b5858b..730a0682a 100644
--- a/lib/portage/_compat_upgrade/default_locations.py
+++ b/lib/portage/_compat_upgrade/default_locations.py
@@ -68,7 +68,7 @@ def main():
with open(config_path) as f:
content = f.read()
if do_distdir:
- compat_setting = 'DISTDIR="{}"'.format(compat_distdir)
+ compat_setting = f'DISTDIR="{compat_distdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -78,7 +78,7 @@ def main():
"^DISTDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
if do_pkgdir:
- compat_setting = 'PKGDIR="{}"'.format(compat_pkgdir)
+ compat_setting = f'PKGDIR="{compat_pkgdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -88,7 +88,7 @@ def main():
"^PKGDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
if do_rpmdir:
- compat_setting = 'RPMDIR="{}"'.format(compat_rpmdir)
+ compat_setting = f'RPMDIR="{compat_rpmdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -106,7 +106,7 @@ def main():
)
with open(config_path) as f:
content = f.read()
- compat_setting = "location = {}".format(compat_main_repo)
+ compat_setting = f"location = {compat_main_repo}"
out.einfo(
"Setting repos.conf default {} for backward compatibility".format(
compat_setting
diff --git a/lib/portage/_emirrordist/Config.py b/lib/portage/_emirrordist/Config.py
index 5a67c5c18..eef859477 100644
--- a/lib/portage/_emirrordist/Config.py
+++ b/lib/portage/_emirrordist/Config.py
@@ -94,7 +94,7 @@ class Config:
line_format = "%s: %%s" % log_desc
add_newline = False
if log_path is not None:
- logger.warning("dry-run: %s log redirected to logging.info" % log_desc)
+ logger.warning(f"dry-run: {log_desc} log redirected to logging.info")
else:
self._open_files.append(open(log_path, mode=mode, encoding="utf_8"))
line_format = "%s\n"
@@ -136,7 +136,7 @@ class Config:
db = dbshelve.open(db_file, flags=open_flag)
if dry_run:
- logger.warning("dry-run: %s db opened in readonly mode" % db_desc)
+ logger.warning(f"dry-run: {db_desc} db opened in readonly mode")
if not isinstance(db, dict):
volatile_db = {k: db[k] for k in db}
db.close()
diff --git a/lib/portage/_emirrordist/ContentDB.py b/lib/portage/_emirrordist/ContentDB.py
index ac6140257..c9e5f3e7b 100644
--- a/lib/portage/_emirrordist/ContentDB.py
+++ b/lib/portage/_emirrordist/ContentDB.py
@@ -38,10 +38,10 @@ class ContentDB:
@param filename: file name with digests attribute
"""
distfile_str = str(filename)
- distfile_key = "filename:{}".format(distfile_str)
+ distfile_key = f"filename:{distfile_str}"
for k, v in filename.digests.items():
if k != "size":
- digest_key = "digest:{}:{}".format(k.upper(), v.lower())
+ digest_key = f"digest:{k.upper()}:{v.lower()}"
try:
digest_files = self._shelve[digest_key]
except KeyError:
@@ -77,7 +77,7 @@ class ContentDB:
@param filename: file name with digests attribute
"""
- distfile_key = "filename:{}".format(filename)
+ distfile_key = f"filename:{filename}"
try:
content_revisions = self._shelve[distfile_key]
except KeyError:
@@ -92,7 +92,7 @@ class ContentDB:
remaining.add(revision_key)
continue
for k, v in revision_key:
- digest_key = "digest:{}:{}".format(k, v)
+ digest_key = f"digest:{k}:{v}"
try:
digest_files = self._shelve[digest_key]
except KeyError:
@@ -153,7 +153,7 @@ class ContentDB:
for distfile_str in digest_files:
matched_revisions.setdefault(distfile_str, set())
try:
- content_revisions = self._shelve["filename:{}".format(distfile_str)]
+ content_revisions = self._shelve[f"filename:{distfile_str}"]
except KeyError:
pass
else:
diff --git a/lib/portage/_emirrordist/DeletionIterator.py b/lib/portage/_emirrordist/DeletionIterator.py
index 636a34a81..4921e226e 100644
--- a/lib/portage/_emirrordist/DeletionIterator.py
+++ b/lib/portage/_emirrordist/DeletionIterator.py
@@ -103,7 +103,7 @@ class DeletionIterator:
deletion_entry = deletion_db.get(filename)
if deletion_entry is None:
- logger.debug("add '%s' to deletion db" % filename)
+ logger.debug(f"add '{filename}' to deletion db")
deletion_db[filename] = start_time
elif deletion_entry + deletion_delay <= start_time:
@@ -123,4 +123,4 @@ class DeletionIterator:
except KeyError:
pass
else:
- logger.debug("drop '%s' from deletion db" % filename)
+ logger.debug(f"drop '{filename}' from deletion db")
diff --git a/lib/portage/_emirrordist/DeletionTask.py b/lib/portage/_emirrordist/DeletionTask.py
index 65ba0462d..88015ea4b 100644
--- a/lib/portage/_emirrordist/DeletionTask.py
+++ b/lib/portage/_emirrordist/DeletionTask.py
@@ -60,7 +60,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "{} unlink failed in distfiles: {}".format(self.distfile, e)
+ f"{self.distfile} unlink failed in distfiles: {e}"
)
success = False
@@ -86,7 +86,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "{} unlink failed in distfiles: {}".format(self.distfile, e)
+ f"{self.distfile} unlink failed in distfiles: {e}"
)
success = False
@@ -119,7 +119,7 @@ class DeletionTask(CompositeTask):
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
logger.error(
- "{} unlink failed in distfiles: {}".format(self.distfile, e)
+ f"{self.distfile} unlink failed in distfiles: {e}"
)
success = False
@@ -136,7 +136,7 @@ class DeletionTask(CompositeTask):
cpv = self.config.distfiles_db.get(self.distfile, cpv)
self.config.delete_count += 1
- self.config.log_success("{}\t{}\tremoved".format(cpv, self.distfile))
+ self.config.log_success(f"{cpv}\t{self.distfile}\tremoved")
if self.config.distfiles_db is not None:
try:
diff --git a/lib/portage/_emirrordist/FetchIterator.py b/lib/portage/_emirrordist/FetchIterator.py
index de90d23bd..2d7945b0f 100644
--- a/lib/portage/_emirrordist/FetchIterator.py
+++ b/lib/portage/_emirrordist/FetchIterator.py
@@ -162,7 +162,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
(restrict,) = aux_get_result.result()
except (PortageKeyError, PortageException) as e:
- config.log_failure("{}\t\taux_get exception {}".format(cpv, e))
+ config.log_failure(f"{cpv}\t\taux_get exception {e}")
result.set_result(fetch_tasks)
return
@@ -172,14 +172,14 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
restrict = frozenset(use_reduce(restrict, flat=True, matchnone=True))
except PortageException as e:
- config.log_failure("{}\t\tuse_reduce exception {}".format(cpv, e))
+ config.log_failure(f"{cpv}\t\tuse_reduce exception {e}")
result.set_result(fetch_tasks)
return
try:
uri_map = fetch_map_result.result()
except PortageException as e:
- config.log_failure("{}\t\tgetFetchMap exception {}".format(cpv, e))
+ config.log_failure(f"{cpv}\t\tgetFetchMap exception {e}")
result.set_result(fetch_tasks)
return
@@ -238,7 +238,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
digests_future.done() or digests_future.set_exception(e)
for filename in new_uri_map:
config.log_failure(
- "{}\t{}\tManifest exception {}".format(cpv, filename, e)
+ f"{cpv}\t{filename}\tManifest exception {e}"
)
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
@@ -248,7 +248,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
if not digests:
for filename in new_uri_map:
- config.log_failure("{}\t{}\tdigest entry missing".format(cpv, filename))
+ config.log_failure(f"{cpv}\t{filename}\tdigest entry missing")
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
return
@@ -256,7 +256,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
for filename, uri_tuple in new_uri_map.items():
file_digests = digests.get(filename)
if file_digests is None:
- config.log_failure("{}\t{}\tdigest entry missing".format(cpv, filename))
+ config.log_failure(f"{cpv}\t{filename}\tdigest entry missing")
config.file_failures[filename] = cpv
continue
if filename in config.file_owners:
diff --git a/lib/portage/_emirrordist/FetchTask.py b/lib/portage/_emirrordist/FetchTask.py
index a85411b6d..1521a25be 100644
--- a/lib/portage/_emirrordist/FetchTask.py
+++ b/lib/portage/_emirrordist/FetchTask.py
@@ -70,11 +70,11 @@ class FetchTask(CompositeTask):
self.config.content_db.add(self.distfile)
if not self._have_needed_digests():
- msg = "incomplete digests: %s" % " ".join(self.digests)
+ msg = f"incomplete digests: {' '.join(self.digests)}"
self.scheduler.output(
msg, background=self.background, log_path=self._log_path
)
- self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self._async_wait()
@@ -89,9 +89,7 @@ class FetchTask(CompositeTask):
st = os.stat(distfile_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "{} stat failed in {}: {}".format(
- self.distfile, "distfiles", e
- )
+ msg = f"{self.distfile} stat failed in distfiles: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -177,7 +175,7 @@ class FetchTask(CompositeTask):
if self.config.options.dry_run:
if os.path.exists(recycle_file):
logger.info(
- "dry-run: delete '{}' from recycle".format(self.distfile)
+ f"dry-run: delete '{self.distfile}' from recycle"
)
else:
try:
@@ -185,7 +183,7 @@ class FetchTask(CompositeTask):
except OSError:
pass
else:
- logger.debug("delete '{}' from recycle".format(self.distfile))
+ logger.debug(f"delete '{self.distfile}' from recycle")
def _distfiles_digester_exit(self, digester):
@@ -199,10 +197,10 @@ class FetchTask(CompositeTask):
# is a bad situation which normally does not occur, so
# skip this file and report it, in order to draw attention
# from the administrator.
- msg = "{} distfiles digester failed unexpectedly".format(self.distfile)
+ msg = f"{self.distfile} distfiles digester failed unexpectedly"
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
logger.error(msg)
- self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.wait()
return
@@ -275,7 +273,7 @@ class FetchTask(CompositeTask):
else:
msg = "no fetchable uris"
- self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self.wait()
@@ -314,9 +312,7 @@ class FetchTask(CompositeTask):
st = os.stat(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "{} stat failed in {}: {}".format(
- self.distfile, mirror_info.name, e
- )
+ msg = f"{self.distfile} stat failed in {mirror_info.name}: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -348,10 +344,7 @@ class FetchTask(CompositeTask):
current_mirror = self._current_mirror
if digester.returncode != os.EX_OK:
- msg = "{} {} digester failed unexpectedly".format(
- self.distfile,
- current_mirror.name,
- )
+ msg = f"{self.distfile} {current_mirror.name} digester failed unexpectedly"
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
logger.error(msg)
else:
@@ -393,7 +386,7 @@ class FetchTask(CompositeTask):
self.config.layouts[0].get_path(self.distfile),
)
if self._hardlink_atomic(
- src, dest, "{} to {}".format(current_mirror.name, "distfiles")
+ src, dest, f"{current_mirror.name} to distfiles"
):
logger.debug(
"hardlink '%s' from %s to distfiles"
@@ -436,9 +429,7 @@ class FetchTask(CompositeTask):
else:
logger.debug(
- "copy '{}' from {} to distfiles".format(
- self.distfile, current_mirror.name
- )
+ f"copy '{self.distfile}' from {current_mirror.name} to distfiles"
)
# Apply the timestamp from the source file, but
@@ -470,7 +461,7 @@ class FetchTask(CompositeTask):
if self.config.options.dry_run:
# Simply report success.
- logger.info("dry-run: fetch '{}' from '{}'".format(self.distfile, uri))
+ logger.info(f"dry-run: fetch '{self.distfile}' from '{uri}'")
self._success()
self.returncode = os.EX_OK
self._async_wait()
@@ -483,7 +474,7 @@ class FetchTask(CompositeTask):
self._fetch_tmp_dir_info = "distfiles"
distdir = self.config.options.distfiles
- tmp_basename = self.distfile + "._emirrordist_fetch_.%s" % portage.getpid()
+ tmp_basename = self.distfile + f"._emirrordist_fetch_.{portage.getpid()}"
variables = {"DISTDIR": distdir, "URI": uri, "FILE": tmp_basename}
@@ -619,7 +610,7 @@ class FetchTask(CompositeTask):
)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
logger.error(msg)
- self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
self.wait()
@@ -644,7 +635,7 @@ class FetchTask(CompositeTask):
if not self._hardlink_atomic(
src_path,
link_path,
- "{} -> {}".format(link_path, src_path),
+ f"{link_path} -> {src_path}",
self.config.options.symlinks,
):
success = False
@@ -657,7 +648,7 @@ class FetchTask(CompositeTask):
msg = "failed to create distfiles layout {}".format(
"symlink" if self.config.options.symlinks else "hardlink"
)
- self.config.log_failure("{}\t{}\t{}".format(self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
@@ -668,7 +659,7 @@ class FetchTask(CompositeTask):
os.unlink(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "unlink '{}' failed in {}: {}".format(self.distfile, dir_info, e)
+ msg = f"unlink '{self.distfile}' failed in {dir_info}: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -711,7 +702,7 @@ class FetchTask(CompositeTask):
head, tail = os.path.split(dest)
hardlink_tmp = os.path.join(
- head, ".{}._mirrordist_hardlink_.{}".format(tail, portage.getpid())
+ head, f".{tail}._mirrordist_hardlink_.{portage.getpid()}"
)
try:
@@ -722,11 +713,7 @@ class FetchTask(CompositeTask):
os.link(src, hardlink_tmp)
except OSError as e:
if e.errno != errno.EXDEV:
- msg = "hardlink {} from {} failed: {}".format(
- self.distfile,
- dir_info,
- e,
- )
+ msg = f"hardlink {self.distfile} from {dir_info} failed: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
@@ -736,11 +723,7 @@ class FetchTask(CompositeTask):
try:
os.rename(hardlink_tmp, dest)
except OSError as e:
- msg = "hardlink rename '{}' from {} failed: {}".format(
- self.distfile,
- dir_info,
- e,
- )
+ msg = f"hardlink rename '{self.distfile}' from {dir_info} failed: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
diff --git a/lib/portage/_emirrordist/MirrorDistTask.py b/lib/portage/_emirrordist/MirrorDistTask.py
index 303911cba..c52aec24e 100644
--- a/lib/portage/_emirrordist/MirrorDistTask.py
+++ b/lib/portage/_emirrordist/MirrorDistTask.py
@@ -182,12 +182,12 @@ class MirrorDistTask(CompositeTask):
("dry-run: scheduled deletions for %s: %s files")
% (date, len(date_files))
)
- lines.append("%s\n" % date)
+ lines.append(f"{date}\n")
for filename in date_files:
cpv = "unknown"
if distfiles_db is not None:
cpv = distfiles_db.get(filename, cpv)
- lines.append("\t{}\t{}\n".format(filename, cpv))
+ lines.append(f"\t{filename}\t{cpv}\n")
if not dry_run:
portage.util.write_atomic(
diff --git a/lib/portage/_emirrordist/main.py b/lib/portage/_emirrordist/main.py
index 9a143c221..a497ff4e1 100644
--- a/lib/portage/_emirrordist/main.py
+++ b/lib/portage/_emirrordist/main.py
@@ -244,7 +244,7 @@ def emirrordist_main(args):
parser, options, args = parse_args(args)
if options.version:
- sys.stdout.write("Portage %s\n" % portage.VERSION)
+ sys.stdout.write(f"Portage {portage.VERSION}\n")
return os.EX_OK
config_root = options.config_root
@@ -275,7 +275,7 @@ def emirrordist_main(args):
repo_path = settings.repositories.treemap.get(options.repo)
if repo_path is None:
- parser.error("Unable to locate repository named '{}'".format(options.repo))
+ parser.error(f"Unable to locate repository named '{options.repo}'")
if options.jobs is not None:
options.jobs = int(options.jobs)
@@ -422,7 +422,7 @@ def emirrordist_main(args):
for x in options.whitelist_from:
path = normalize_path(os.path.abspath(x))
if not os.access(path, os.R_OK):
- parser.error("--whitelist-from '%s' is not readable" % x)
+ parser.error(f"--whitelist-from '{x}' is not readable")
if os.path.isfile(path):
normalized_paths.append(path)
elif os.path.isdir(path):
@@ -435,7 +435,7 @@ def emirrordist_main(args):
normalized_paths.append(file)
else:
parser.error(
- "--whitelist-from '%s' is not a regular file or a directory" % x
+ f"--whitelist-from '{x}' is not a regular file or a directory"
)
options.whitelist_from = normalized_paths
diff --git a/lib/portage/_global_updates.py b/lib/portage/_global_updates.py
index 136f6874a..ba183e87b 100644
--- a/lib/portage/_global_updates.py
+++ b/lib/portage/_global_updates.py
@@ -195,7 +195,7 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
if world_modified:
world_list.sort()
- write_atomic(world_file, "".join("{}\n".format(x) for x in world_list))
+ write_atomic(world_file, "".join(f"{x}\n" for x in world_list))
if world_warnings:
# XXX: print warning that we've updated world entries
# and the old name still matches something (from an overlay)?
diff --git a/lib/portage/_selinux.py b/lib/portage/_selinux.py
index d05d6b8e7..bf6ad2489 100644
--- a/lib/portage/_selinux.py
+++ b/lib/portage/_selinux.py
@@ -81,7 +81,7 @@ def settype(newtype):
ret[2] = newtype
return ":".join(ret)
except IndexError:
- warnings.warn("Invalid SELinux context: %s" % getcontext())
+ warnings.warn(f"Invalid SELinux context: {getcontext()}")
return None
@@ -98,7 +98,7 @@ def setexec(ctx="\n"):
if selinux.security_getenforce() == 1:
raise OSError(msg)
else:
- portage.writemsg("!!! %s\n" % msg, noiselevel=-1)
+ portage.writemsg(f"!!! {msg}\n", noiselevel=-1)
if rc < 0:
if selinux.security_getenforce() == 1:
diff --git a/lib/portage/_sets/ProfilePackageSet.py b/lib/portage/_sets/ProfilePackageSet.py
index f7855eb7f..8ef7a5609 100644
--- a/lib/portage/_sets/ProfilePackageSet.py
+++ b/lib/portage/_sets/ProfilePackageSet.py
@@ -20,7 +20,7 @@ class ProfilePackageSet(PackageSet):
description = desc_profile.location
else:
description = None
- self.description = "Profile packages for profile %s" % description
+ self.description = f"Profile packages for profile {description}"
def load(self):
self._setAtoms(
diff --git a/lib/portage/_sets/dbapi.py b/lib/portage/_sets/dbapi.py
index 8f92602f5..f40246261 100644
--- a/lib/portage/_sets/dbapi.py
+++ b/lib/portage/_sets/dbapi.py
@@ -47,7 +47,7 @@ class EverythingSet(PackageSet):
# SLOT installed, in order to avoid the possibility
# of unwanted upgrades as reported in bug #338959.
pkg = pkg_str(cpv, None)
- atom = Atom("{}:{}".format(pkg.cp, pkg.slot))
+ atom = Atom(f"{pkg.cp}:{pkg.slot}")
if self._filter:
if self._filter(atom):
myatoms.append(atom)
@@ -105,7 +105,7 @@ class OwnerSet(PackageSet):
if not exclude_paths:
for link, p in vardb._owners.iter_owners(paths):
pkg = pkg_str(link.mycpv, None)
- rValue.add("{}:{}".format(pkg.cp, pkg.slot))
+ rValue.add(f"{pkg.cp}:{pkg.slot}")
else:
all_paths = set()
all_paths.update(paths)
@@ -113,7 +113,7 @@ class OwnerSet(PackageSet):
exclude_atoms = set()
for link, p in vardb._owners.iter_owners(all_paths):
pkg = pkg_str(link.mycpv, None)
- atom = "{}:{}".format(pkg.cp, pkg.slot)
+ atom = f"{pkg.cp}:{pkg.slot}"
rValue.add(atom)
# Returned paths are relative to ROOT and do not have
# a leading slash.
@@ -242,7 +242,7 @@ class SubslotChangedSet(PackageSet):
cp_list = self._vardb.cp_list
for cp in self._vardb.cp_all():
for pkg in cp_list(cp):
- slot_atom = "{}:{}".format(pkg.cp, pkg.slot)
+ slot_atom = f"{pkg.cp}:{pkg.slot}"
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -281,7 +281,7 @@ class DowngradeSet(PackageSet):
for cp in self._vardb.cp_all():
for cpv in cp_list(cp):
pkg = pkg_str(cpv, None)
- slot_atom = "{}:{}".format(pkg.cp, pkg.slot)
+ slot_atom = f"{pkg.cp}:{pkg.slot}"
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -672,7 +672,7 @@ class ChangedDepsSet(PackageSet):
# if dependencies don't match, trigger the rebuild.
if vdbvars != pdbvars:
- atoms.append("=%s" % cpv)
+ atoms.append(f"={cpv}")
self._setAtoms(atoms)
diff --git a/lib/portage/_sets/files.py b/lib/portage/_sets/files.py
index 97220d841..e76cd1e86 100644
--- a/lib/portage/_sets/files.py
+++ b/lib/portage/_sets/files.py
@@ -38,7 +38,7 @@ class StaticFileSet(EditablePackageSet):
super().__init__(allow_repo=True)
self._filename = filename
self._mtime = None
- self.description = "Package set loaded from file %s" % self._filename
+ self.description = f"Package set loaded from file {self._filename}"
self.loader = ItemFileLoader(self._filename, self._validate)
if greedy and not dbapi:
self.errors.append(
@@ -77,7 +77,7 @@ class StaticFileSet(EditablePackageSet):
write_atomic(
self._filename,
"".join(
- "{}\n".format(atom)
+ f"{atom}\n"
for atom in sorted(chain(self._atoms, self._nonatoms))
),
)
@@ -104,7 +104,7 @@ class StaticFileSet(EditablePackageSet):
matches = self.dbapi.match(a)
for cpv in matches:
pkg = self.dbapi._pkg_str(cpv, None)
- atoms.append("{}:{}".format(pkg.cp, pkg.slot))
+ atoms.append(f"{pkg.cp}:{pkg.slot}")
# In addition to any installed slots, also try to pull
# in the latest new slot that may be available.
atoms.append(a)
@@ -216,7 +216,7 @@ class ConfigFileSet(PackageSet):
def __init__(self, filename):
super().__init__()
self._filename = filename
- self.description = "Package set generated from %s" % self._filename
+ self.description = f"Package set generated from {self._filename}"
self.loader = KeyListFileLoader(self._filename, ValidAtomValidator)
def load(self):
@@ -299,7 +299,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
return ValidAtomValidator(atom, allow_repo=True)
def write(self):
- write_atomic(self._filename, "".join(sorted("%s\n" % x for x in self._atoms)))
+ write_atomic(self._filename, "".join(sorted(f"{x}\n" for x in self._atoms)))
def load(self):
atoms = []
@@ -396,7 +396,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
def write(self):
write_atomic(
- self._filename, "".join(sorted("%s\n" % x for x in self._nonatoms))
+ self._filename, "".join(sorted(f"{x}\n" for x in self._nonatoms))
)
def load(self):
diff --git a/lib/portage/_sets/libs.py b/lib/portage/_sets/libs.py
index a99f621d8..9636b9d2c 100644
--- a/lib/portage/_sets/libs.py
+++ b/lib/portage/_sets/libs.py
@@ -28,7 +28,7 @@ class LibraryConsumerSet(PackageSet):
# without replacement.
pass
else:
- rValue.add("{}:{}".format(pkg.cp, pkg.slot))
+ rValue.add(f"{pkg.cp}:{pkg.slot}")
return rValue
diff --git a/lib/portage/_sets/profiles.py b/lib/portage/_sets/profiles.py
index 94f67cb88..4731a0add 100644
--- a/lib/portage/_sets/profiles.py
+++ b/lib/portage/_sets/profiles.py
@@ -27,13 +27,13 @@ class PackagesSystemSet(PackageSet):
description = desc_profile.location
else:
description = None
- self.description = "System packages for profile %s" % description
+ self.description = f"System packages for profile {description}"
def load(self):
debug = self._debug
if debug:
writemsg_level(
- "\nPackagesSystemSet: profiles: {}\n".format(self._profiles),
+ f"\nPackagesSystemSet: profiles: {self._profiles}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -52,7 +52,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: raw packages: {}\n".format(mylist),
+ f"\nPackagesSystemSet: raw packages: {mylist}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -61,7 +61,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: stacked packages: {}\n".format(mylist),
+ f"\nPackagesSystemSet: stacked packages: {mylist}\n",
level=logging.DEBUG,
noiselevel=-1,
)
diff --git a/lib/portage/_sets/security.py b/lib/portage/_sets/security.py
index fff81c46b..a5b2e6aec 100644
--- a/lib/portage/_sets/security.py
+++ b/lib/portage/_sets/security.py
@@ -49,7 +49,7 @@ class SecuritySet(PackageSet):
for atom in atomlist[:]:
cpv = self._portdbapi.xmatch("match-all", atom)[0]
pkg = self._portdbapi._pkg_str(cpv, None)
- cps = "{}:{}".format(pkg.cp, pkg.slot)
+ cps = f"{pkg.cp}:{pkg.slot}"
if not cps in mydict:
mydict[cps] = (atom, cpv)
else:
diff --git a/lib/portage/_sets/shell.py b/lib/portage/_sets/shell.py
index 6999c6d65..6f59bdbcd 100644
--- a/lib/portage/_sets/shell.py
+++ b/lib/portage/_sets/shell.py
@@ -31,7 +31,7 @@ class CommandOutputSet(PackageSet):
def __init__(self, command):
super().__init__()
self._command = command
- self.description = "Package set generated from output of '%s'" % self._command
+ self.description = f"Package set generated from output of '{self._command}'"
def load(self):
pipe = subprocess.Popen(self._command, stdout=subprocess.PIPE, shell=True)
diff --git a/lib/portage/cache/cache_errors.py b/lib/portage/cache/cache_errors.py
index 075f79350..581c4eed3 100644
--- a/lib/portage/cache/cache_errors.py
+++ b/lib/portage/cache/cache_errors.py
@@ -12,10 +12,7 @@ class InitializationError(CacheError):
self.error, self.class_name = error, class_name
def __str__(self):
- return "Creation of instance {} failed due to {}".format(
- self.class_name,
- str(self.error),
- )
+ return f"Creation of instance {self.class_name} failed due to {str(self.error)}"
class CacheCorruption(CacheError):
@@ -23,7 +20,7 @@ class CacheCorruption(CacheError):
self.key, self.ex = key, ex
def __str__(self):
- return "{} is corrupt: {}".format(self.key, str(self.ex))
+ return f"{self.key} is corrupt: {str(self.ex)}"
class GeneralCacheCorruption(CacheError):
@@ -31,7 +28,7 @@ class GeneralCacheCorruption(CacheError):
self.ex = ex
def __str__(self):
- return "corruption detected: %s" % str(self.ex)
+ return f"corruption detected: {str(self.ex)}"
class InvalidRestriction(CacheError):
@@ -41,7 +38,7 @@ class InvalidRestriction(CacheError):
self.key, self.restriction, self.ex = key, restriction, ex
def __str__(self):
- return "{}:{} is not valid: {}".format(self.key, self.restriction, str(self.ex))
+ return f"{self.key}:{self.restriction} is not valid: {str(self.ex)}"
class ReadOnlyRestriction(CacheError):
diff --git a/lib/portage/cache/ebuild_xattr.py b/lib/portage/cache/ebuild_xattr.py
index 35aa0b40d..f2ba1a389 100644
--- a/lib/portage/cache/ebuild_xattr.py
+++ b/lib/portage/cache/ebuild_xattr.py
@@ -134,7 +134,7 @@ class database(fs_template.FsBased):
parts += 1
# Only the first entry carries the number of parts
- self.__set(path, key, "{}:{}".format(parts, s[0:max_len]))
+ self.__set(path, key, f"{parts}:{s[0:max_len]}")
# Write out the rest
for i in range(1, parts):
@@ -142,7 +142,7 @@ class database(fs_template.FsBased):
val = s[start : start + max_len]
self.__set(path, key + str(i), val)
else:
- self.__set(path, key, "{}:{}".format(1, s))
+ self.__set(path, key, f"{1}:{s}")
def _delitem(self, cpv):
pass # Will be gone with the ebuild
@@ -165,4 +165,4 @@ class database(fs_template.FsBased):
pn_pv = file[:-7]
path = os.path.join(root, file)
if self.__has_cache(path):
- yield "{}/{}/{}".format(cat, os.path.basename(root), file[:-7])
+ yield f"{cat}/{os.path.basename(root)}/{file[:-7]}"
diff --git a/lib/portage/cache/flat_hash.py b/lib/portage/cache/flat_hash.py
index ec243b87a..992721c85 100644
--- a/lib/portage/cache/flat_hash.py
+++ b/lib/portage/cache/flat_hash.py
@@ -26,7 +26,7 @@ class database(fs_template.FsBased):
)
write_keys = set(self._known_keys)
write_keys.add("_eclasses_")
- write_keys.add("_{}_".format(self.validation_chf))
+ write_keys.add(f"_{self.validation_chf}_")
self._write_keys = sorted(write_keys)
if not self.readonly and not os.path.exists(self.location):
self._ensure_dirs()
@@ -74,7 +74,7 @@ class database(fs_template.FsBased):
v = values.get(k)
if not v:
continue
- myf.write("{}={}\n".format(k, v))
+ myf.write(f"{k}={v}\n")
self._ensure_access(fp)
diff --git a/lib/portage/cache/fs_template.py b/lib/portage/cache/fs_template.py
index 2ca295197..c399a0d13 100644
--- a/lib/portage/cache/fs_template.py
+++ b/lib/portage/cache/fs_template.py
@@ -87,4 +87,4 @@ def gen_label(base, label):
label = label.strip('"').strip("'")
label = os.path.join(*(label.rstrip(os.path.sep).split(os.path.sep)))
tail = os.path.split(label)[1]
- return "{}-{:X}".format(tail, abs(label.__hash__()))
+ return f"{tail}-{abs(label.__hash__()):X}"
diff --git a/lib/portage/cache/index/pkg_desc_index.py b/lib/portage/cache/index/pkg_desc_index.py
index 847f10af7..8e9ee47f4 100644
--- a/lib/portage/cache/index/pkg_desc_index.py
+++ b/lib/portage/cache/index/pkg_desc_index.py
@@ -33,9 +33,7 @@ class pkg_node(str):
def pkg_desc_index_line_format(cp, pkgs, desc):
- return "{} {}: {}\n".format(
- cp, " ".join(_pkg_str(cpv).version for cpv in pkgs), desc
- )
+ return f"{cp} {' '.join(_pkg_str(cpv).version for cpv in pkgs)}: {desc}\n"
def pkg_desc_index_line_read(line, repo=None):
diff --git a/lib/portage/cache/sql_template.py b/lib/portage/cache/sql_template.py
index e705a891b..9e6668129 100644
--- a/lib/portage/cache/sql_template.py
+++ b/lib/portage/cache/sql_template.py
@@ -27,7 +27,7 @@ class SQLDatabase(template.database):
pkgid INTEGER PRIMARY KEY, label VARCHAR(255), cpv VARCHAR(255), UNIQUE(label, cpv))"
% SCHEMA_PACKAGE_NAME
)
- SCHEMA_PACKAGE_DROP = "DROP TABLE %s" % SCHEMA_PACKAGE_NAME
+ SCHEMA_PACKAGE_DROP = f"DROP TABLE {SCHEMA_PACKAGE_NAME}"
SCHEMA_VALUES_NAME = "values_cache"
SCHEMA_VALUES_CREATE = (
@@ -35,7 +35,7 @@ class SQLDatabase(template.database):
key varchar(255), value text, UNIQUE(pkgid, key))"
% (SCHEMA_VALUES_NAME, SCHEMA_PACKAGE_NAME)
)
- SCHEMA_VALUES_DROP = "DROP TABLE %s" % SCHEMA_VALUES_NAME
+ SCHEMA_VALUES_DROP = f"DROP TABLE {SCHEMA_VALUES_NAME}"
SCHEMA_INSERT_CPV_INTO_PACKAGE = (
"INSERT INTO %s (label, cpv) VALUES(%%s, %%s)" % SCHEMA_PACKAGE_NAME
)
@@ -76,7 +76,7 @@ class SQLDatabase(template.database):
if not self._table_exists(self.SCHEMA_PACKAGE_NAME):
if self.readonly:
raise cache_errors.ReadOnlyRestriction(
- "table %s doesn't exist" % self.SCHEMA_PACKAGE_NAME
+ f"table {self.SCHEMA_PACKAGE_NAME} doesn't exist"
)
try:
self.con.execute(self.SCHEMA_PACKAGE_CREATE)
@@ -86,7 +86,7 @@ class SQLDatabase(template.database):
if not self._table_exists(self.SCHEMA_VALUES_NAME):
if self.readonly:
raise cache_errors.ReadOnlyRestriction(
- "table %s doesn't exist" % self.SCHEMA_VALUES_NAME
+ f"table {self.SCHEMA_VALUES_NAME} doesn't exist"
)
try:
self.con.execute(self.SCHEMA_VALUES_CREATE)
@@ -255,8 +255,7 @@ class SQLDatabase(template.database):
try:
self.con.execute(
- "SELECT cpv FROM %s WHERE label=%s"
- % (self.SCHEMA_PACKAGE_NAME, self.label)
+ f"SELECT cpv FROM {self.SCHEMA_PACKAGE_NAME} WHERE label={self.label}"
)
except self._BaseError as e:
raise cache_errors.GeneralCacheCorruption(e)
@@ -309,7 +308,7 @@ class SQLDatabase(template.database):
v = v.replace("%", "\\%")
v = v.replace(".*", "%")
query_list.append(
- "(key={} AND value LIKE {})".format(self._sfilter(k), self._sfilter(v))
+ f"(key={self._sfilter(k)} AND value LIKE {self._sfilter(v)})"
)
if len(query_list):
diff --git a/lib/portage/cache/sqlite.py b/lib/portage/cache/sqlite.py
index fd05fe406..00b6807a4 100644
--- a/lib/portage/cache/sqlite.py
+++ b/lib/portage/cache/sqlite.py
@@ -34,7 +34,7 @@ class database(fs_template.FsBased):
self._import_sqlite()
self._allowed_keys = ["_eclasses_"]
self._allowed_keys.extend(self._known_keys)
- self._allowed_keys.extend("_%s_" % k for k in self.chf_types)
+ self._allowed_keys.extend(f"_{k}_" for k in self.chf_types)
self._allowed_keys_set = frozenset(self._allowed_keys)
self._allowed_keys = sorted(self._allowed_keys_set)
@@ -108,11 +108,11 @@ class database(fs_template.FsBased):
connection, cursor, portage.getpid()
)
self._db_cursor.execute(
- "PRAGMA encoding = %s" % self._db_escape_string("UTF-8")
+ f"PRAGMA encoding = {self._db_escape_string('UTF-8')}"
)
if not self.readonly and not self._ensure_access(self._dbpath):
raise cache_errors.InitializationError(
- self.__class__, "can't ensure perms on %s" % self._dbpath
+ self.__class__, f"can't ensure perms on {self._dbpath}"
)
self._db_init_cache_size(config["cache_bytes"])
self._db_init_synchronous(config["synchronous"])
@@ -136,11 +136,11 @@ class database(fs_template.FsBased):
"%s INTEGER PRIMARY KEY AUTOINCREMENT"
% self._db_table["packages"]["package_id"]
)
- table_parameters.append("%s TEXT" % self._db_table["packages"]["package_key"])
+ table_parameters.append(f"{self._db_table['packages']['package_key']} TEXT")
for k in self._allowed_keys:
- table_parameters.append("%s TEXT" % k)
+ table_parameters.append(f"{k} TEXT")
table_parameters.append(
- "UNIQUE(%s)" % self._db_table["packages"]["package_key"]
+ f"UNIQUE({self._db_table['packages']['package_key']})"
)
create_statement.append(",".join(table_parameters))
create_statement.append(")")
@@ -163,7 +163,7 @@ class database(fs_template.FsBased):
)
else:
writemsg(_("sqlite: dropping old table: %s\n") % v["table_name"])
- cursor.execute("DROP TABLE %s" % v["table_name"])
+ cursor.execute(f"DROP TABLE {v['table_name']}")
cursor.execute(v["create"])
else:
cursor.execute(v["create"])
@@ -289,7 +289,7 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
update_statement = []
update_statement.append(
- "REPLACE INTO %s" % self._db_table["packages"]["table_name"]
+ f"REPLACE INTO {self._db_table['packages']['table_name']}"
)
update_statement.append("(")
update_statement.append(
@@ -309,7 +309,7 @@ class database(fs_template.FsBased):
s = " ".join(update_statement)
cursor.execute(s)
except self._db_error as e:
- writemsg("{}: {}\n".format(cpv, str(e)))
+ writemsg(f"{cpv}: {str(e)}\n")
raise
def commit(self):
diff --git a/lib/portage/cache/template.py b/lib/portage/cache/template.py
index 843ca2e5f..7fc8946bb 100644
--- a/lib/portage/cache/template.py
+++ b/lib/portage/cache/template.py
@@ -46,7 +46,7 @@ class database:
if self.serialize_eclasses and "_eclasses_" in d:
for chf_type in chf_types:
- if "_%s_" % chf_type not in d:
+ if f"_{chf_type}_" not in d:
# Skip the reconstruct_eclasses call, since it's
# a waste of time if it contains a different chf_type
# than the current one. In the past, it was possible
@@ -77,7 +77,7 @@ class database:
# those that egencache uses to avoid redundant writes.
d.pop("INHERITED", None)
- mtime_required = not any(d.get("_%s_" % x) for x in chf_types if x != "mtime")
+ mtime_required = not any(d.get(f"_{x}_") for x in chf_types if x != "mtime")
mtime = d.get("_mtime_")
if not mtime:
@@ -89,7 +89,7 @@ class database:
mtime = int(mtime)
except ValueError:
raise cache_errors.CacheCorruption(
- cpv, "_mtime_ conversion to int failed: {}".format(mtime)
+ cpv, f"_mtime_ conversion to int failed: {mtime}"
)
d["_mtime_"] = mtime
return d
@@ -240,7 +240,7 @@ class database:
return False
def _validate_entry(self, chf_type, entry, ebuild_hash, eclass_db):
- hash_key = "_%s_" % chf_type
+ hash_key = f"_{chf_type}_"
try:
entry_hash = entry[hash_key]
except KeyError:
@@ -311,11 +311,11 @@ def serialize_eclasses(eclass_dict, chf_type="mtime", paths=True):
getter = operator.attrgetter(chf_type)
if paths:
return "\t".join(
- "{}\t{}\t{}".format(k, v.eclass_dir, getter(v))
+ f"{k}\t{v.eclass_dir}\t{getter(v)}"
for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
return "\t".join(
- "{}\t{}".format(k, getter(v))
+ f"{k}\t{getter(v)}"
for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
@@ -349,11 +349,11 @@ def reconstruct_eclasses(cpv, eclass_string, chf_type="mtime", paths=True):
if paths:
if len(eclasses) % 3 != 0:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
elif len(eclasses) % 2 != 0:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
d = {}
try:
@@ -367,11 +367,11 @@ def reconstruct_eclasses(cpv, eclass_string, chf_type="mtime", paths=True):
d[name] = converter(val)
except IndexError:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
except ValueError:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ not valid for chf_type {}".format(chf_type)
+ cpv, f"_eclasses_ not valid for chf_type {chf_type}"
)
del eclasses
return d
diff --git a/lib/portage/checksum.py b/lib/portage/checksum.py
index 3bdb2bc53..f93d7fbd4 100644
--- a/lib/portage/checksum.py
+++ b/lib/portage/checksum.py
@@ -273,12 +273,12 @@ if "RMD160" not in hashfunc_map or "WHIRLPOOL" not in hashfunc_map:
("WHIRLPOOL", "WHIRLPOOL"),
):
if local_name not in hashfunc_map and hasattr(
- mhash, "MHASH_%s" % hash_name
+ mhash, f"MHASH_{hash_name}"
):
_generate_hash_function(
local_name,
functools.partial(
- mhash.MHASH, getattr(mhash, "MHASH_%s" % hash_name)
+ mhash.MHASH, getattr(mhash, f"MHASH_{hash_name}")
),
origin="mhash",
)
diff --git a/lib/portage/data.py b/lib/portage/data.py
index 8aa66df7f..18cf50a87 100644
--- a/lib/portage/data.py
+++ b/lib/portage/data.py
@@ -215,7 +215,7 @@ def _get_global(k):
return portage_gid
if k == "portage_uid":
return portage_uid
- raise AssertionError("unknown name: %s" % k)
+ raise AssertionError(f"unknown name: {k}")
elif k == "userpriv_groups":
v = [_get_global("portage_gid")]
@@ -287,7 +287,7 @@ def _get_global(k):
else:
v = pwd_struct.pw_name
else:
- raise AssertionError("unknown name: %s" % k)
+ raise AssertionError(f"unknown name: {k}")
globals()[k] = v
_initialized_globals.add(k)
diff --git a/lib/portage/dbapi/_MergeProcess.py b/lib/portage/dbapi/_MergeProcess.py
index b9f58e1ea..97ec8ca34 100644
--- a/lib/portage/dbapi/_MergeProcess.py
+++ b/lib/portage/dbapi/_MergeProcess.py
@@ -50,7 +50,7 @@ class MergeProcess(ForkProcess):
# since closing of file descriptors in the subprocess
# can prevent access to open database connections such
# as that used by the sqlite metadata cache module.
- cpv = "{}/{}".format(self.mycat, self.mypkg)
+ cpv = f"{self.mycat}/{self.mypkg}"
settings = self.settings
if cpv != settings.mycpv or "EAPI" not in settings.configdict["pkg"]:
settings.reload()
diff --git a/lib/portage/dbapi/_VdbMetadataDelta.py b/lib/portage/dbapi/_VdbMetadataDelta.py
index 295cdfa64..99d8cf94e 100644
--- a/lib/portage/dbapi/_VdbMetadataDelta.py
+++ b/lib/portage/dbapi/_VdbMetadataDelta.py
@@ -111,7 +111,7 @@ class VdbMetadataDelta:
"package": cpv.cp,
"version": cpv.version,
"slot": slot,
- "counter": "%s" % counter,
+ "counter": f"{counter}",
}
deltas_obj["deltas"].append(delta_node)
diff --git a/lib/portage/dbapi/__init__.py b/lib/portage/dbapi/__init__.py
index 1719bd6c5..366a6c170 100644
--- a/lib/portage/dbapi/__init__.py
+++ b/lib/portage/dbapi/__init__.py
@@ -350,11 +350,11 @@ class dbapi:
if "/" + MERGING_IDENTIFIER in mypath:
if os.path.exists(mypath):
writemsg(
- colorize("BAD", _("INCOMPLETE MERGE:")) + " %s\n" % mypath,
+ colorize("BAD", _("INCOMPLETE MERGE:")) + f" {mypath}\n",
noiselevel=-1,
)
else:
- writemsg("!!! Invalid db entry: %s\n" % mypath, noiselevel=-1)
+ writemsg(f"!!! Invalid db entry: {mypath}\n", noiselevel=-1)
def update_ents(self, updates, onProgress=None, onUpdate=None):
"""
@@ -453,7 +453,7 @@ class dbapi:
and mycpv.sub_slot
and mycpv.sub_slot not in (mycpv.slot, newslot)
):
- newslot = "{}/{}".format(newslot, mycpv.sub_slot)
+ newslot = f"{newslot}/{mycpv.sub_slot}"
mydata = {"SLOT": newslot + "\n"}
self.aux_update(mycpv, mydata)
return moves
diff --git a/lib/portage/dbapi/bintree.py b/lib/portage/dbapi/bintree.py
index 61aa610df..97c74903a 100644
--- a/lib/portage/dbapi/bintree.py
+++ b/lib/portage/dbapi/bintree.py
@@ -273,7 +273,7 @@ class bindbapi(fakedbapi):
encoding_key = False
else:
raise InvalidBinaryPackageFormat(
- "Unknown binary package format %s" % binpkg_path
+ f"Unknown binary package format {binpkg_path}"
)
for k, v in values.items():
@@ -295,7 +295,7 @@ class bindbapi(fakedbapi):
mybinpkg.update_metadata(mydata)
else:
raise InvalidBinaryPackageFormat(
- "Unknown binary package format %s" % binpkg_path
+ f"Unknown binary package format {binpkg_path}"
)
# inject will clear stale caches via cpv_inject.
@@ -336,7 +336,7 @@ class bindbapi(fakedbapi):
)
else:
raise InvalidBinaryPackageFormat(
- "Unknown binary package format %s" % binpkg_file
+ f"Unknown binary package format {binpkg_file}"
)
async def unpack_contents(self, pkg, dest_dir, loop=None):
@@ -374,7 +374,7 @@ class bindbapi(fakedbapi):
extractor.start()
await extractor.async_wait()
if extractor.returncode != os.EX_OK:
- raise PortageException("Error Extracting '{}'".format(pkg_path))
+ raise PortageException(f"Error Extracting '{pkg_path}'")
elif binpkg_format == "gpkg":
await loop.run_in_executor(
ForkExecutor(loop=loop),
@@ -426,7 +426,7 @@ class bindbapi(fakedbapi):
except KeyError:
raise portage.exception.MissingSignature("SIZE")
except ValueError:
- raise portage.exception.InvalidSignature("SIZE: %s" % metadata["SIZE"])
+ raise portage.exception.InvalidSignature(f"SIZE: {metadata['SIZE']}")
else:
filesdict[os.path.basename(self.bintree.getname(pkg))] = size
@@ -644,7 +644,7 @@ class binarytree:
# If this update has already been applied to the same
# package build then silently continue.
applied = False
- for maybe_applied in self.dbapi.match("={}".format(mynewcpv)):
+ for maybe_applied in self.dbapi.match(f"={mynewcpv}"):
if maybe_applied.build_time == mycpv.build_time:
applied = True
break
@@ -1080,7 +1080,7 @@ class binarytree:
)
)
for line in textwrap.wrap("".join(msg), 72):
- writemsg("!!! %s\n" % line, noiselevel=-1)
+ writemsg(f"!!! {line}\n", noiselevel=-1)
self.invalids.append(mypkg)
continue
@@ -1092,7 +1092,7 @@ class binarytree:
build_id = self._parse_build_id(myfile)
if build_id < 1:
invalid_name = True
- elif myfile != "{}-{}.xpak".format(mypf, build_id):
+ elif myfile != f"{mypf}-{build_id}.xpak":
invalid_name = True
else:
mypkg = mypkg[: -len(str(build_id)) - 1]
@@ -1100,12 +1100,12 @@ class binarytree:
build_id = self._parse_build_id(myfile)
if build_id > 0:
multi_instance = True
- if myfile != "{}-{}.gpkg.tar".format(mypf, build_id):
+ if myfile != f"{mypf}-{build_id}.gpkg.tar":
invalid_name = True
else:
mypkg = mypkg[: -len(str(build_id)) - 1]
else:
- if myfile != "%s.gpkg.tar" % mypf:
+ if myfile != f"{mypf}.gpkg.tar":
invalid_name = True
elif myfile != mypf + ".tbz2":
invalid_name = True
@@ -1131,7 +1131,7 @@ class binarytree:
build_id = None
if multi_instance:
- name_split = catpkgsplit("{}/{}".format(mycat, mypf))
+ name_split = catpkgsplit(f"{mycat}/{mypf}")
if (
name_split is None
or tuple(catsplit(mydir)) != name_split[:2]
@@ -1251,7 +1251,7 @@ class binarytree:
user, passwd = user.split(":", 1)
if port is not None:
- port_str = ":{}".format(port)
+ port_str = f":{port}"
if host.endswith(port_str):
host = host[: -len(port_str)]
pkgindex_file = os.path.join(
@@ -1343,7 +1343,7 @@ class binarytree:
raise
except ValueError:
raise ParseError(
- "Invalid Portage BINHOST value '%s'" % url.lstrip()
+ f"Invalid Portage BINHOST value '{url.lstrip()}'"
)
if f is None:
@@ -1356,7 +1356,7 @@ class binarytree:
# matches that of the cached Packages file.
ssh_args = ["ssh"]
if port is not None:
- ssh_args.append("-p{}".format(port))
+ ssh_args.append(f"-p{port}")
# NOTE: shlex evaluates embedded quotes
ssh_args.extend(
portage.util.shlex_split(
@@ -1400,7 +1400,7 @@ class binarytree:
fcmd=fcmd, fcmd_vars=fcmd_vars
)
if not success:
- raise OSError("{} failed".format(setting))
+ raise OSError(f"{setting} failed")
f = open(tmp_filename, "rb")
f_dec = codecs.iterdecode(
@@ -1475,7 +1475,7 @@ class binarytree:
error_msg = str(e)
except UnicodeDecodeError as uerror:
error_msg = str(uerror.object, encoding="utf_8", errors="replace")
- writemsg("!!! %s\n\n" % error_msg)
+ writemsg(f"!!! {error_msg}\n\n")
del e
pkgindex = None
if proc is not None:
@@ -1977,7 +1977,7 @@ class binarytree:
deps = use_reduce(deps, uselist=use, token_class=token_class)
deps = paren_enclose(deps)
except portage.exception.InvalidDependString as e:
- writemsg("{}: {}\n".format(k, e), noiselevel=-1)
+ writemsg(f"{k}: {e}\n", noiselevel=-1)
raise
metadata[k] = deps
@@ -1993,13 +1993,13 @@ class binarytree:
if not self.populated:
self.populate()
writemsg("\n\n", 1)
- writemsg("mydep: %s\n" % mydep, 1)
+ writemsg(f"mydep: {mydep}\n", 1)
mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
- writemsg("mydep: %s\n" % mydep, 1)
+ writemsg(f"mydep: {mydep}\n", 1)
mykey = dep_getkey(mydep)
- writemsg("mykey: %s\n" % mykey, 1)
+ writemsg(f"mykey: {mykey}\n", 1)
mymatch = best(match_from_list(mydep, self.dbapi.cp_list(mykey)))
- writemsg("mymatch: %s\n" % mymatch, 1)
+ writemsg(f"mymatch: {mymatch}\n", 1)
if mymatch is None:
return ""
return mymatch
@@ -2059,19 +2059,13 @@ class binarytree:
elif binpkg_format == "xpak":
if self._multi_instance:
pf = catsplit(cpv)[1]
- filename = "{}-{}.xpak".format(
- os.path.join(self.pkgdir, cpv.cp, pf),
- "1",
- )
+ filename = f"{os.path.join(self.pkgdir, cpv.cp, pf)}-1.xpak"
else:
filename = os.path.join(self.pkgdir, cpv + ".tbz2")
elif binpkg_format == "gpkg":
if self._multi_instance:
pf = catsplit(cpv)[1]
- filename = "{}-{}.gpkg.tar".format(
- os.path.join(self.pkgdir, cpv.cp, pf),
- "1",
- )
+ filename = f"{os.path.join(self.pkgdir, cpv.cp, pf)}-1.gpkg.tar"
else:
filename = os.path.join(self.pkgdir, cpv + ".gpkg.tar")
else:
diff --git a/lib/portage/dbapi/porttree.py b/lib/portage/dbapi/porttree.py
index 15e1fd6ff..51e197bb6 100644
--- a/lib/portage/dbapi/porttree.py
+++ b/lib/portage/dbapi/porttree.py
@@ -161,7 +161,7 @@ class _better_cache:
continue
for p in pkg_list:
try:
- atom = Atom("{}/{}".format(cat, p))
+ atom = Atom(f"{cat}/{p}")
except InvalidAtom:
continue
if atom != atom.cp:
@@ -574,7 +574,7 @@ class portdbapi(dbapi):
try:
cache = self.auxdb[repo_path]
chf = cache.validation_chf
- metadata["_%s_" % chf] = getattr(ebuild_hash, chf)
+ metadata[f"_{chf}_"] = getattr(ebuild_hash, chf)
except CacheError:
# Normally this shouldn't happen, so we'll show
# a traceback for debugging purposes.
@@ -600,7 +600,7 @@ class portdbapi(dbapi):
_("!!! aux_get(): ebuild for " "'%s' does not exist at:\n") % (cpv,),
noiselevel=-1,
)
- writemsg("!!! %s\n" % ebuild_path, noiselevel=-1)
+ writemsg(f"!!! {ebuild_path}\n", noiselevel=-1)
raise PortageKeyError(cpv)
# Pull pre-generated metadata from the metadata/cache/
@@ -886,9 +886,7 @@ class portdbapi(dbapi):
# since callers already handle it.
result.set_exception(
portage.exception.InvalidDependString(
- "getFetchMap(): '{}' has unsupported EAPI: '{}'".format(
- mypkg, eapi
- )
+ f"getFetchMap(): '{mypkg}' has unsupported EAPI: '{eapi}'"
)
)
return
@@ -1065,7 +1063,7 @@ class portdbapi(dbapi):
oroot + "/" + x, EmptyOnError=1, ignorecvs=1, dirsonly=1
):
try:
- atom = Atom("{}/{}".format(x, y))
+ atom = Atom(f"{x}/{y}")
except InvalidAtom:
continue
if atom != atom.cp:
@@ -1373,7 +1371,7 @@ class portdbapi(dbapi):
myval = ""
else:
- raise AssertionError("Invalid level argument: '%s'" % level)
+ raise AssertionError(f"Invalid level argument: '{level}'")
if self.frozen:
xcache_this_level = self.xcache.get(level)
@@ -1436,10 +1434,10 @@ class portdbapi(dbapi):
continue
except PortageException as e:
writemsg(
- "!!! Error: aux_get('{}', {})\n".format(mycpv, aux_keys),
+ f"!!! Error: aux_get('{mycpv}', {aux_keys})\n",
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
del e
continue
diff --git a/lib/portage/dbapi/vartree.py b/lib/portage/dbapi/vartree.py
index 86c2f2034..78197b533 100644
--- a/lib/portage/dbapi/vartree.py
+++ b/lib/portage/dbapi/vartree.py
@@ -339,7 +339,7 @@ class vardbapi(dbapi):
"""
lock, counter = self._slot_locks.get(slot_atom, (None, 0))
if lock is None:
- lock_path = self.getpath("{}:{}".format(slot_atom.cp, slot_atom.slot))
+ lock_path = self.getpath(f"{slot_atom.cp}:{slot_atom.slot}")
ensure_dirs(os.path.dirname(lock_path))
lock = lockfile(lock_path, wantnewlockfile=True)
self._slot_locks[slot_atom] = (lock, counter + 1)
@@ -498,7 +498,7 @@ class vardbapi(dbapi):
continue
if len(mysplit) > 1:
if ps[0] == mysplit[1]:
- cpv = "{}/{}".format(mysplit[0], x)
+ cpv = f"{mysplit[0]}/{x}"
metadata = dict(
zip(
self._aux_cache_keys,
@@ -1081,7 +1081,7 @@ class vardbapi(dbapi):
opts_list = portage.util.shlex_split(settings.get("QUICKPKG_DEFAULT_OPTS", ""))
if include_config is not None:
opts_list.append(
- "--include-config={}".format("y" if include_config else "n")
+ f"--include-config={'y' if include_config else 'n'}"
)
if include_unmodified_config is not None:
opts_list.append(
@@ -1111,7 +1111,7 @@ class vardbapi(dbapi):
)
await proc.wait()
if proc.returncode != os.EX_OK:
- raise PortageException("command failed: {}".format(tar_cmd))
+ raise PortageException(f"command failed: {tar_cmd}")
elif binpkg_format == "gpkg":
gpkg_tmp_fd, gpkg_tmp = tempfile.mkstemp(suffix=".gpkg.tar")
os.close(gpkg_tmp_fd)
@@ -1134,7 +1134,7 @@ class vardbapi(dbapi):
_(
"Config files excluded by QUICKPKG_DEFAULT_OPTS (see quickpkg(1) man page):"
)
- ] + ["\t{}".format(name) for name in excluded_config_files]
+ ] + [f"\t{name}" for name in excluded_config_files]
out = io.StringIO()
for line in log_lines:
portage.elog.messages.ewarn(line, phase="install", key=cpv, out=out)
@@ -1188,7 +1188,7 @@ class vardbapi(dbapi):
_("!!! COUNTER file is corrupt: '%s'\n") % self._counter_path,
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
except OSError as e:
# Silently allow ENOENT since files under
# /var/cache/ are allowed to disappear.
@@ -1197,7 +1197,7 @@ class vardbapi(dbapi):
_("!!! Unable to read COUNTER file: '%s'\n") % self._counter_path,
noiselevel=-1,
)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
del e
if self._cached_counter == counter:
@@ -1330,7 +1330,7 @@ class vardbapi(dbapi):
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
writemsg_level(
- "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
+ f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1
)
continue
@@ -1929,7 +1929,7 @@ class dblink:
(slot,) = db.aux_get(self.mycpv, ["SLOT"])
slot = slot.partition("/")[0]
- slot_atoms.append(portage.dep.Atom("{}:{}".format(self.mycpv.cp, slot)))
+ slot_atoms.append(portage.dep.Atom(f"{self.mycpv.cp}:{slot}"))
for blocker in self._blockers or []:
slot_atoms.append(blocker.slot_atom)
@@ -2350,7 +2350,7 @@ class dblink:
if others_in_slot is None:
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "{}:{}".format(portage.cpv_getkey(self.mycpv), slot)
+ f"{portage.cpv_getkey(self.mycpv)}:{slot}"
)
others_in_slot = []
for cur_cpv in slot_matches:
@@ -2451,7 +2451,7 @@ class dblink:
noiselevel=-1,
)
showMessage(
- "{}\n".format(eapi_unsupported), level=logging.ERROR, noiselevel=-1
+ f"{eapi_unsupported}\n", level=logging.ERROR, noiselevel=-1
)
elif os.path.isfile(myebuildpath):
phase = EbuildPhase(
@@ -2650,7 +2650,7 @@ class dblink:
def _show_unmerge(self, zing, desc, file_type, file_name):
self._display_merge(
- "{} {} {} {}\n".format(zing, desc.ljust(8), file_type, file_name)
+ f"{zing} {desc.ljust(8)} {file_type} {file_name}\n"
)
def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
@@ -2681,7 +2681,7 @@ class dblink:
others_in_slot = []
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "{}:{}".format(portage.cpv_getkey(self.mycpv), slot)
+ f"{portage.cpv_getkey(self.mycpv)}:{slot}"
)
for cur_cpv in slot_matches:
if cur_cpv == self.mycpv:
@@ -2744,7 +2744,7 @@ class dblink:
# administrative and pkg_postinst stuff.
self._eerror(
"postrm",
- ["Could not chmod or unlink '{}': {}".format(file_name, ose)],
+ [f"Could not chmod or unlink '{file_name}': {ose}"],
)
else:
@@ -3068,7 +3068,7 @@ class dblink:
flat_list.update(*protected_symlinks.values())
flat_list = sorted(flat_list)
for f in flat_list:
- lines.append("\t%s" % (os.path.join(real_root, f.lstrip(os.sep))))
+ lines.append(f"\t{os.path.join(real_root, f.lstrip(os.sep))}")
lines.append("")
self._elog("elog", "postrm", lines)
@@ -3113,7 +3113,7 @@ class dblink:
msg.append("")
for f in flat_list:
- msg.append("\t%s" % os.path.join(real_root, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(real_root, f.lstrip(os.path.sep))}")
msg.append("")
msg.append("Use the UNINSTALL_IGNORE variable to exempt specific symlinks")
@@ -3996,7 +3996,7 @@ class dblink:
if not dest_lstat:
raise AssertionError(
"unable to find non-directory "
- + "parent for '%s'" % dest_path
+ + f"parent for '{dest_path}'"
)
dest_path = parent_path
f = os.path.sep + dest_path[len(destroot) :]
@@ -4166,7 +4166,7 @@ class dblink:
msg.append("")
for path_list in suspicious_hardlinks:
for path, s in path_list:
- msg.append("\t%s" % path)
+ msg.append(f"\t{path}")
msg.append("")
msg.append(
_("See the Gentoo Security Handbook " "guide for advice on how to proceed.")
@@ -4396,7 +4396,7 @@ class dblink:
# Use _pkg_str discard the sub-slot part if necessary.
slot = _pkg_str(self.mycpv, slot=slot).slot
cp = self.mysplit[0]
- slot_atom = "{}:{}".format(cp, slot)
+ slot_atom = f"{cp}:{slot}"
self.lockdb()
try:
@@ -4724,7 +4724,7 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for f in rofilesystems:
- msg.append("\t%s" % f)
+ msg.append(f"\t{f}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4753,15 +4753,15 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for k, v in sorted(internal_collisions.items(), key=operator.itemgetter(0)):
- msg.append("\t%s" % os.path.join(destroot, k.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, k.lstrip(os.path.sep))}")
for (file1, file2), differences in sorted(v.items()):
msg.append(
- "\t\t%s" % os.path.join(destroot, file1.lstrip(os.path.sep))
+ f"\t\t{os.path.join(destroot, file1.lstrip(os.path.sep))}"
)
msg.append(
- "\t\t%s" % os.path.join(destroot, file2.lstrip(os.path.sep))
+ f"\t\t{os.path.join(destroot, file2.lstrip(os.path.sep))}"
)
- msg.append("\t\t\tDifferences: %s" % ", ".join(differences))
+ msg.append(f"\t\t\tDifferences: {', '.join(differences)}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4790,7 +4790,7 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for f in symlink_collisions:
- msg.append("\t%s" % os.path.join(destroot, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4843,7 +4843,7 @@ class dblink:
msg.append("")
for f in collisions:
- msg.append("\t%s" % os.path.join(destroot, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}")
eerror(msg)
@@ -4872,9 +4872,9 @@ class dblink:
for pkg in owners:
pkg = self.vartree.dbapi._pkg_str(pkg.mycpv, None)
- pkg_info_str = "{}{}{}".format(pkg, _slot_separator, pkg.slot)
+ pkg_info_str = f"{pkg}{_slot_separator}{pkg.slot}"
if pkg.repo != _unknown_repo:
- pkg_info_str += "{}{}".format(_repo_separator, pkg.repo)
+ pkg_info_str += f"{_repo_separator}{pkg.repo}"
pkg_info_strs[pkg] = pkg_info_str
finally:
@@ -4889,7 +4889,7 @@ class dblink:
msg.append(pkg_info_strs[pkg.mycpv])
for f in sorted(owned_files):
msg.append(
- "\t%s" % os.path.join(destroot, f.lstrip(os.path.sep))
+ f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}"
)
msg.append("")
collision_message_type(msg)
@@ -4968,7 +4968,7 @@ class dblink:
rval = self._pre_merge_backup(self._installed_instance, downgrade)
if rval != os.EX_OK:
showMessage(
- _("!!! FAILED preinst: ") + "quickpkg: %s\n" % rval,
+ _("!!! FAILED preinst: ") + f"quickpkg: {rval}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -5014,7 +5014,7 @@ class dblink:
encoding=_encodings["repo.content"],
errors="backslashreplace",
) as f:
- f.write("%s" % counter)
+ f.write(f"{counter}")
self.updateprotect()
@@ -5604,11 +5604,11 @@ class dblink:
msg.append(
_("Installation of a symlink is blocked by a directory:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(
_("This symlink will be merged with a different name:")
)
- msg.append(" '%s'" % newdest)
+ msg.append(f" '{newdest}'")
msg.append("")
self._eerror("preinst", msg)
mydest = newdest
@@ -5654,7 +5654,7 @@ class dblink:
],
)
- showMessage("{} {} -> {}\n".format(zing, mydest, myto))
+ showMessage(f"{zing} {mydest} -> {myto}\n")
outfile.write(
self._format_contents_line(
node_type="sym",
@@ -5670,7 +5670,7 @@ class dblink:
noiselevel=-1,
)
showMessage(
- "!!! {} -> {}\n".format(mydest, myto),
+ f"!!! {mydest} -> {myto}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -5719,7 +5719,7 @@ class dblink:
stat.S_ISLNK(mydmode) and os.path.isdir(mydest)
):
# a symlink to an existing directory will work for us; keep it:
- showMessage("--- %s/\n" % mydest)
+ showMessage(f"--- {mydest}/\n")
if bsd_chflags:
bsd_chflags.lchflags(mydest, dflags)
else:
@@ -5730,9 +5730,9 @@ class dblink:
msg.append(
_("Installation of a directory is blocked by a file:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(_("This file will be renamed to a different name:"))
- msg.append(" '%s'" % backup_dest)
+ msg.append(f" '{backup_dest}'")
msg.append("")
self._eerror("preinst", msg)
if (
@@ -5772,7 +5772,7 @@ class dblink:
bsd_chflags.lchflags(mydest, dflags)
os.chmod(mydest, mystat[0])
os.chown(mydest, mystat[4], mystat[5])
- showMessage(">>> %s/\n" % mydest)
+ showMessage(f">>> {mydest}/\n")
else:
try:
# destination doesn't exist
@@ -5793,7 +5793,7 @@ class dblink:
del e
os.chmod(mydest, mystat[0])
os.chown(mydest, mystat[4], mystat[5])
- showMessage(">>> %s/\n" % mydest)
+ showMessage(f">>> {mydest}/\n")
try:
self._merged_path(mydest, os.lstat(mydest))
@@ -5819,9 +5819,9 @@ class dblink:
msg.append(
_("Installation of a regular file is blocked by a directory:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(_("This file will be merged with a different name:"))
- msg.append(" '%s'" % newdest)
+ msg.append(f" '{newdest}'")
msg.append("")
self._eerror("preinst", msg)
mydest = newdest
@@ -5866,7 +5866,7 @@ class dblink:
mtime_ns=mymtime,
)
)
- showMessage("{} {}\n".format(zing, mydest))
+ showMessage(f"{zing} {mydest}\n")
else:
# we are merging a fifo or device node
zing = "!!!"
@@ -5980,10 +5980,10 @@ class dblink:
if md5_digest is not None:
fields.append(md5_digest)
elif symlink_target is not None:
- fields.append("-> {}".format(symlink_target))
+ fields.append(f"-> {symlink_target}")
if mtime_ns is not None:
fields.append(str(mtime_ns // 1000000000))
- return "{}\n".format(" ".join(fields))
+ return f"{' '.join(fields)}\n"
def _merged_path(self, path, lstatobj, exists=True):
previous_path = self._device_path_map.get(lstatobj.st_dev)
@@ -6193,7 +6193,7 @@ class dblink:
errors="backslashreplace",
) as f:
for x in mylist:
- f.write("%s\n" % x)
+ f.write(f"{x}\n")
def isregular(self):
"Is this a regular package (does it have a CATEGORY file? A dblink can be virtual *and* regular)"
@@ -6229,7 +6229,7 @@ class dblink:
trees = QueryCommand.get_db()[self.settings["EROOT"]]
bintree = trees["bintree"]
- for binpkg in reversed(bintree.dbapi.match("={}".format(backup_dblink.mycpv))):
+ for binpkg in reversed(bintree.dbapi.match(f"={backup_dblink.mycpv}")):
if binpkg.build_time == build_time:
return os.EX_OK
@@ -6269,7 +6269,7 @@ class dblink:
args=[
portage._python_interpreter,
quickpkg_binary,
- "={}".format(backup_dblink.mycpv),
+ f"={backup_dblink.mycpv}",
],
background=background,
env=env,
@@ -6396,12 +6396,12 @@ def write_contents(contents, root, f):
relative_filename = filename[root_len:]
if entry_type == "obj":
entry_type, mtime, md5sum = entry_data
- line = "{} {} {} {}\n".format(entry_type, relative_filename, md5sum, mtime)
+ line = f"{entry_type} {relative_filename} {md5sum} {mtime}\n"
elif entry_type == "sym":
entry_type, mtime, link = entry_data
- line = "{} {} -> {} {}\n".format(entry_type, relative_filename, link, mtime)
+ line = f"{entry_type} {relative_filename} -> {link} {mtime}\n"
else: # dir, dev, fif
- line = "{} {}\n".format(entry_type, relative_filename)
+ line = f"{entry_type} {relative_filename}\n"
f.write(line)
@@ -6449,7 +6449,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None, xattrs=Fals
if path.startswith(root):
arcname = "./" + path[len(root) :]
else:
- raise ValueError("invalid root argument: '%s'" % root)
+ raise ValueError(f"invalid root argument: '{root}'")
live_path = path
if (
"dir" == contents_type
diff --git a/lib/portage/debug.py b/lib/portage/debug.py
index ed47e72b5..b7106b799 100644
--- a/lib/portage/debug.py
+++ b/lib/portage/debug.py
@@ -69,12 +69,12 @@ class trace_handler:
my_repr = repr(arg)
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
- return "value=%s " % my_repr
+ return f"value={my_repr} "
if "exception" == event:
my_repr = repr(arg[1])
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
- return "type={} value={} ".format(arg[0], my_repr)
+ return f"type={arg[0]} value={my_repr} "
return ""
diff --git a/lib/portage/dep/__init__.py b/lib/portage/dep/__init__.py
index b8f249420..d48d5653c 100644
--- a/lib/portage/dep/__init__.py
+++ b/lib/portage/dep/__init__.py
@@ -461,9 +461,9 @@ def paren_enclose(mylist, unevaluated_atom=False, opconvert=False):
for x in mylist:
if isinstance(x, list):
if opconvert and x and x[0] == "||":
- mystrparts.append("{} ( {} )".format(x[0], paren_enclose(x[1:])))
+ mystrparts.append(f"{x[0]} ( {paren_enclose(x[1:])} )")
else:
- mystrparts.append("( %s )" % paren_enclose(x))
+ mystrparts.append(f"( {paren_enclose(x)} )")
else:
if unevaluated_atom:
x = getattr(x, "unevaluated_atom", x)
@@ -1163,10 +1163,10 @@ class _use_dep:
def __str__(self):
if not self.tokens:
return ""
- return "[{}]".format(",".join(self.tokens))
+ return f"[{','.join(self.tokens)}]"
def __repr__(self):
- return "portage.dep._use_dep(%s)" % repr(self.tokens)
+ return f"portage.dep._use_dep({repr(self.tokens)})"
def evaluate_conditionals(self, use):
"""
@@ -1670,12 +1670,7 @@ class Atom(str):
if not isinstance(eapi, str):
raise TypeError(
"expected eapi argument of "
- + "%s, got %s: %s"
- % (
- str,
- type(eapi),
- eapi,
- )
+ + f"{str}, got {type(eapi)}: {eapi}"
)
if self.slot and not eapi_attrs.slot_deps:
raise InvalidAtom(
@@ -1761,7 +1756,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
atom += _repo_separator + repo
@@ -1794,7 +1789,7 @@ class Atom(str):
False otherwise.
"""
if not isinstance(other, Atom):
- raise TypeError("expected {}, got {}".format(Atom, type(other)))
+ raise TypeError(f"expected {Atom}, got {type(other)}")
if self == other:
return True
@@ -1828,7 +1823,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use.evaluate_conditionals(use)
@@ -1861,7 +1856,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use.violated_conditionals(other_use, is_valid_flag, parent_use)
@@ -1882,7 +1877,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use._eval_qa_conditionals(use_mask, use_force)
diff --git a/lib/portage/dep/_dnf.py b/lib/portage/dep/_dnf.py
index 21de5344d..d9ee10bf2 100644
--- a/lib/portage/dep/_dnf.py
+++ b/lib/portage/dep/_dnf.py
@@ -24,7 +24,7 @@ def dnf_convert(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in {}".format(dep_struct)
+ ), f"Normalization error, nested conjunction found in {dep_struct}"
if any(isinstance(element, list) for element in x):
x_dnf = ["||"]
for element in x[1:]:
@@ -34,14 +34,10 @@ def dnf_convert(dep_struct):
# must be a conjunction.
assert (
element
- ), "Normalization error, empty conjunction found in {}".format(
- x
- )
+ ), f"Normalization error, empty conjunction found in {x}"
assert (
element[0] != "||"
- ), "Normalization error, nested disjunction found in {}".format(
- x
- )
+ ), f"Normalization error, nested disjunction found in {x}"
element = dnf_convert(element)
if contains_disjunction(element):
assert (
@@ -93,9 +89,7 @@ def contains_disjunction(dep_struct):
is_disjunction = dep_struct and dep_struct[0] == "||"
for x in dep_struct:
if isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in {}".format(
- dep_struct,
- )
+ assert x, f"Normalization error, empty conjunction found in {dep_struct}"
if x[0] == "||":
return True
if is_disjunction and contains_disjunction(x):
diff --git a/lib/portage/dep/_slot_operator.py b/lib/portage/dep/_slot_operator.py
index 6e59554a8..b5ffc61eb 100644
--- a/lib/portage/dep/_slot_operator.py
+++ b/lib/portage/dep/_slot_operator.py
@@ -106,10 +106,7 @@ def _eval_deps(dep_struct, vardbs):
except (KeyError, InvalidData):
pass
else:
- slot_part = "{}/{}=".format(
- best_version.slot,
- best_version.sub_slot,
- )
+ slot_part = f"{best_version.slot}/{best_version.sub_slot}="
x = x.with_slot(slot_part)
dep_struct[i] = x
break
diff --git a/lib/portage/dep/dep_check.py b/lib/portage/dep/dep_check.py
index d8ecc2e6a..2f674b38a 100644
--- a/lib/portage/dep/dep_check.py
+++ b/lib/portage/dep/dep_check.py
@@ -78,17 +78,15 @@ def _expand_new_virtuals(
newsplit.append(x)
continue
elif isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in {}".format(
- mysplit
- )
+ assert x, f"Normalization error, empty conjunction found in {mysplit}"
if is_disjunction:
assert (
x[0] != "||"
- ), "Normalization error, nested disjunction found in {}".format(mysplit)
+ ), f"Normalization error, nested disjunction found in {mysplit}"
else:
assert (
x[0] == "||"
- ), "Normalization error, nested conjunction found in {}".format(mysplit)
+ ), f"Normalization error, nested conjunction found in {mysplit}"
x_exp = _expand_new_virtuals(
x,
edebug,
@@ -109,9 +107,7 @@ def _expand_new_virtuals(
# must be a disjunction.
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in {}".format(
- x_exp,
- )
+ ), f"Normalization error, nested conjunction found in {x_exp}"
newsplit.extend(x[1:])
else:
newsplit.append(x)
@@ -255,7 +251,7 @@ def _expand_new_virtuals(
del mytrees["virt_parent"]
if not mycheck[0]:
- raise ParseError("{}: {} '{}'".format(pkg, mycheck[1], depstring))
+ raise ParseError(f"{pkg}: {mycheck[1]} '{depstring}'")
# Replace the original atom "x" with "virt_atom" which refers
# to the specific version of the virtual whose deps we're
@@ -348,7 +344,7 @@ def dep_zapdeps(
"""
if trees is None:
trees = portage.db
- writemsg("ZapDeps -- %s\n" % (use_binaries), 2)
+ writemsg(f"ZapDeps -- {use_binaries}\n", 2)
if not reduced or unreduced == ["||"] or dep_eval(reduced):
return []
@@ -474,7 +470,7 @@ def dep_zapdeps(
avail_pkg = [replacing]
if avail_pkg:
avail_pkg = avail_pkg[-1] # highest (ascending order)
- avail_slot = Atom("{}:{}".format(atom.cp, avail_pkg.slot))
+ avail_slot = Atom(f"{atom.cp}:{avail_pkg.slot}")
if not avail_pkg:
all_available = False
all_use_satisfied = False
@@ -529,7 +525,7 @@ def dep_zapdeps(
avail_pkg_use = avail_pkg_use[-1]
if avail_pkg_use != avail_pkg:
avail_pkg = avail_pkg_use
- avail_slot = Atom("{}:{}".format(atom.cp, avail_pkg.slot))
+ avail_slot = Atom(f"{atom.cp}:{avail_pkg.slot}")
if not replacing and downgrade_probe is not None and graph is not None:
highest_in_slot = mydbapi_match_pkgs(avail_slot)
@@ -908,7 +904,7 @@ def dep_check(
eapi=eapi,
)
except InvalidDependString as e:
- return [0, "{}".format(e)]
+ return [0, f"{e}"]
if mysplit == []:
# dependencies were reduced to nothing
@@ -933,7 +929,7 @@ def dep_check(
trees=trees,
)
except ParseError as e:
- return [0, "{}".format(e)]
+ return [0, f"{e}"]
dnf = False
if mysettings.local_config: # if not repoman
@@ -946,8 +942,8 @@ def dep_check(
return [0, _("Invalid token")]
writemsg("\n\n\n", 1)
- writemsg("mysplit: %s\n" % (mysplit), 1)
- writemsg("mysplit2: %s\n" % (mysplit2), 1)
+ writemsg(f"mysplit: {mysplit}\n", 1)
+ writemsg(f"mysplit2: {mysplit2}\n", 1)
selected_atoms = dep_zapdeps(
mysplit,
@@ -990,7 +986,7 @@ def _overlap_dnf(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in {}".format(dep_struct)
+ ), f"Normalization error, nested conjunction found in {dep_struct}"
order_map[id(x)] = i
prev_cp = None
for atom in _iter_flatten(x):
diff --git a/lib/portage/dep/soname/SonameAtom.py b/lib/portage/dep/soname/SonameAtom.py
index af651f7ef..327192ba9 100644
--- a/lib/portage/dep/soname/SonameAtom.py
+++ b/lib/portage/dep/soname/SonameAtom.py
@@ -43,14 +43,10 @@ class SonameAtom:
return True
def __repr__(self):
- return "{}('{}', '{}')".format(
- self.__class__.__name__,
- self.multilib_category,
- self.soname,
- )
+ return f"{self.__class__.__name__}('{self.multilib_category}', '{self.soname}')"
def __str__(self):
- return "{}: {}".format(self.multilib_category, self.soname)
+ return f"{self.multilib_category}: {self.soname}"
def match(self, pkg):
"""
diff --git a/lib/portage/dep/soname/multilib_category.py b/lib/portage/dep/soname/multilib_category.py
index bdb6c8c40..57bfcb4b0 100644
--- a/lib/portage/dep/soname/multilib_category.py
+++ b/lib/portage/dep/soname/multilib_category.py
@@ -197,6 +197,6 @@ def compute_multilib_category(elf_header):
if prefix is None or suffix is None:
category = None
else:
- category = "{}_{}".format(prefix, suffix)
+ category = f"{prefix}_{suffix}"
return category
diff --git a/lib/portage/elog/__init__.py b/lib/portage/elog/__init__.py
index 34fab999a..2d835ddb1 100644
--- a/lib/portage/elog/__init__.py
+++ b/lib/portage/elog/__init__.py
@@ -58,7 +58,7 @@ def _combine_logentries(logentries):
for msgtype, msgcontent in logentries[phase]:
if previous_type != msgtype:
previous_type = msgtype
- rValue.append("{}: {}".format(msgtype, phase))
+ rValue.append(f"{msgtype}: {phase}")
if isinstance(msgcontent, str):
rValue.append(msgcontent.rstrip("\n"))
else:
@@ -199,8 +199,8 @@ def elog_process(cpv, mysettings, phasefilter=None):
)
% str(s)
)
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
except AlarmSignal:
- writemsg("Timeout in elog_process for system '%s'\n" % s, noiselevel=-1)
+ writemsg(f"Timeout in elog_process for system '{s}'\n", noiselevel=-1)
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
diff --git a/lib/portage/elog/mod_mail.py b/lib/portage/elog/mod_mail.py
index 22083ac5c..47293afc9 100644
--- a/lib/portage/elog/mod_mail.py
+++ b/lib/portage/elog/mod_mail.py
@@ -41,4 +41,4 @@ def process(mysettings, key, logentries, fulltext):
try:
portage.mail.send_mail(mysettings, mymessage)
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
diff --git a/lib/portage/elog/mod_mail_summary.py b/lib/portage/elog/mod_mail_summary.py
index 17c123d4e..cd356911b 100644
--- a/lib/portage/elog/mod_mail_summary.py
+++ b/lib/portage/elog/mod_mail_summary.py
@@ -79,7 +79,7 @@ def _finalize(mysettings, items):
"process %(pid)d on host %(host)s:\n"
) % {"pid": portage.getpid(), "host": socket.getfqdn()}
for key in items:
- mybody += "- %s\n" % key
+ mybody += f"- {key}\n"
mymessage = portage.mail.create_message(
myfrom, myrecipient, mysubject, mybody, attachments=list(items.values())
@@ -97,6 +97,6 @@ def _finalize(mysettings, items):
"Timeout in finalize() for elog system 'mail_summary'\n", noiselevel=-1
)
except PortageException as e:
- writemsg("{}\n".format(e), noiselevel=-1)
+ writemsg(f"{e}\n", noiselevel=-1)
return
diff --git a/lib/portage/elog/mod_save.py b/lib/portage/elog/mod_save.py
index e526b331f..0cd49afed 100644
--- a/lib/portage/elog/mod_save.py
+++ b/lib/portage/elog/mod_save.py
@@ -64,7 +64,7 @@ def process(mysettings, key, logentries, fulltext):
) as elogfile:
elogfile.write(_unicode_decode(fulltext))
except OSError as e:
- func_call = "open('%s', 'w')" % elogfilename
+ func_call = f"open('{elogfilename}', 'w')"
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
elif e.errno == errno.EPERM:
diff --git a/lib/portage/elog/mod_save_summary.py b/lib/portage/elog/mod_save_summary.py
index f2913da5b..d2d10f475 100644
--- a/lib/portage/elog/mod_save_summary.py
+++ b/lib/portage/elog/mod_save_summary.py
@@ -47,7 +47,7 @@ def process(mysettings, key, logentries, fulltext):
errors="backslashreplace",
)
except OSError as e:
- func_call = "open('%s', 'a')" % elogfilename
+ func_call = f"open('{elogfilename}', 'a')"
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
elif e.errno == errno.EPERM:
diff --git a/lib/portage/elog/mod_syslog.py b/lib/portage/elog/mod_syslog.py
index 6e69a946a..b2e2583b4 100644
--- a/lib/portage/elog/mod_syslog.py
+++ b/lib/portage/elog/mod_syslog.py
@@ -27,6 +27,6 @@ def process(mysettings, key, logentries, fulltext):
if isinstance(msgcontent, str):
msgcontent = [msgcontent]
for line in msgcontent:
- line = "{}: {}: {}".format(key, phase, line)
+ line = f"{key}: {phase}: {line}"
syslog.syslog(_pri[msgtype], line.rstrip("\n"))
syslog.closelog()
diff --git a/lib/portage/emaint/main.py b/lib/portage/emaint/main.py
index a25701303..8f4743af9 100644
--- a/lib/portage/emaint/main.py
+++ b/lib/portage/emaint/main.py
@@ -72,30 +72,30 @@ def usage(module_controller):
_usage += "\n\n"
for line in textwrap.wrap(desc, 65):
- _usage += "%s\n" % line
+ _usage += f"{line}\n"
_usage += "\nCommands:\n"
- _usage += " %s" % "all".ljust(15) + "Perform all supported commands\n"
+ _usage += f" {'all'.ljust(15)}" + "Perform all supported commands\n"
textwrap.subsequent_indent = " ".ljust(17)
for mod in module_controller.module_names:
desc = textwrap.wrap(module_controller.get_description(mod), 65)
- _usage += " {}{}\n".format(mod.ljust(15), desc[0])
+ _usage += f" {mod.ljust(15)}{desc[0]}\n"
for d in desc[1:]:
- _usage += " {}{}\n".format(" ".ljust(15), d)
+ _usage += f" {' '.ljust(15)}{d}\n"
return _usage
def module_opts(module_controller, module):
- _usage = " %s module options:\n" % module
+ _usage = f" {module} module options:\n"
opts = module_controller.get_func_descriptions(module)
if opts == {}:
opts = DEFAULT_OPTIONS
for opt in sorted(opts):
optd = opts[opt]
if "short" in optd:
- opto = " {}, {}".format(optd["short"], optd["long"])
+ opto = f" {optd['short']}, {optd['long']}"
else:
- opto = " {}".format(optd["long"])
- _usage += "{} {}\n".format(opto.ljust(15), optd["help"])
+ opto = f" {optd['long']}"
+ _usage += f"{opto.ljust(15)} {optd['help']}\n"
_usage += "\n"
return _usage
@@ -195,7 +195,7 @@ def emaint_main(myargv):
if len(args) != 1:
parser.error("Incorrect number of arguments")
if args[0] not in module_names:
- parser.error("%s target is not a known target" % args[0])
+ parser.error(f"{args[0]} target is not a known target")
check_opt = None
func = status = long_action = None
@@ -206,7 +206,7 @@ def emaint_main(myargv):
if opt.status and getattr(options, opt.target, False):
if long_action is not None:
parser.error(
- "--{} and {} are exclusive options".format(long_action, opt.long)
+ f"--{long_action} and {opt.long} are exclusive options"
)
status = opt.status
func = opt.func
@@ -228,8 +228,7 @@ def emaint_main(myargv):
tasks = [module_controller.get_class(args[0])]
else:
portage.util.writemsg(
- "\nERROR: module '%s' does not have option '--%s'\n\n"
- % (args[0], long_action),
+ f"\nERROR: module '{args[0]}' does not have option '--{long_action}'\n\n",
noiselevel=-1,
)
portage.util.writemsg(module_opts(module_controller, args[0]), noiselevel=-1)
diff --git a/lib/portage/emaint/modules/binhost/binhost.py b/lib/portage/emaint/modules/binhost/binhost.py
index 9d924f7e1..e8c4e7684 100644
--- a/lib/portage/emaint/modules/binhost/binhost.py
+++ b/lib/portage/emaint/modules/binhost/binhost.py
@@ -86,9 +86,9 @@ class BinhostHandler:
missing.append(cpv)
if onProgress:
onProgress(maxval, i + 1)
- errors = ["'%s' is not in Packages" % cpv for cpv in missing]
+ errors = [f"'{cpv}' is not in Packages" for cpv in missing]
for cpv in stale:
- errors.append("'%s' is not in the repository" % cpv)
+ errors.append(f"'{cpv}' is not in the repository")
if errors:
return (False, errors)
return (True, None)
@@ -158,7 +158,7 @@ class BinhostHandler:
bintree._eval_use_flags(cpv, d)
except portage.exception.InvalidDependString:
writemsg(
- "!!! Invalid binary package: '%s'\n" % bintree.getname(cpv),
+ f"!!! Invalid binary package: '{bintree.getname(cpv)}'\n",
noiselevel=-1,
)
else:
diff --git a/lib/portage/emaint/modules/config/config.py b/lib/portage/emaint/modules/config/config.py
index 1129e9d91..8a4d66224 100644
--- a/lib/portage/emaint/modules/config/config.py
+++ b/lib/portage/emaint/modules/config/config.py
@@ -33,7 +33,7 @@ class CleanConfig:
keys = sorted(configs)
for config in keys:
if not os.path.exists(config):
- messages.append(" %s" % config)
+ messages.append(f" {config}")
if onProgress:
onProgress(maxval, i + 1)
i += 1
@@ -61,7 +61,7 @@ class CleanConfig:
if not os.path.exists(full_path):
modified = True
configs.pop(config)
- messages.append(" %s" % config)
+ messages.append(f" {config}")
if onProgress:
onProgress(maxval, i + 1)
i += 1
diff --git a/lib/portage/emaint/modules/logs/logs.py b/lib/portage/emaint/modules/logs/logs.py
index 26ffcd368..9bb99d530 100644
--- a/lib/portage/emaint/modules/logs/logs.py
+++ b/lib/portage/emaint/modules/logs/logs.py
@@ -63,7 +63,7 @@ class CleanLogs:
clean_cmd.remove("-mtime")
clean_cmd.pop(i)
else:
- clean_cmd[clean_cmd.index("-mtime") + 1] = "+%s" % str(num_of_days)
+ clean_cmd[clean_cmd.index("-mtime") + 1] = f"+{str(num_of_days)}"
if pretend:
if "-delete" in clean_cmd:
clean_cmd.remove("-delete")
@@ -98,7 +98,7 @@ class CleanLogs:
if rval in ERROR_MESSAGES:
msg.append(ERROR_MESSAGES[rval])
else:
- msg.append("PORTAGE_LOGDIR_CLEAN command returned %s" % rval)
+ msg.append(f"PORTAGE_LOGDIR_CLEAN command returned {rval}")
msg.append(
"See the make.conf(5) man page for "
"PORTAGE_LOGDIR_CLEAN usage instructions."
diff --git a/lib/portage/emaint/modules/merges/merges.py b/lib/portage/emaint/modules/merges/merges.py
index a45314cce..8b9ccbf0e 100644
--- a/lib/portage/emaint/modules/merges/merges.py
+++ b/lib/portage/emaint/modules/merges/merges.py
@@ -32,7 +32,7 @@ class TrackingFile:
@type failed_pkgs: dict
"""
tracking_path = self._tracking_path
- lines = ["{} {}".format(pkg, mtime) for pkg, mtime in failed_pkgs.items()]
+ lines = [f"{pkg} {mtime}" for pkg, mtime in failed_pkgs.items()]
portage.util.write_atomic(tracking_path, "\n".join(lines))
def load(self):
@@ -163,14 +163,14 @@ class MergesHandler:
portdb = portage.db[portage.root]["porttree"].dbapi
for failed_pkg in failed_pkgs:
# validate pkg name
- pkg_name = "%s" % failed_pkg.replace(MERGING_IDENTIFIER, "")
- pkg_atom = "=%s" % pkg_name
+ pkg_name = f"{failed_pkg.replace(MERGING_IDENTIFIER, '')}"
+ pkg_atom = f"={pkg_name}"
if not isvalidatom(pkg_atom):
- pkg_invalid_entries.add("'%s' is an invalid package atom." % pkg_atom)
+ pkg_invalid_entries.add(f"'{pkg_atom}' is an invalid package atom.")
if not portdb.cpv_exists(pkg_name):
pkg_invalid_entries.add(
- "'%s' does not exist in the ebuild repository." % pkg_name
+ f"'{pkg_name}' does not exist in the ebuild repository."
)
pkg_atoms.add(pkg_atom)
@@ -218,9 +218,9 @@ class MergesHandler:
if output:
results.append(output)
if proc.returncode != os.EX_OK:
- emerge_status = "Failed to emerge '%s'" % (" ".join(pkg_atoms))
+ emerge_status = f"Failed to emerge '{' '.join(pkg_atoms)}'"
else:
- emerge_status = "Successfully emerged '%s'" % (" ".join(pkg_atoms))
+ emerge_status = f"Successfully emerged '{' '.join(pkg_atoms)}'"
results.append(emerge_status)
return results
@@ -231,7 +231,7 @@ class MergesHandler:
errors = []
for pkg, mtime in failed_pkgs.items():
mtime_str = time.ctime(int(mtime))
- errors.append("'{}' failed to merge on '{}'".format(pkg, mtime_str))
+ errors.append(f"'{pkg}' failed to merge on '{mtime_str}'")
if errors:
return (False, errors)
return (True, None)
@@ -252,7 +252,7 @@ class MergesHandler:
try:
self._tracking_file.save(failed_pkgs)
except OSError as ex:
- errors = ["Unable to save failed merges to tracking file: %s\n" % str(ex)]
+ errors = [f"Unable to save failed merges to tracking file: {str(ex)}\n"]
errors.append(", ".join(sorted(failed_pkgs)))
return (False, errors)
self._remove_failed_dirs(failed_pkgs)
@@ -261,13 +261,13 @@ class MergesHandler:
)
# list any new failed merges
for pkg in sorted(self._scan()):
- results.append("'%s' still found as a failed merge." % pkg)
+ results.append(f"'{pkg}' still found as a failed merge.")
# reload config and remove successful packages from tracking file
portage._reset_legacy_globals()
vardb = portage.db[portage.root]["vartree"].dbapi
still_failed_pkgs = {}
for pkg, mtime in failed_pkgs.items():
- pkg_name = "%s" % pkg.replace(MERGING_IDENTIFIER, "")
+ pkg_name = f"{pkg.replace(MERGING_IDENTIFIER, '')}"
if not vardb.cpv_exists(pkg_name):
still_failed_pkgs[pkg] = mtime
self._tracking_file.save(still_failed_pkgs)
diff --git a/lib/portage/emaint/modules/move/move.py b/lib/portage/emaint/modules/move/move.py
index 5c2db5ffa..328f3b36a 100644
--- a/lib/portage/emaint/modules/move/move.py
+++ b/lib/portage/emaint/modules/move/move.py
@@ -82,13 +82,13 @@ class MoveHandler:
# If this update has already been applied to the same
# package build then silently continue.
for maybe_applied in match(
- "={}".format(cpv.replace(cpv.cp, str(newcp), 1))
+ f"={cpv.replace(cpv.cp, str(newcp), 1)}"
):
if maybe_applied.build_time == build_time:
break
else:
errors.append(
- "'{}' moved to '{}'".format(cpv, newcp)
+ f"'{cpv}' moved to '{newcp}'"
)
elif update_cmd[0] == "slotmove":
pkg, origslot, newslot = update_cmd[1:]
@@ -100,8 +100,7 @@ class MoveHandler:
continue
if repo_match(cpv.repo):
errors.append(
- "'%s' slot moved from '%s' to '%s'"
- % (cpv, origslot, newslot)
+ f"'{cpv}' slot moved from '{origslot}' to '{newslot}'"
)
if onProgress:
onProgress(0, 0)
@@ -135,7 +134,7 @@ class MoveHandler:
continue
metadata_updates = portage.update_dbentries(updates, metadata, parent=pkg)
if metadata_updates:
- errors.append("'%s' has outdated metadata" % cpv)
+ errors.append(f"'{cpv}' has outdated metadata")
if onProgress:
onProgress(maxval, i + 1)
diff --git a/lib/portage/emaint/modules/resume/resume.py b/lib/portage/emaint/modules/resume/resume.py
index 1203519c0..c80294f59 100644
--- a/lib/portage/emaint/modules/resume/resume.py
+++ b/lib/portage/emaint/modules/resume/resume.py
@@ -26,14 +26,14 @@ class CleanResume:
if d is None:
continue
if not isinstance(d, dict):
- messages.append("unrecognized resume list: '%s'" % k)
+ messages.append(f"unrecognized resume list: '{k}'")
continue
mergelist = d.get("mergelist")
if mergelist is None or not hasattr(mergelist, "__len__"):
- messages.append("unrecognized resume list: '%s'" % k)
+ messages.append(f"unrecognized resume list: '{k}'")
continue
messages.append(
- "resume list '%s' contains %d packages" % (k, len(mergelist))
+ f"resume list '{k}' contains {len(mergelist)} packages"
)
finally:
if onProgress:
diff --git a/lib/portage/emaint/modules/sync/sync.py b/lib/portage/emaint/modules/sync/sync.py
index 735e7aa00..fe68ac995 100644
--- a/lib/portage/emaint/modules/sync/sync.py
+++ b/lib/portage/emaint/modules/sync/sync.py
@@ -359,8 +359,7 @@ class SyncRepos:
messages = []
for rval in rvals:
messages.append(
- "Action: %s for repo: %s, returned code = %s"
- % (action, rval[0], rval[1])
+ f"Action: {action} for repo: {rval[0]}, returned code = {rval[1]}"
)
return messages
diff --git a/lib/portage/emaint/modules/world/world.py b/lib/portage/emaint/modules/world/world.py
index b0ddf427e..edb5a0f0a 100644
--- a/lib/portage/emaint/modules/world/world.py
+++ b/lib/portage/emaint/modules/world/world.py
@@ -64,8 +64,8 @@ class WorldHandler:
self._check_world(onProgress)
errors = []
if self.found:
- errors += ["'%s' is not a valid atom" % x for x in self.invalid]
- errors += ["'%s' is not installed" % x for x in self.not_installed]
+ errors += [f"'{x}' is not a valid atom" for x in self.invalid]
+ errors += [f"'{x}' is not installed" for x in self.not_installed]
else:
errors.append(self.world_file + " could not be opened for reading")
if errors:
@@ -87,7 +87,7 @@ class WorldHandler:
world_set.replace(self.okay)
except portage.exception.PortageException:
errors.append(
- "%s could not be opened for writing" % self.world_file
+ f"{self.world_file} could not be opened for writing"
)
if errors:
return (False, errors)
diff --git a/lib/portage/getbinpkg.py b/lib/portage/getbinpkg.py
index c35a8fa5a..135cd1310 100644
--- a/lib/portage/getbinpkg.py
+++ b/lib/portage/getbinpkg.py
@@ -616,7 +616,7 @@ def dir_get_metadata(
except tuple(_all_errors) as e:
# ftplib.FTP(host) can raise errors like this:
# socket.error: (111, 'Connection refused')
- sys.stderr.write("!!! {}\n".format(e))
+ sys.stderr.write(f"!!! {e}\n")
return {}
out = sys.stdout
@@ -688,7 +688,7 @@ def dir_get_metadata(
mytempfile.seek(0)
data = mytempfile.read()
except ValueError as e:
- sys.stderr.write("--- %s\n" % str(e))
+ sys.stderr.write(f"--- {str(e)}\n")
if trynum < 3:
sys.stderr.write(_("Retrying...\n"))
sys.stderr.flush()
diff --git a/lib/portage/gpkg.py b/lib/portage/gpkg.py
index 34c9bf845..23f6e0c61 100644
--- a/lib/portage/gpkg.py
+++ b/lib/portage/gpkg.py
@@ -1375,7 +1375,7 @@ class gpkg:
if path.startswith(root_dir):
arcname = "image/" + path[len(root_dir) :]
else:
- raise ValueError("invalid root argument: '%s'" % root_dir)
+ raise ValueError(f"invalid root argument: '{root_dir}'")
live_path = path
if (
"dir" == contents_type
@@ -1639,7 +1639,7 @@ class gpkg:
for f in container_files:
if f in container_files_unique:
raise InvalidBinaryPackageFormat(
- "Duplicate file %s exist, potential attack?" % f
+ f"Duplicate file {f} exist, potential attack?"
)
container_files_unique.append(f)
@@ -1817,7 +1817,7 @@ class gpkg:
compressor = _compressors[compression]
if mode not in compressor:
- raise InvalidCompressionMethod("{}: {}".format(compression, mode))
+ raise InvalidCompressionMethod(f"{compression}: {mode}")
if mode == "compress" and (
self.settings.get(f"BINPKG_COMPRESS_FLAGS_{compression.upper()}", None)
@@ -1960,7 +1960,7 @@ class gpkg:
try:
d = _unicode_decode(d, encoding=_encodings["fs"], errors="strict")
except UnicodeDecodeError as err:
- writemsg(colorize("BAD", "\n*** %s\n\n" % err), noiselevel=-1)
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
raise
d = os.path.join(parent, d)
@@ -1985,7 +1985,7 @@ class gpkg:
try:
f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
except UnicodeDecodeError as err:
- writemsg(colorize("BAD", "\n*** %s\n\n" % err), noiselevel=-1)
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
raise
filename_length = len(
@@ -2065,7 +2065,7 @@ class gpkg:
try:
path = _unicode_decode(path, encoding=_encodings["fs"], errors="strict")
except UnicodeDecodeError as err:
- writemsg(colorize("BAD", "\n*** %s\n\n" % err), noiselevel=-1)
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
raise
d, f = os.path.split(path)
diff --git a/lib/portage/locks.py b/lib/portage/locks.py
index baf17d7c6..45840faac 100644
--- a/lib/portage/locks.py
+++ b/lib/portage/locks.py
@@ -343,7 +343,7 @@ def _lockfile_iteration(
% lockfilename
)
writemsg(
- "\n!!! {}: {}\n".format(context_desc, e), noiselevel=-1
+ f"\n!!! {context_desc}: {e}\n", noiselevel=-1
)
time.sleep(_HARDLINK_POLL_LATENCY)
@@ -625,7 +625,7 @@ def hardlink_lockfile(
if e.errno in (errno.ENOENT, errno.ESTALE):
pass
else:
- func_call = "unlink('%s')" % myhardlock
+ func_call = f"unlink('{myhardlock}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
@@ -638,7 +638,7 @@ def hardlink_lockfile(
try:
myfd = os.open(lockfilename, os.O_CREAT | os.O_RDWR, 0o660)
except OSError as e:
- func_call = "open('%s')" % lockfilename
+ func_call = f"open('{lockfilename}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
@@ -686,7 +686,7 @@ def hardlink_lockfile(
try:
os.link(lockfilename, myhardlock)
except OSError as e:
- func_call = "link('{}', '{}')".format(lockfilename, myhardlock)
+ func_call = f"link('{lockfilename}', '{myhardlock}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
diff --git a/lib/portage/output.py b/lib/portage/output.py
index 9d8993d5f..64fbb97d1 100644
--- a/lib/portage/output.py
+++ b/lib/portage/output.py
@@ -284,7 +284,7 @@ def xtermTitle(mystr, raw=False):
if len(mystr) > _max_xtermTitle_len:
mystr = mystr[:_max_xtermTitle_len]
if not raw:
- mystr = "\x1b]0;%s\x07" % mystr
+ mystr = f"\x1b]0;{mystr}\x07"
# avoid potential UnicodeEncodeError
mystr = _unicode_encode(
@@ -386,7 +386,7 @@ def colormap():
"QAWARN",
"WARN",
):
- mycolors.append("PORTAGE_COLOR_{}=$'{}'".format(c, style_to_ansi_code(c)))
+ mycolors.append(f"PORTAGE_COLOR_{c}=$'{style_to_ansi_code(c)}'")
return "\n".join(mycolors)
@@ -811,7 +811,7 @@ class ProgressBar:
"%s" % self._label if self._label else "",
)
if len(self._desc) > self._desc_max_length: # truncate if too long
- self._desc = "%s..." % self._desc[: self._desc_max_length - 3]
+ self._desc = f"{self._desc[:self._desc_max_length - 3]}..."
if len(self._desc):
self._desc = self._desc.ljust(self._desc_max_length)
@@ -912,7 +912,7 @@ class TermProgressBar(ProgressBar):
percentage = 100 * curval // maxval
max_bar_width = bar_space - 1
_percent = ("%d%% " % percentage).rjust(percentage_str_width)
- image = "{}{}".format(self._desc, _percent)
+ image = f"{self._desc}{_percent}"
if cols < min_columns:
return image
@@ -956,7 +956,7 @@ def _init(config_root="/"):
try:
_parse_color_map(
config_root=config_root,
- onerror=lambda e: writemsg("%s\n" % str(e), noiselevel=-1),
+ onerror=lambda e: writemsg(f"{str(e)}\n", noiselevel=-1),
)
except FileNotFound:
pass
@@ -964,7 +964,7 @@ def _init(config_root="/"):
writemsg(_("Permission denied: '%s'\n") % str(e), noiselevel=-1)
del e
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
del e
diff --git a/lib/portage/package/ebuild/_config/LocationsManager.py b/lib/portage/package/ebuild/_config/LocationsManager.py
index 11f383ad8..6c54b8056 100644
--- a/lib/portage/package/ebuild/_config/LocationsManager.py
+++ b/lib/portage/package/ebuild/_config/LocationsManager.py
@@ -160,7 +160,7 @@ class LocationsManager:
_("!!! Unable to parse profile: '%s'\n") % self.profile_path,
noiselevel=-1,
)
- writemsg("!!! ParseError: %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! ParseError: {str(e)}\n", noiselevel=-1)
self.profiles = []
self.profiles_complex = []
diff --git a/lib/portage/package/ebuild/_ipc/QueryCommand.py b/lib/portage/package/ebuild/_ipc/QueryCommand.py
index def66c0be..66463e194 100644
--- a/lib/portage/package/ebuild/_ipc/QueryCommand.py
+++ b/lib/portage/package/ebuild/_ipc/QueryCommand.py
@@ -53,7 +53,7 @@ class QueryCommand(IpcCommand):
root = normalize_path(root or os.sep).rstrip(os.sep) + os.sep
if root not in db:
- return ("", "{}: Invalid ROOT: {}\n".format(cmd, root), 3)
+ return ("", f"{cmd}: Invalid ROOT: {root}\n", 3)
portdb = db[root]["porttree"].dbapi
vardb = db[root]["vartree"].dbapi
@@ -63,12 +63,12 @@ class QueryCommand(IpcCommand):
try:
atom = Atom(args[0], allow_repo=allow_repo)
except InvalidAtom:
- return ("", "{}: Invalid atom: {}\n".format(cmd, args[0]), 2)
+ return ("", f"{cmd}: Invalid atom: {args[0]}\n", 2)
try:
atom = Atom(args[0], allow_repo=allow_repo, eapi=eapi)
except InvalidAtom as e:
- warnings.append("QA Notice: {}: {}".format(cmd, e))
+ warnings.append(f"QA Notice: {cmd}: {e}")
use = self.settings.get("PORTAGE_BUILT_USE")
if use is None:
@@ -88,7 +88,7 @@ class QueryCommand(IpcCommand):
return ("", warnings_str, returncode)
if cmd == "best_version":
m = best(vardb.match(atom))
- return ("%s\n" % m, warnings_str, 0)
+ return (f"{m}\n", warnings_str, 0)
if cmd in (
"master_repositories",
"repository_path",
@@ -98,7 +98,7 @@ class QueryCommand(IpcCommand):
):
repo = _repo_name_re.match(args[0])
if repo is None:
- return ("", "{}: Invalid repository: {}\n".format(cmd, args[0]), 2)
+ return ("", f"{cmd}: Invalid repository: {args[0]}\n", 2)
try:
repo = portdb.repositories[args[0]]
except KeyError:
@@ -106,15 +106,15 @@ class QueryCommand(IpcCommand):
if cmd == "master_repositories":
return (
- "%s\n" % " ".join(x.name for x in repo.masters),
+ f"{' '.join(x.name for x in repo.masters)}\n",
warnings_str,
0,
)
if cmd == "repository_path":
- return ("%s\n" % repo.location, warnings_str, 0)
+ return (f"{repo.location}\n", warnings_str, 0)
if cmd == "available_eclasses":
return (
- "%s\n" % " ".join(sorted(repo.eclass_db.eclasses)),
+ f"{' '.join(sorted(repo.eclass_db.eclasses))}\n",
warnings_str,
0,
)
@@ -123,7 +123,7 @@ class QueryCommand(IpcCommand):
eclass = repo.eclass_db.eclasses[args[1]]
except KeyError:
return ("", warnings_str, 1)
- return ("%s\n" % eclass.location, warnings_str, 0)
+ return (f"{eclass.location}\n", warnings_str, 0)
if cmd == "license_path":
paths = reversed(
[
@@ -133,9 +133,9 @@ class QueryCommand(IpcCommand):
)
for path in paths:
if os.path.exists(path):
- return ("%s\n" % path, warnings_str, 0)
+ return (f"{path}\n", warnings_str, 0)
return ("", warnings_str, 1)
- return ("", "Invalid command: %s\n" % cmd, 3)
+ return ("", f"Invalid command: {cmd}\n", 3)
def _elog(self, elog_funcname, lines):
"""
diff --git a/lib/portage/package/ebuild/_metadata_invalid.py b/lib/portage/package/ebuild/_metadata_invalid.py
index b42adbcf2..426ad851f 100644
--- a/lib/portage/package/ebuild/_metadata_invalid.py
+++ b/lib/portage/package/ebuild/_metadata_invalid.py
@@ -27,7 +27,7 @@ def eapi_invalid(self, cpv, repo_name, settings, eapi_var, eapi_parsed, eapi_lin
# None means the assignment was not found, while an
# empty string indicates an (invalid) empty assingment.
msg.append(
- "\tvalid EAPI assignment must" " occur on or before line: %s" % eapi_lineno
+ f"\tvalid EAPI assignment must occur on or before line: {eapi_lineno}"
)
else:
msg.append(
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
index 7bf5dd141..68bb5dce3 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
@@ -38,11 +38,7 @@ class ManifestProcess(ForkProcess):
modified = mf.write(sign=False)
except PermissionDenied as e:
portage.writemsg(
- "!!! %s: %s\n"
- % (
- _("Permission Denied"),
- e,
- ),
+ f"!!! {_('Permission Denied')}: {e}\n",
noiselevel=-1,
)
return 1
diff --git a/lib/portage/package/ebuild/config.py b/lib/portage/package/ebuild/config.py
index 5e59932cf..b605b8e9a 100644
--- a/lib/portage/package/ebuild/config.py
+++ b/lib/portage/package/ebuild/config.py
@@ -133,8 +133,7 @@ def autouse(myvartree, use_cache=1, mysettings=None):
def check_config_instance(test):
if not isinstance(test, config):
raise TypeError(
- "Invalid type for config object: %s (should be %s)"
- % (test.__class__, config)
+ f"Invalid type for config object: {test.__class__} (should be {config})"
)
@@ -146,7 +145,7 @@ def best_from_dict(key, top_dict, key_order, EmptyOnError=1, FullCopy=1, AllowEm
return top_dict[x][key]
if EmptyOnError:
return ""
- raise KeyError("Key not found in list; '%s'" % key)
+ raise KeyError(f"Key not found in list; '{key}'")
def _lazy_iuse_regex(iuse_implicit):
@@ -158,7 +157,7 @@ def _lazy_iuse_regex(iuse_implicit):
# Escape anything except ".*" which is supposed to pass through from
# _get_implicit_iuse().
regex = sorted(re.escape(x) for x in iuse_implicit)
- regex = "^(%s)$" % "|".join(regex)
+ regex = f"^({'|'.join(regex)})$"
regex = regex.replace("\\.\\*", ".*")
return regex
@@ -166,7 +165,7 @@ def _lazy_iuse_regex(iuse_implicit):
class _iuse_implicit_match_cache:
def __init__(self, settings):
self._iuse_implicit_re = re.compile(
- "^(%s)$" % "|".join(settings._get_implicit_iuse())
+ f"^({'|'.join(settings._get_implicit_iuse())})$"
)
self._cache = {}
@@ -553,7 +552,7 @@ class config:
and user_auxdbmodule in self._module_aliases
):
warnings.warn(
- "'{}' is deprecated: {}".format(user_auxdbmodule, modules_file)
+ f"'{user_auxdbmodule}' is deprecated: {modules_file}"
)
self.modules["default"] = {
@@ -1337,7 +1336,7 @@ class config:
_("!!! Directory initialization failed: '%s'\n") % mydir,
noiselevel=-1,
)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
@property
def _keywords_manager(self):
@@ -2182,7 +2181,7 @@ class config:
"fi; "
"[[ -n ${___PORTAGE_IUSE_HASH[$1]} ]]; "
"}"
- ) % " ".join('["%s"]=1' % x for x in portage_iuse)
+ ) % " ".join(f'["{x}"]=1' for x in portage_iuse)
else:
portage_iuse = self._get_implicit_iuse()
portage_iuse.update(explicit_iuse)
@@ -2300,7 +2299,7 @@ class config:
if k in protected_keys or k in non_user_variables:
writemsg(
"!!! Illegal variable "
- + "'{}' assigned in '{}'\n".format(k, penvfile),
+ + f"'{k}' assigned in '{penvfile}'\n",
noiselevel=-1,
)
elif k in incrementals:
@@ -3183,7 +3182,7 @@ class config:
return ":".join(value)
if mykey == "PORTAGE_GID":
- return "%s" % portage_gid
+ return f"{portage_gid}"
for d in self.lookuplist:
try:
@@ -3256,8 +3255,7 @@ class config:
"set a value; will be thrown away at reset() time"
if not isinstance(myvalue, str):
raise ValueError(
- "Invalid type being used as a value: '%s': '%s'"
- % (str(mykey), str(myvalue))
+ f"Invalid type being used as a value: '{str(mykey)}': '{str(myvalue)}'"
)
# Avoid potential UnicodeDecodeError exceptions later.
diff --git a/lib/portage/package/ebuild/digestcheck.py b/lib/portage/package/ebuild/digestcheck.py
index 3fe64550c..cbd57fb58 100644
--- a/lib/portage/package/ebuild/digestcheck.py
+++ b/lib/portage/package/ebuild/digestcheck.py
@@ -80,7 +80,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
except DigestException as e:
eout.eend(1)
writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
- writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
+ writemsg(f"!!! {e.value[0]}\n", noiselevel=-1)
writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
diff --git a/lib/portage/package/ebuild/digestgen.py b/lib/portage/package/ebuild/digestgen.py
index 7d2f28c96..b7a570da9 100644
--- a/lib/portage/package/ebuild/digestgen.py
+++ b/lib/portage/package/ebuild/digestgen.py
@@ -56,7 +56,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
for myfile in fetchlist_dict[cpv]:
distfiles_map.setdefault(myfile, []).append(cpv)
except InvalidDependString as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
del e
return 0
mytree = os.path.dirname(os.path.dirname(mysettings["O"]))
@@ -171,7 +171,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
# digest does not match.
cmd = colorize(
"INFORM",
- "ebuild --force %s manifest" % os.path.basename(myebuild),
+ f"ebuild --force {os.path.basename(myebuild)} manifest",
)
writemsg(
(
@@ -181,7 +181,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
)
% myfile
)
- + "!!! %s\n" % cmd,
+ + f"!!! {cmd}\n",
noiselevel=-1,
)
return 0
@@ -227,7 +227,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
pv = pkg_key.split("/")[1]
for filename in auto_assumed:
if filename in fetchlist:
- writemsg_stdout(" {}::{}\n".format(pv, filename))
+                writemsg_stdout(f"  {pv}::{filename}\n")
return 1
finally:
portage._doebuild_manifest_exempt_depend -= 1
diff --git a/lib/portage/package/ebuild/doebuild.py b/lib/portage/package/ebuild/doebuild.py
index d29451efa..95a29505c 100644
--- a/lib/portage/package/ebuild/doebuild.py
+++ b/lib/portage/package/ebuild/doebuild.py
@@ -642,8 +642,7 @@ def doebuild_environment(
except KeyError as e:
if binpkg_compression:
writemsg(
- "Warning: Invalid or unsupported compression method: %s\n"
- % e.args[0]
+ f"Warning: Invalid or unsupported compression method: {e.args[0]}\n"
)
else:
# Empty BINPKG_COMPRESS disables compression.
@@ -670,8 +669,7 @@ def doebuild_environment(
)[0]
except IndexError as e:
writemsg(
- "Warning: Invalid or unsupported compression method: %s\n"
- % e.args[0]
+ f"Warning: Invalid or unsupported compression method: {e.args[0]}\n"
)
else:
if find_binary(compression_binary) is None:
@@ -858,7 +856,7 @@ def doebuild(
if mydo not in validcommands:
validcommands.sort()
writemsg(
- "!!! doebuild: '%s' is not one of the following valid commands:" % mydo,
+ f"!!! doebuild: '{mydo}' is not one of the following valid commands:",
noiselevel=-1,
)
for vcount in range(len(validcommands)):
@@ -886,7 +884,7 @@ def doebuild(
if mydo not in clean_phases and not os.path.exists(myebuild):
writemsg(
- "!!! doebuild: {} not found for {}\n".format(myebuild, mydo), noiselevel=-1
+ f"!!! doebuild: {myebuild} not found for {mydo}\n", noiselevel=-1
)
return 1
@@ -957,7 +955,7 @@ def doebuild(
except DigestException as e:
out = portage.output.EOutput()
out.eerror(_("Digest verification failed:"))
- out.eerror("%s" % e.value[0])
+ out.eerror(f"{e.value[0]}")
out.eerror(_("Reason: %s") % e.value[1])
out.eerror(_("Got: %s") % e.value[2])
out.eerror(_("Expected: %s") % e.value[3])
@@ -1127,7 +1125,7 @@ def doebuild(
newstuff = True
else:
for x in alist:
- writemsg_stdout(">>> Checking %s's mtime...\n" % x)
+ writemsg_stdout(f">>> Checking {x}'s mtime...\n")
try:
x_st = os.stat(os.path.join(mysettings["DISTDIR"], x))
except OSError:
@@ -1230,9 +1228,7 @@ def doebuild(
else:
vardb = vartree.dbapi
cpv = mysettings.mycpv
- cpv_slot = "{}{}{}".format(
- cpv.cp, portage.dep._slot_separator, cpv.slot
- )
+ cpv_slot = f"{cpv.cp}{portage.dep._slot_separator}{cpv.slot}"
mysettings["REPLACING_VERSIONS"] = " ".join(
{
portage.versions.cpv_getversion(match)
@@ -1301,7 +1297,7 @@ def doebuild(
alist = _parse_uri_map(mysettings.mycpv, metadata, use=use)
aalist = _parse_uri_map(mysettings.mycpv, metadata)
except InvalidDependString as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(_("!!! Invalid SRC_URI for '%s'.\n") % mycpv, noiselevel=-1)
del e
return 1
@@ -1450,7 +1446,7 @@ def doebuild(
)
portage.util.ensure_dirs(parent_dir)
if not os.access(parent_dir, os.W_OK):
- raise PermissionDenied("access('%s', os.W_OK)" % parent_dir)
+ raise PermissionDenied(f"access('{parent_dir}', os.W_OK)")
retval = spawnebuild(
mydo,
actionmap,
@@ -1475,10 +1471,10 @@ def doebuild(
mysettings["PORTAGE_BUILDDIR"], "build-info"
)
build_info = {
- "BINPKGMD5": "%s\n" % pkg._metadata["MD5"],
+ "BINPKGMD5": f"{pkg._metadata['MD5']}\n",
}
if pkg.build_id is not None:
- build_info["BUILD_ID"] = "%s\n" % pkg.build_id
+ build_info["BUILD_ID"] = f"{pkg.build_id}\n"
for k, v in build_info.items():
with open(
_unicode_encode(
@@ -1833,7 +1829,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
if pkg.invalid:
for k, v in pkg.invalid.items():
for msg in v:
- msgs.append(" {}\n".format(msg))
+ msgs.append(f" {msg}\n")
if msgs:
portage.util.writemsg_level(
@@ -1867,7 +1863,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
),
noiselevel=-1,
)
- writemsg(" %s\n" % reduced_noise, noiselevel=-1)
+ writemsg(f" {reduced_noise}\n", noiselevel=-1)
normalized_required_use = " ".join(pkg._metadata["REQUIRED_USE"].split())
if reduced_noise != normalized_required_use:
writemsg(
@@ -1879,7 +1875,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
noiselevel=-1,
)
writemsg(
- " %s\n" % human_readable_required_use(normalized_required_use),
+ f" {human_readable_required_use(normalized_required_use)}\n",
noiselevel=-1,
)
writemsg("\n", noiselevel=-1)
@@ -2076,7 +2072,7 @@ def spawn(
free = True
if mysettings.mycpv is not None:
- keywords["opt_name"] = "[%s]" % mysettings.mycpv
+ keywords["opt_name"] = f"[{mysettings.mycpv}]"
else:
keywords["opt_name"] = "[{}/{}]".format(
mysettings.get("CATEGORY", ""),
@@ -2180,7 +2176,7 @@ def spawnebuild(
if not (mydo == "install" and "noauto" in mysettings.features):
check_file = os.path.join(
- mysettings["PORTAGE_BUILDDIR"], ".%sed" % mydo.rstrip("e")
+ mysettings["PORTAGE_BUILDDIR"], f".{mydo.rstrip('e')}ed"
)
if os.path.exists(check_file):
writemsg_stdout(
@@ -2314,10 +2310,10 @@ def _check_build_log(mysettings, out=None):
qa_configure_opts = qa_configure_opts.split()
if qa_configure_opts:
if len(qa_configure_opts) > 1:
- qa_configure_opts = "|".join("(%s)" % x for x in qa_configure_opts)
- qa_configure_opts = "^(%s)$" % qa_configure_opts
+ qa_configure_opts = "|".join(f"({x})" for x in qa_configure_opts)
+ qa_configure_opts = f"^({qa_configure_opts})$"
else:
- qa_configure_opts = "^%s$" % qa_configure_opts[0]
+ qa_configure_opts = f"^{qa_configure_opts[0]}$"
qa_configure_opts = re.compile(qa_configure_opts)
qa_am_maintainer_mode = []
@@ -2344,10 +2340,10 @@ def _check_build_log(mysettings, out=None):
if qa_am_maintainer_mode:
if len(qa_am_maintainer_mode) > 1:
- qa_am_maintainer_mode = "|".join("(%s)" % x for x in qa_am_maintainer_mode)
- qa_am_maintainer_mode = "^(%s)$" % qa_am_maintainer_mode
+ qa_am_maintainer_mode = "|".join(f"({x})" for x in qa_am_maintainer_mode)
+ qa_am_maintainer_mode = f"^({qa_am_maintainer_mode})$"
else:
- qa_am_maintainer_mode = "^%s$" % qa_am_maintainer_mode[0]
+ qa_am_maintainer_mode = f"^{qa_am_maintainer_mode[0]}$"
qa_am_maintainer_mode = re.compile(qa_am_maintainer_mode)
# Exclude output from dev-libs/yaz-3.0.47 which looks like this:
@@ -2418,8 +2414,8 @@ def _check_build_log(mysettings, out=None):
except (EOFError, zlib.error) as e:
_eerror(
[
- "portage encountered a zlib error: '{}'".format(e),
- "while reading the log file: '%s'" % logfile,
+ f"portage encountered a zlib error: '{e}'",
+ f"while reading the log file: '{logfile}'",
]
)
finally:
@@ -2469,7 +2465,7 @@ def _check_build_log(mysettings, out=None):
if configure_opts_warn:
msg = [_("QA Notice: Unrecognized configure options:")]
msg.append("")
- msg.extend("\t%s" % x for x in configure_opts_warn)
+ msg.extend(f"\t{x}" for x in configure_opts_warn)
_eqawarn(msg)
if make_jobserver:
@@ -2525,7 +2521,7 @@ def _post_src_install_write_metadata(settings):
encoding=_encodings["repo.content"],
errors="strict",
) as f:
- f.write("{:.0f}\n".format(time.time()))
+ f.write(f"{time.time():.0f}\n")
use = frozenset(settings["PORTAGE_USE"].split())
for k in _vdb_use_conditional_keys:
@@ -2563,7 +2559,7 @@ def _post_src_install_write_metadata(settings):
encoding=_encodings["repo.content"],
errors="strict",
) as f:
- f.write("%s\n" % v)
+ f.write(f"{v}\n")
if eapi_attrs.slot_operator:
deps = evaluate_slot_operator_equal_deps(settings, use, QueryCommand.get_db())
@@ -2585,7 +2581,7 @@ def _post_src_install_write_metadata(settings):
encoding=_encodings["repo.content"],
errors="strict",
) as f:
- f.write("%s\n" % v)
+ f.write(f"{v}\n")
def _preinst_bsdflags(mysettings):
@@ -2605,8 +2601,7 @@ def _preinst_bsdflags(mysettings):
% (_shell_quote(mysettings["D"]),)
)
os.system(
- "chflags -R nosunlnk,nouunlnk %s 2>/dev/null"
- % (_shell_quote(mysettings["D"]),)
+ f"chflags -R nosunlnk,nouunlnk {_shell_quote(mysettings['D'])} 2>/dev/null"
)
@@ -2668,10 +2663,10 @@ def _post_src_install_uid_fix(mysettings, out):
qa_desktop_file = qa_desktop_file.split()
if qa_desktop_file:
if len(qa_desktop_file) > 1:
- qa_desktop_file = "|".join("(%s)" % x for x in qa_desktop_file)
- qa_desktop_file = "^(%s)$" % qa_desktop_file
+ qa_desktop_file = "|".join(f"({x})" for x in qa_desktop_file)
+ qa_desktop_file = f"^({qa_desktop_file})$"
else:
- qa_desktop_file = "^%s$" % qa_desktop_file[0]
+ qa_desktop_file = f"^{qa_desktop_file[0]}$"
qa_desktop_file = re.compile(qa_desktop_file)
while True:
@@ -2784,7 +2779,7 @@ def _post_src_install_uid_fix(mysettings, out):
if not fixlafiles_announced:
fixlafiles_announced = True
writemsg("Fixing .la files\n", fd=out)
- writemsg(" %s\n" % fpath[len(destdir) :], fd=out)
+ writemsg(f" {fpath[len(destdir):]}\n", fd=out)
# write_atomic succeeds even in some cases in which
# a normal write might fail due to file permission
# settings on some operating systems such as HP-UX
@@ -2926,10 +2921,10 @@ def _post_src_install_soname_symlinks(mysettings, out):
qa_soname_no_symlink = metadata.get("QA_SONAME_NO_SYMLINK", "").split()
if qa_soname_no_symlink:
if len(qa_soname_no_symlink) > 1:
- qa_soname_no_symlink = "|".join("(%s)" % x for x in qa_soname_no_symlink)
- qa_soname_no_symlink = "^(%s)$" % qa_soname_no_symlink
+ qa_soname_no_symlink = "|".join(f"({x})" for x in qa_soname_no_symlink)
+ qa_soname_no_symlink = f"^({qa_soname_no_symlink})$"
else:
- qa_soname_no_symlink = "^%s$" % qa_soname_no_symlink[0]
+ qa_soname_no_symlink = f"^{qa_soname_no_symlink[0]}$"
qa_soname_no_symlink = re.compile(qa_soname_no_symlink)
libpaths = set(portage.util.getlibpaths(mysettings["ROOT"], env=mysettings))
@@ -3018,7 +3013,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
portage.util.writemsg_level(
- "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
+ f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1
)
continue
@@ -3108,7 +3103,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
if unrecognized_elf_files:
qa_msg = ["QA Notice: Unrecognized ELF file(s):"]
qa_msg.append("")
- qa_msg.extend("\t%s" % str(entry).rstrip() for entry in unrecognized_elf_files)
+ qa_msg.extend(f"\t{str(entry).rstrip()}" for entry in unrecognized_elf_files)
qa_msg.append("")
for line in qa_msg:
eqawarn(line, key=mysettings.mycpv, out=out)
diff --git a/lib/portage/package/ebuild/fetch.py b/lib/portage/package/ebuild/fetch.py
index b72cbc32f..ecc1acdd5 100644
--- a/lib/portage/package/ebuild/fetch.py
+++ b/lib/portage/package/ebuild/fetch.py
@@ -371,9 +371,7 @@ def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
digests = _apply_hash_filter(digests, hash_filter)
if _check_digests(filename, digests, show_errors=show_errors):
eout.ebegin(
- "{} {} ;-)".format(
- os.path.basename(filename), " ".join(sorted(digests))
- )
+ f"{os.path.basename(filename)} {' '.join(sorted(digests))} ;-)"
)
eout.eend(0)
else:
@@ -675,7 +673,7 @@ class MirrorLayoutConfig:
ret = []
for val in self.structure:
if not self.validate_structure(val):
- raise ValueError("Unsupported structure: {}".format(val))
+ raise ValueError(f"Unsupported structure: {val}")
if val[0] == "flat":
ret.append(FlatLayout(*val[1:]))
elif val[0] == "filename-hash":
@@ -713,7 +711,7 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
if ts >= time.time() - 86400:
mirror_conf.deserialize(data)
else:
- tmpfile = ".layout.conf.%s" % urlparse(mirror_url).hostname
+ tmpfile = f".layout.conf.{urlparse(mirror_url).hostname}"
try:
if mirror_url[:1] == "/":
tmpfile = os.path.join(mirror_url, "layout.conf")
@@ -1093,7 +1091,7 @@ def fetch(
writemsg(_("!!! No known mirror by the name: %s\n") % (mirrorname))
else:
writemsg(_("Invalid mirror definition in SRC_URI:\n"), noiselevel=-1)
- writemsg(" %s\n" % (myuri), noiselevel=-1)
+ writemsg(f" {myuri}\n", noiselevel=-1)
else:
if (restrict_fetch and not override_fetch) or force_mirror:
# Only fetch from specific mirrors is allowed.
@@ -1132,7 +1130,7 @@ def fetch(
_ensure_distdir(mysettings, mysettings["DISTDIR"])
except PortageException as e:
if not os.path.isdir(mysettings["DISTDIR"]):
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Directory Not Found: DISTDIR='%s'\n")
% mysettings["DISTDIR"],
@@ -1218,7 +1216,7 @@ def fetch(
vfs_stat = os.statvfs(mysettings["DISTDIR"])
except OSError as e:
writemsg_level(
- "!!! statvfs('{}'): {}\n".format(mysettings["DISTDIR"], e),
+ f"!!! statvfs('{mysettings['DISTDIR']}'): {e}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -1504,7 +1502,7 @@ def fetch(
):
eout = EOutput()
eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
- eout.ebegin("{} size ;-)".format(myfile))
+ eout.ebegin(f"{myfile} size ;-)")
eout.eend(0)
continue
else:
@@ -1556,7 +1554,7 @@ def fetch(
digests = list(digests)
digests.sort()
eout.ebegin(
- "{} {} ;-)".format(myfile, " ".join(digests))
+ f"{myfile} {' '.join(digests)} ;-)"
)
eout.eend(0)
continue # fetch any remaining files
@@ -1736,7 +1734,7 @@ def fetch(
try:
variables["DIGESTS"] = " ".join(
[
- "{}:{}".format(k.lower(), v)
+ f"{k.lower()}:{v}"
for k, v in mydigests[myfile].items()
if k != "size"
]
@@ -1947,8 +1945,7 @@ def fetch(
)
if digests:
eout.ebegin(
- "%s %s ;-)"
- % (myfile, " ".join(sorted(digests)))
+ f"{myfile} {' '.join(sorted(digests))} ;-)"
)
eout.eend(0)
fetched = 2
diff --git a/lib/portage/package/ebuild/getmaskingstatus.py b/lib/portage/package/ebuild/getmaskingstatus.py
index 3a24a37b0..530e338e0 100644
--- a/lib/portage/package/ebuild/getmaskingstatus.py
+++ b/lib/portage/package/ebuild/getmaskingstatus.py
@@ -90,9 +90,9 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
properties = metadata["PROPERTIES"]
restrict = metadata["RESTRICT"]
if not eapi_is_supported(eapi):
- return [_MaskReason("EAPI", "EAPI %s" % eapi)]
+ return [_MaskReason("EAPI", f"EAPI {eapi}")]
if _eapi_is_deprecated(eapi) and not installed:
- return [_MaskReason("EAPI", "EAPI %s" % eapi)]
+ return [_MaskReason("EAPI", f"EAPI {eapi}")]
egroups = settings.configdict["backupenv"].get("ACCEPT_KEYWORDS", "").split()
global_accept_keywords = settings.get("ACCEPT_KEYWORDS", "")
pgroups = global_accept_keywords.split()
@@ -185,7 +185,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
msg.append("in RESTRICT")
rValue.append(_MaskReason("RESTRICT", " ".join(msg)))
except InvalidDependString as e:
- rValue.append(_MaskReason("invalid", "RESTRICT: {}".format(e)))
+ rValue.append(_MaskReason("invalid", f"RESTRICT: {e}"))
# Only show KEYWORDS masks for installed packages
# if they're not masked for any other reason.
diff --git a/lib/portage/package/ebuild/prepare_build_dirs.py b/lib/portage/package/ebuild/prepare_build_dirs.py
index ad73141c5..e39378109 100644
--- a/lib/portage/package/ebuild/prepare_build_dirs.py
+++ b/lib/portage/package/ebuild/prepare_build_dirs.py
@@ -54,7 +54,7 @@ def prepare_build_dirs(myroot=None, settings=None, cleanup=False):
if errno.ENOENT == oe.errno:
pass
elif errno.EPERM == oe.errno:
- writemsg("%s\n" % oe, noiselevel=-1)
+ writemsg(f"{oe}\n", noiselevel=-1)
writemsg(
_("Operation Not Permitted: rmtree('%s')\n") % clean_dir,
noiselevel=-1,
@@ -72,7 +72,7 @@ def prepare_build_dirs(myroot=None, settings=None, cleanup=False):
if errno.EEXIST == oe.errno:
pass
elif errno.EPERM == oe.errno:
- writemsg("%s\n" % oe, noiselevel=-1)
+ writemsg(f"{oe}\n", noiselevel=-1)
writemsg(
_("Operation Not Permitted: makedirs('%s')\n") % dir_path,
noiselevel=-1,
@@ -291,7 +291,7 @@ def _prepare_features_dirs(mysettings):
except PortageException as e:
failure = True
- writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"\n!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Failed resetting perms on %s='%s'\n")
% (kwargs["basedir_var"], basedir),
@@ -315,7 +315,7 @@ def _prepare_workdir(mysettings):
else:
raise ValueError()
if parsed_mode & 0o7777 != parsed_mode:
- raise ValueError("Invalid file mode: %s" % mode)
+ raise ValueError(f"Invalid file mode: {mode}")
else:
workdir_mode = parsed_mode
except KeyError as e:
@@ -324,7 +324,7 @@ def _prepare_workdir(mysettings):
)
except ValueError as e:
if len(str(e)) > 0:
- writemsg("%s\n" % e)
+ writemsg(f"{e}\n")
writemsg(
_("!!! Unable to parse PORTAGE_WORKDIR_MODE='%s', using %s.\n")
% (mysettings["PORTAGE_WORKDIR_MODE"], oct(workdir_mode))
@@ -362,7 +362,7 @@ def _prepare_workdir(mysettings):
mode=0o2770,
)
except PortageException as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Permission issues with PORTAGE_LOGDIR='%s'\n")
% mysettings["PORTAGE_LOGDIR"],
@@ -394,7 +394,7 @@ def _prepare_workdir(mysettings):
log_subdir = os.path.join(logdir, "build", mysettings["CATEGORY"])
mysettings["PORTAGE_LOG_FILE"] = os.path.join(
log_subdir,
- "{}:{}.log{}".format(mysettings["PF"], logid_time, compress_log_ext),
+ f"{mysettings['PF']}:{logid_time}.log{compress_log_ext}",
)
else:
log_subdir = logdir
@@ -415,17 +415,17 @@ def _prepare_workdir(mysettings):
try:
_ensure_log_subdirs(logdir, log_subdir)
except PortageException as e:
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
if os.access(log_subdir, os.W_OK):
logdir_subdir_ok = True
else:
writemsg(
- "!!! {}: {}\n".format(_("Permission Denied"), log_subdir),
+ f"!!! {_('Permission Denied')}: {log_subdir}\n",
noiselevel=-1,
)
- tmpdir_log_path = os.path.join(mysettings["T"], "build.log%s" % compress_log_ext)
+ tmpdir_log_path = os.path.join(mysettings["T"], f"build.log{compress_log_ext}")
if not logdir_subdir_ok:
# NOTE: When sesandbox is enabled, the local SELinux security policies
# may not allow output to be piped out of the sesandbox domain. The
diff --git a/lib/portage/process.py b/lib/portage/process.py
index 40636ee94..ac270d99a 100644
--- a/lib/portage/process.py
+++ b/lib/portage/process.py
@@ -84,13 +84,13 @@ if _fd_dir is not None:
raise
return range(max_fd_limit)
-elif os.path.isdir("/proc/%s/fd" % portage.getpid()):
+elif os.path.isdir(f"/proc/{portage.getpid()}/fd"):
# In order for this function to work in forked subprocesses,
# os.getpid() must be called from inside the function.
def get_open_fds():
return (
int(fd)
- for fd in os.listdir("/proc/%s/fd" % portage.getpid())
+ for fd in os.listdir(f"/proc/{portage.getpid()}/fd")
if fd.isdigit()
)
@@ -442,7 +442,7 @@ def spawn(
# We need to catch _any_ exception so that it doesn't
# propagate out of this function and cause exiting
# with anything other than os._exit()
- writemsg("{}:\n {}\n".format(e, " ".join(mycommand)), noiselevel=-1)
+ writemsg(f"{e}:\n {' '.join(mycommand)}\n", noiselevel=-1)
traceback.print_exc()
sys.stderr.flush()
@@ -458,7 +458,7 @@ def spawn(
os._exit(1)
if not isinstance(pid, int):
- raise AssertionError("fork returned non-integer: {}".format(repr(pid)))
+ raise AssertionError(f"fork returned non-integer: {repr(pid)}")
# Add the pid to our local and the global pid lists.
mypids.append(pid)
@@ -574,7 +574,7 @@ def _configure_loopback_interface():
rtnl.add_address(ifindex, socket.AF_INET6, "fd::1", 8)
except OSError as e:
writemsg(
- "Unable to configure loopback interface: %s\n" % e.strerror, noiselevel=-1
+ f"Unable to configure loopback interface: {e.strerror}\n", noiselevel=-1
)
diff --git a/lib/portage/proxy/lazyimport.py b/lib/portage/proxy/lazyimport.py
index 89d7d3abe..da0f98f9f 100644
--- a/lib/portage/proxy/lazyimport.py
+++ b/lib/portage/proxy/lazyimport.py
@@ -136,7 +136,7 @@ class _LazyImportFrom(_LazyImport):
except AttributeError:
# Try to import it as a submodule
try:
- __import__("{}.{}".format(name, attr_name))
+ __import__(f"{name}.{attr_name}")
except ImportError:
pass
# If it's a submodule, this will succeed. Otherwise, it may
diff --git a/lib/portage/repository/config.py b/lib/portage/repository/config.py
index f540265dd..5e6097eac 100644
--- a/lib/portage/repository/config.py
+++ b/lib/portage/repository/config.py
@@ -629,7 +629,7 @@ class RepoConfig:
d = {}
for k in self.__slots__:
d[k] = getattr(self, k, None)
- return "{}".format(d)
+ return f"{d}"
class RepoConfigLoader:
@@ -1292,7 +1292,7 @@ class RepoConfigLoader:
):
if repo_name != repo.name:
continue
- config_string += "\n[%s]\n" % repo_name
+ config_string += f"\n[{repo_name}]\n"
for key in sorted(keys):
if key == "main_repo" and repo_name != "DEFAULT":
continue
@@ -1318,7 +1318,7 @@ class RepoConfigLoader:
" ".join(x.name for x in getattr(repo, key)),
)
for o, v in repo.module_specific_options.items():
- config_string += "{} = {}\n".format(o, v)
+ config_string += f"{o} = {v}\n"
return config_string.lstrip("\n")
diff --git a/lib/portage/repository/storage/hardlink_quarantine.py b/lib/portage/repository/storage/hardlink_quarantine.py
index ad9e64bcc..a02d8b96e 100644
--- a/lib/portage/repository/storage/hardlink_quarantine.py
+++ b/lib/portage/repository/storage/hardlink_quarantine.py
@@ -48,7 +48,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
p.start()
if await p.async_wait() != os.EX_OK:
raise RepoStorageException(
- "command exited with status {}: {}".format(p.returncode, " ".join(cmd))
+ f"command exited with status {p.returncode}: {' '.join(cmd)}"
)
async def init_update(self):
@@ -70,7 +70,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
"--exclude=/lost+found",
"--exclude=/packages",
"--exclude",
- "/{}".format(os.path.basename(update_location)),
+ f"/{os.path.basename(update_location)}",
self._user_location + "/",
update_location + "/",
]
@@ -99,7 +99,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
"--exclude=/lost+found",
"--exclude=/packages",
"--exclude",
- "/{}".format(os.path.basename(update_location)),
+ f"/{os.path.basename(update_location)}",
update_location + "/",
self._user_location + "/",
]
diff --git a/lib/portage/repository/storage/hardlink_rcu.py b/lib/portage/repository/storage/hardlink_rcu.py
index 4fd87a24b..51a7ce435 100644
--- a/lib/portage/repository/storage/hardlink_rcu.py
+++ b/lib/portage/repository/storage/hardlink_rcu.py
@@ -125,7 +125,7 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
p.start()
if await p.async_wait() != os.EX_OK:
raise RepoStorageException(
- "command exited with status {}: {}".format(p.returncode, " ".join(cmd))
+ f"command exited with status {p.returncode}: {' '.join(cmd)}"
)
async def init_update(self):
@@ -216,7 +216,7 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
os.unlink(new_symlink)
except OSError:
pass
- os.symlink("snapshots/{}".format(new_id), new_symlink)
+ os.symlink(f"snapshots/{new_id}", new_symlink)
# If SyncManager.pre_sync creates an empty directory where
# self._latest_symlink is suppose to be (which is normal if
diff --git a/lib/portage/sync/controller.py b/lib/portage/sync/controller.py
index 79dfb19db..05ced1347 100644
--- a/lib/portage/sync/controller.py
+++ b/lib/portage/sync/controller.py
@@ -176,7 +176,7 @@ class SyncManager:
self.exitcode = exitcode
self.updatecache_flg = updatecache_flg
if exitcode == 0:
- msg = "=== Sync completed for %s" % self.repo.name
+ msg = f"=== Sync completed for {self.repo.name}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n")
if self.callback:
@@ -190,7 +190,7 @@ class SyncManager:
_hooks = self.hooks["postsync.d"]
for filepath in _hooks:
writemsg_level(
- "Spawning post_sync hook: %s\n" % (_unicode_decode(_hooks[filepath])),
+ f"Spawning post_sync hook: {_unicode_decode(_hooks[filepath])}\n",
level=logging.ERROR,
noiselevel=4,
)
@@ -212,9 +212,7 @@ class SyncManager:
return succeeded
def pre_sync(self, repo):
- msg = ">>> Syncing repository '{}' into '{}'...".format(
- repo.name, repo.location
- )
+ msg = f">>> Syncing repository '{repo.name}' into '{repo.location}'..."
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n")
try:
@@ -250,7 +248,7 @@ class SyncManager:
pw = pwd.getpwuid(int(username))
except (ValueError, KeyError):
writemsg(
- "!!! User '%s' invalid or does not exist\n" % username,
+ f"!!! User '{username}' invalid or does not exist\n",
noiselevel=-1,
)
return (logname, user, group, home)
@@ -268,7 +266,7 @@ class SyncManager:
pw = grp.getgrgid(int(groupname))
except (ValueError, KeyError):
writemsg(
- "!!! Group '%s' invalid or does not exist\n" % groupname,
+ f"!!! Group '{groupname}' invalid or does not exist\n",
noiselevel=-1,
)
return (logname, user, group, home)
diff --git a/lib/portage/sync/modules/git/git.py b/lib/portage/sync/modules/git/git.py
index 88f19cccf..5ad6e61eb 100644
--- a/lib/portage/sync/modules/git/git.py
+++ b/lib/portage/sync/modules/git/git.py
@@ -49,7 +49,7 @@ class GitSync(NewBase):
if not os.path.exists(self.repo.location):
os.makedirs(self.repo.location)
self.logger(
- self.xterm_titles, "Created new directory %s" % self.repo.location
+ self.xterm_titles, f"Created new directory {self.repo.location}"
)
except OSError:
return (1, False)
@@ -90,7 +90,7 @@ class GitSync(NewBase):
if self.repo.module_specific_options.get("sync-git-clone-extra-opts"):
git_cmd_opts += (
- " %s" % self.repo.module_specific_options["sync-git-clone-extra-opts"]
+ f" {self.repo.module_specific_options['sync-git-clone-extra-opts']}"
)
git_cmd = "{} clone{} {} .".format(
self.bin_command,
@@ -100,11 +100,11 @@ class GitSync(NewBase):
writemsg_level(git_cmd + "\n")
exitcode = portage.process.spawn_bash(
- "cd {} ; exec {}".format(portage._shell_quote(self.repo.location), git_cmd),
+ f"cd {portage._shell_quote(self.repo.location)} ; exec {git_cmd}",
**self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git clone error in %s" % self.repo.location
+ msg = f"!!! git clone error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -252,7 +252,7 @@ class GitSync(NewBase):
if self.repo.module_specific_options.get("sync-git-pull-extra-opts"):
git_cmd_opts += (
- " %s" % self.repo.module_specific_options["sync-git-pull-extra-opts"]
+ f" {self.repo.module_specific_options['sync-git-pull-extra-opts']}"
)
self.add_safe_directory()
@@ -271,7 +271,7 @@ class GitSync(NewBase):
)
).rstrip("\n")
except subprocess.CalledProcessError as e:
- msg = "!!! git rev-parse error in %s" % self.repo.location
+ msg = f"!!! git rev-parse error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (e.returncode, False)
@@ -289,7 +289,7 @@ class GitSync(NewBase):
**self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git gc error in %s" % self.repo.location
+ msg = f"!!! git gc error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -308,17 +308,17 @@ class GitSync(NewBase):
)
exitcode = portage.process.spawn_bash(
- "cd {} ; exec {}".format(portage._shell_quote(self.repo.location), git_cmd),
+ f"cd {portage._shell_quote(self.repo.location)} ; exec {git_cmd}",
**self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git fetch error in %s" % self.repo.location
+ msg = f"!!! git fetch error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
- if not self.verify_head(revision="refs/remotes/%s" % remote_branch):
+ if not self.verify_head(revision=f"refs/remotes/{remote_branch}"):
return (1, False)
if not self.repo.volatile:
@@ -339,7 +339,7 @@ class GitSync(NewBase):
)
if exitcode != os.EX_OK:
- msg = "!!! git clean error in %s" % self.repo.location
+ msg = f"!!! git clean error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -365,7 +365,7 @@ class GitSync(NewBase):
else:
merge_cmd = [self.bin_command, "merge"]
- merge_cmd.append("refs/remotes/%s" % remote_branch)
+ merge_cmd.append(f"refs/remotes/{remote_branch}")
if quiet:
merge_cmd.append("--quiet")
@@ -386,7 +386,7 @@ class GitSync(NewBase):
)
if exitcode != os.EX_OK:
- msg = "!!! git merge error in %s" % self.repo.location
+ msg = f"!!! git merge error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -419,15 +419,14 @@ class GitSync(NewBase):
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
out.einfo(
- "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ f"Using keys from {self.repo.sync_openpgp_key_path}"
)
with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
except (GematoException, asyncio.TimeoutError) as e:
writemsg_level(
- "!!! Verification impossible due to keyring problem:\n%s\n"
- % (e,),
+ f"!!! Verification impossible due to keyring problem:\n{e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -468,7 +467,7 @@ class GitSync(NewBase):
expl = "no signature"
else:
expl = "unknown issue"
- out.eerror("No valid signature found: {}".format(expl))
+ out.eerror(f"No valid signature found: {expl}")
return False
finally:
if openpgp_env is not None:
diff --git a/lib/portage/sync/modules/mercurial/mercurial.py b/lib/portage/sync/modules/mercurial/mercurial.py
index bd8135c05..984a0ddde 100644
--- a/lib/portage/sync/modules/mercurial/mercurial.py
+++ b/lib/portage/sync/modules/mercurial/mercurial.py
@@ -35,7 +35,7 @@ class MercurialSync(NewBase):
if not os.path.exists(self.repo.location):
os.makedirs(self.repo.location)
self.logger(
- self.xterm_titles, "Created new directory %s" % self.repo.location
+ self.xterm_titles, f"Created new directory {self.repo.location}"
)
except OSError:
return (1, False)
@@ -87,7 +87,7 @@ class MercurialSync(NewBase):
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
- msg = "!!! hg clone error in %s" % self.repo.location
+ msg = f"!!! hg clone error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -130,7 +130,7 @@ class MercurialSync(NewBase):
" %s"
% self.repo.module_specific_options["sync-mercurial-pull-extra-opts"]
)
- hg_cmd = "{} pull -u{}".format(self.bin_command, hg_cmd_opts)
+ hg_cmd = f"{self.bin_command} pull -u{hg_cmd_opts}"
writemsg_level(hg_cmd + "\n")
rev_cmd = [self.bin_command, "id", "--id", "--rev", "tip"]
@@ -144,7 +144,7 @@ class MercurialSync(NewBase):
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
- msg = "!!! hg pull error in %s" % self.repo.location
+ msg = f"!!! hg pull error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
diff --git a/lib/portage/sync/modules/rsync/rsync.py b/lib/portage/sync/modules/rsync/rsync.py
index a99225d48..87aadd0e1 100644
--- a/lib/portage/sync/modules/rsync/rsync.py
+++ b/lib/portage/sync/modules/rsync/rsync.py
@@ -160,7 +160,7 @@ class RsyncSync(NewBase):
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
out.einfo(
- "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ f"Using keys from {self.repo.sync_openpgp_key_path}"
)
with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
@@ -226,7 +226,7 @@ class RsyncSync(NewBase):
)[1:5]
except ValueError:
writemsg_level(
- "!!! sync-uri is invalid: %s\n" % syncuri,
+ f"!!! sync-uri is invalid: {syncuri}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -285,10 +285,10 @@ class RsyncSync(NewBase):
for addrinfo in addrinfos:
if addrinfo[0] == AF_INET:
- ips_v4.append("%s" % addrinfo[4][0])
+ ips_v4.append(f"{addrinfo[4][0]}")
elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
# IPv6 addresses need to be enclosed in square brackets
- ips_v6.append("[%s]" % addrinfo[4][0])
+ ips_v6.append(f"[{addrinfo[4][0]}]")
random.shuffle(ips_v4)
random.shuffle(ips_v6)
@@ -335,7 +335,7 @@ class RsyncSync(NewBase):
dosyncuri = uris.pop()
elif maxretries < 0 or retries > maxretries:
writemsg(
- "!!! Exhausted addresses for %s\n" % _unicode_decode(hostname),
+ f"!!! Exhausted addresses for {_unicode_decode(hostname)}\n",
noiselevel=-1,
)
return (1, False)
@@ -447,30 +447,30 @@ class RsyncSync(NewBase):
out.ewarn(
"You may want to try using another mirror and/or reporting this one:"
)
- out.ewarn(" {}".format(dosyncuri))
+ out.ewarn(f" {dosyncuri}")
out.ewarn("")
out.quiet = quiet
- out.einfo("Manifest timestamp: {} UTC".format(ts.ts))
+ out.einfo(f"Manifest timestamp: {ts.ts} UTC")
out.einfo("Valid OpenPGP signature found:")
out.einfo(
"- primary key: %s"
% (m.openpgp_signature.primary_key_fingerprint)
)
- out.einfo("- subkey: %s" % (m.openpgp_signature.fingerprint))
+ out.einfo(f"- subkey: {m.openpgp_signature.fingerprint}")
out.einfo(
- "- timestamp: %s UTC" % (m.openpgp_signature.timestamp)
+ f"- timestamp: {m.openpgp_signature.timestamp} UTC"
)
# if nothing has changed, skip the actual Manifest
# verification
if not local_state_unchanged:
- out.ebegin("Verifying {}".format(download_dir))
+ out.ebegin(f"Verifying {download_dir}")
m.assert_directory_verifies()
out.eend(0)
except GematoException as e:
writemsg_level(
- "!!! Manifest verification failed:\n{}\n".format(e),
+ f"!!! Manifest verification failed:\n{e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -496,7 +496,7 @@ class RsyncSync(NewBase):
elif exitcode == SERVER_OUT_OF_DATE:
exitcode = 1
elif exitcode == EXCEEDED_MAX_RETRIES:
- sys.stderr.write(">>> Exceeded PORTAGE_RSYNC_RETRIES: %s\n" % maxretries)
+ sys.stderr.write(f">>> Exceeded PORTAGE_RSYNC_RETRIES: {maxretries}\n")
exitcode = 1
elif exitcode > 0:
msg = []
@@ -508,7 +508,7 @@ class RsyncSync(NewBase):
"that sync-uri attribute for repository '%s' is proper."
% self.repo.name
)
- msg.append("sync-uri: '%s'" % self.repo.sync_uri)
+ msg.append(f"sync-uri: '{self.repo.sync_uri}'")
elif exitcode == 11:
msg.append("Rsync has reported that there is a File IO error. Normally")
msg.append(
@@ -519,7 +519,7 @@ class RsyncSync(NewBase):
% self.repo.name
)
msg.append("and try again after the problem has been fixed.")
- msg.append("Location of repository: '%s'" % self.repo.location)
+ msg.append(f"Location of repository: '{self.repo.location}'")
elif exitcode == 20:
msg.append("Rsync was killed before it finished.")
else:
@@ -547,7 +547,7 @@ class RsyncSync(NewBase):
os.makedirs(self.repo.location)
self.logger(
self.self.xterm_titles,
- "Created New Directory %s " % self.repo.location,
+ f"Created New Directory {self.repo.location} ",
)
except OSError:
return (1, False)
@@ -605,16 +605,16 @@ class RsyncSync(NewBase):
portage.writemsg(
yellow("WARNING:")
+ " adding required option "
- + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
+ + f"{opt} not included in PORTAGE_RSYNC_OPTS\n"
)
rsync_opts.append(opt)
for exclude in ("distfiles", "local", "packages"):
- opt = "--exclude=/%s" % exclude
+ opt = f"--exclude=/{exclude}"
if opt not in rsync_opts:
portage.writemsg(
yellow("WARNING:")
- + " adding required option %s not included in " % opt
+ + f" adding required option {opt} not included in "
+ "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n"
)
rsync_opts.append(opt)
@@ -635,7 +635,7 @@ class RsyncSync(NewBase):
portage.writemsg(
yellow("WARNING:")
+ " adding required option "
- + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
+ + f"{opt} not included in PORTAGE_RSYNC_OPTS\n"
)
rsync_opts.append(opt)
return rsync_opts
@@ -775,20 +775,18 @@ class RsyncSync(NewBase):
)
print(">>>")
print(
- ">>> In order to force sync, remove '%s'."
- % self.servertimestampfile
+ f">>> In order to force sync, remove '{self.servertimestampfile}'."
)
print(">>>")
print()
elif (servertimestamp != 0) and (servertimestamp < timestamp):
- self.logger(self.xterm_titles, ">>> Server out of date: %s" % syncuri)
+ self.logger(self.xterm_titles, f">>> Server out of date: {syncuri}")
print()
print(">>>")
- print(">>> SERVER OUT OF DATE: %s" % syncuri)
+ print(f">>> SERVER OUT OF DATE: {syncuri}")
print(">>>")
print(
- ">>> In order to force sync, remove '%s'."
- % self.servertimestampfile
+ f">>> In order to force sync, remove '{self.servertimestampfile}'."
)
print(">>>")
print()
diff --git a/lib/portage/sync/modules/svn/svn.py b/lib/portage/sync/modules/svn/svn.py
index 35ce3b75e..0be9b5e8f 100644
--- a/lib/portage/sync/modules/svn/svn.py
+++ b/lib/portage/sync/modules/svn/svn.py
@@ -59,7 +59,7 @@ class SVNSync(NewBase):
# svn update
exitcode = portage.process.spawn_bash(
- "cd {}; exec svn update".format(portage._shell_quote(self.repo.location)),
+ f"cd {portage._shell_quote(self.repo.location)}; exec svn update",
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
@@ -77,7 +77,7 @@ class SVNSync(NewBase):
@rtype: (int, bool)
"""
exitcode = portage.process.spawn_bash(
- "cd {}; exec svn upgrade".format(portage._shell_quote(self.repo.location)),
+ f"cd {portage._shell_quote(self.repo.location)}; exec svn upgrade",
**self.spawn_kwargs
)
if exitcode != os.EX_OK:
diff --git a/lib/portage/sync/modules/webrsync/webrsync.py b/lib/portage/sync/modules/webrsync/webrsync.py
index 148a04f41..c00378de4 100644
--- a/lib/portage/sync/modules/webrsync/webrsync.py
+++ b/lib/portage/sync/modules/webrsync/webrsync.py
@@ -97,7 +97,7 @@ class WebRsync(SyncBase):
out = portage.output.EOutput(quiet=quiet)
try:
out.einfo(
- "Using keys from {}".format(self.repo.sync_openpgp_key_path)
+ f"Using keys from {self.repo.sync_openpgp_key_path}"
)
with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
@@ -106,8 +106,7 @@ class WebRsync(SyncBase):
self.spawn_kwargs["env"]["PORTAGE_TEMP_GPG_DIR"] = openpgp_env.home
except (GematoException, asyncio.TimeoutError) as e:
writemsg_level(
- "!!! Verification impossible due to keyring problem:\n%s\n"
- % (e,),
+ f"!!! Verification impossible due to keyring problem:\n{e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -126,7 +125,7 @@ class WebRsync(SyncBase):
exitcode = portage.process.spawn(webrsync_cmd, **self.spawn_kwargs)
if exitcode != os.EX_OK:
- msg = "!!! emerge-webrsync error in %s" % self.repo.location
+ msg = f"!!! emerge-webrsync error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
diff --git a/lib/portage/sync/old_tree_timestamp.py b/lib/portage/sync/old_tree_timestamp.py
index 44fe16728..950878ce5 100644
--- a/lib/portage/sync/old_tree_timestamp.py
+++ b/lib/portage/sync/old_tree_timestamp.py
@@ -82,9 +82,7 @@ def old_tree_timestamp_warn(portdir, settings):
warnsync = float(settings.get(var_name, default_warnsync))
except ValueError:
writemsg_level(
- "!!! {} contains non-numeric value: {}\n".format(
- var_name, settings[var_name]
- ),
+ f"!!! {var_name} contains non-numeric value: {settings[var_name]}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -96,7 +94,7 @@ def old_tree_timestamp_warn(portdir, settings):
if (unixtime - 86400 * warnsync) > lastsync:
out = EOutput()
if have_english_locale():
- out.ewarn("Last emerge --sync was %s ago." % whenago(unixtime - lastsync))
+ out.ewarn(f"Last emerge --sync was {whenago(unixtime - lastsync)} ago.")
else:
out.ewarn(
_("Last emerge --sync was %s.")
diff --git a/lib/portage/sync/syncbase.py b/lib/portage/sync/syncbase.py
index bd12b875e..c8e261a49 100644
--- a/lib/portage/sync/syncbase.py
+++ b/lib/portage/sync/syncbase.py
@@ -62,18 +62,17 @@ class SyncBase:
"""
if self.bin_command is None:
msg = [
- "Command not found: %s" % self._bin_command,
- 'Type "emerge %s" to enable %s support.'
- % (self.bin_pkg, self._bin_command),
+ f"Command not found: {self._bin_command}",
+ f'Type "emerge {self.bin_pkg}" to enable {self._bin_command} support.',
]
for l in msg:
- writemsg_level("!!! %s\n" % l, level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"!!! {l}\n", level=logging.ERROR, noiselevel=-1)
return False
try:
self.repo_storage
except RepoStorageException as e:
- writemsg_level("!!! {}\n".format(e), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"!!! {e}\n", level=logging.ERROR, noiselevel=-1)
return False
return True
@@ -178,7 +177,7 @@ class SyncBase:
try:
retry_count = int(self.repo.sync_openpgp_key_refresh_retry_count)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-count: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-count: {e}")
else:
if retry_count <= 0:
return None
@@ -192,7 +191,7 @@ class SyncBase:
)
except Exception as e:
errors.append(
- "sync-openpgp-key-refresh-retry-overall-timeout: {}".format(e)
+ f"sync-openpgp-key-refresh-retry-overall-timeout: {e}"
)
else:
if retry_overall_timeout < 0:
@@ -213,7 +212,7 @@ class SyncBase:
self.repo.sync_openpgp_key_refresh_retry_delay_mult
)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-delay-mult: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-delay-mult: {e}")
else:
if retry_delay_mult <= 0:
errors.append(
@@ -229,7 +228,7 @@ class SyncBase:
self.repo.sync_openpgp_key_refresh_retry_delay_exp_base
)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-delay-exp: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-delay-exp: {e}")
else:
if retry_delay_exp_base <= 0:
errors.append(
@@ -243,11 +242,11 @@ class SyncBase:
lines.append("!!! Retry disabled for openpgp key refresh:")
lines.append("")
for msg in errors:
- lines.append(" {}".format(msg))
+ lines.append(f" {msg}")
lines.append("")
for line in lines:
- writemsg_level("{}\n".format(line), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"{line}\n", level=logging.ERROR, noiselevel=-1)
return None
@@ -315,7 +314,7 @@ class SyncBase:
keyserver=self.repo.sync_openpgp_keyserver
)
except Exception as e:
- writemsg_level("{}\n".format(e), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"{e}\n", level=logging.ERROR, noiselevel=-1)
raise # retry
# The ThreadPoolExecutor that asyncio uses by default
diff --git a/lib/portage/tests/__init__.py b/lib/portage/tests/__init__.py
index e28c4af3b..2aa97830d 100644
--- a/lib/portage/tests/__init__.py
+++ b/lib/portage/tests/__init__.py
@@ -69,7 +69,7 @@ def main():
argv0 = Path(sys.argv[0])
- usage = "usage: %s [options] [tests to run]" % argv0.name
+ usage = f"usage: {argv0.name} [options] [tests to run]"
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument(
"-l", "--list", help="list all tests", action="store_true", dest="list_tests"
@@ -89,7 +89,7 @@ def main():
for mydir in getTestDirs(basedir):
testsubdir = mydir.name
for name in getTestNames(mydir):
- print("{}/{}/{}.py".format(testdir, testsubdir, name))
+ print(f"{testdir}/{testsubdir}/{name}.py")
return os.EX_OK
if len(options.tests) > 1:
@@ -120,7 +120,7 @@ def getTestFromCommandLine(args, base_path):
f = realpath.relative_to(path)
if not f.name.startswith("test") or not f.suffix == ".py":
- raise Exception("Invalid argument: '%s'" % arg)
+ raise Exception(f"Invalid argument: '{arg}'")
mymodule = f.stem
result.extend(getTestsFromFiles(path, base_path, [mymodule]))
@@ -273,17 +273,17 @@ class TestCase(unittest.TestCase):
testMethod()
ok = True
except unittest.SkipTest as e:
- result.addPortageSkip(self, "{}: SKIP: {}".format(testMethod, str(e)))
+ result.addPortageSkip(self, f"{testMethod}: SKIP: {str(e)}")
except self.failureException:
if self.portage_skip is not None:
if self.portage_skip is True:
- result.addPortageSkip(self, "%s: SKIP" % testMethod)
+ result.addPortageSkip(self, f"{testMethod}: SKIP")
else:
result.addPortageSkip(
- self, "{}: SKIP: {}".format(testMethod, self.portage_skip)
+ self, f"{testMethod}: SKIP: {self.portage_skip}"
)
elif self.todo:
- result.addTodo(self, "%s: TODO" % testMethod)
+ result.addTodo(self, f"{testMethod}: TODO")
else:
result.addFailure(self, sys.exc_info())
except (KeyboardInterrupt, SystemExit):
@@ -322,13 +322,13 @@ class TestCase(unittest.TestCase):
excName = excClass.__name__
else:
excName = str(excClass)
- raise self.failureException("{} not raised: {}".format(excName, msg))
+ raise self.failureException(f"{excName} not raised: {msg}")
def assertNotExists(self, path):
"""Make sure |path| does not exist"""
path = Path(path)
if path.exists():
- raise self.failureException("path exists when it should not: %s" % path)
+ raise self.failureException(f"path exists when it should not: {path}")
class TextTestRunner(unittest.TextTestRunner):
diff --git a/lib/portage/tests/bin/test_doins.py b/lib/portage/tests/bin/test_doins.py
index cb6b3a9dc..3b4b9d2b2 100644
--- a/lib/portage/tests/bin/test_doins.py
+++ b/lib/portage/tests/bin/test_doins.py
@@ -99,7 +99,7 @@ class DoIns(setup_env.BinTestCase):
uid = os.lstat(os.path.join(env["S"], "test")).st_uid
pw = pwd.getpwuid(uid)
# Similary to testDoInsOptionUid, use user name.
- env["INSOPTIONS"] = "-o %s" % pw.pw_name
+ env["INSOPTIONS"] = f"-o {pw.pw_name}"
doins("test")
st = os.lstat(env["D"] + "/test")
if st.st_uid != uid:
@@ -134,7 +134,7 @@ class DoIns(setup_env.BinTestCase):
gid = os.lstat(os.path.join(env["S"], "test")).st_gid
gr = grp.getgrgid(gid)
# Similary to testDoInsOptionUid, use group name.
- env["INSOPTIONS"] = "-g %s" % gr.gr_name
+ env["INSOPTIONS"] = f"-g {gr.gr_name}"
doins("test")
st = os.lstat(env["D"] + "/test")
if st.st_gid != gid:
diff --git a/lib/portage/tests/bin/test_eapi7_ver_funcs.py b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
index c71d90913..6754344b7 100644
--- a/lib/portage/tests/bin/test_eapi7_ver_funcs.py
+++ b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
@@ -15,10 +15,10 @@ class TestEAPI7VerFuncs(TestCase):
"""
with tempfile.NamedTemporaryFile("w") as test_script:
test_script.write(
- 'source "{}"/eapi7-ver-funcs.sh\n'.format(PORTAGE_BIN_PATH)
+ f'source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh\n'
)
for cmd, exp in test_cases:
- test_script.write("{}\n".format(cmd))
+ test_script.write(f"{cmd}\n")
test_script.flush()
s = subprocess.Popen(
@@ -32,7 +32,7 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
self.assertEqual(
- result, exp, "{} -> {}; expected: {}".format(cmd, result, exp)
+ result, exp, f"{cmd} -> {result}; expected: {exp}"
)
def _test_return(self, test_cases):
@@ -41,10 +41,10 @@ class TestEAPI7VerFuncs(TestCase):
"""
with tempfile.NamedTemporaryFile("w+") as test_script:
test_script.write(
- 'source "{}"/eapi7-ver-funcs.sh\n'.format(PORTAGE_BIN_PATH)
+ f'source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh\n'
)
for cmd, exp in test_cases:
- test_script.write("{}; echo $?\n".format(cmd))
+ test_script.write(f"{cmd}; echo $?\n")
test_script.flush()
s = subprocess.Popen(
@@ -58,7 +58,7 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
self.assertEqual(
- result, exp, "{} -> {}; expected: {}".format(cmd, result, exp)
+ result, exp, f"{cmd} -> {result}; expected: {exp}"
)
def _test_fail(self, test_cases):
@@ -67,13 +67,10 @@ class TestEAPI7VerFuncs(TestCase):
"""
for cmd in test_cases:
- test = """
-source "{}"/eapi7-ver-funcs.sh
+ test = f"""
+source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh
die() {{ exit 1; }}
-{}""".format(
- PORTAGE_BIN_PATH,
- cmd,
- )
+{cmd}"""
s = subprocess.Popen(
["bash", "-c", test], stdout=subprocess.PIPE, stderr=subprocess.PIPE
@@ -82,8 +79,7 @@ die() {{ exit 1; }}
self.assertEqual(
s.returncode,
1,
- '"%s" did not fail; output: %s; %s)'
- % (cmd, sout.decode(), serr.decode()),
+ f'"{cmd}" did not fail; output: {sout.decode()}; {serr.decode()})',
)
def test_ver_cut(self):
diff --git a/lib/portage/tests/dbapi/test_auxdb.py b/lib/portage/tests/dbapi/test_auxdb.py
index 41ca4936b..5917762dc 100644
--- a/lib/portage/tests/dbapi/test_auxdb.py
+++ b/lib/portage/tests/dbapi/test_auxdb.py
@@ -48,7 +48,7 @@ class AuxdbTestCase(TestCase):
"foo": ("inherit bar",),
"bar": (
"EXPORT_FUNCTIONS src_prepare",
- 'DEPEND="{}"'.format(eclass_depend),
+ f'DEPEND="{eclass_depend}"',
"bar_src_prepare() { default; }",
),
}
@@ -56,7 +56,7 @@ class AuxdbTestCase(TestCase):
playground = ResolverPlayground(
ebuilds=ebuilds,
eclasses=eclasses,
- user_config={"modules": ("portdbapi.auxdbmodule = %s" % auxdbmodule,)},
+ user_config={"modules": (f"portdbapi.auxdbmodule = {auxdbmodule}",)},
)
portdb = playground.trees[playground.eroot]["porttree"].dbapi
diff --git a/lib/portage/tests/dbapi/test_fakedbapi.py b/lib/portage/tests/dbapi/test_fakedbapi.py
index c5fe96e37..33ebbe30a 100644
--- a/lib/portage/tests/dbapi/test_fakedbapi.py
+++ b/lib/portage/tests/dbapi/test_fakedbapi.py
@@ -101,9 +101,7 @@ class TestFakedbapi(TestCase):
self.assertEqual(
fakedb.match(atom),
expected_result,
- "fakedb.match('{}') = {} != {}".format(
- atom, result, expected_result
- ),
+ f"fakedb.match('{atom}') = {result} != {expected_result}",
)
finally:
shutil.rmtree(tempdir)
diff --git a/lib/portage/tests/dbapi/test_portdb_cache.py b/lib/portage/tests/dbapi/test_portdb_cache.py
index a782853d6..773b87e44 100644
--- a/lib/portage/tests/dbapi/test_portdb_cache.py
+++ b/lib/portage/tests/dbapi/test_portdb_cache.py
@@ -163,7 +163,7 @@ class PortdbCacheTestCase(TestCase):
),
# Test auto-detection and preference for md5-cache when both
# cache formats are available but layout.conf is absent.
- (BASH_BINARY, "-c", "rm %s" % portage._shell_quote(layout_conf_path)),
+ (BASH_BINARY, "-c", f"rm {portage._shell_quote(layout_conf_path)}"),
python_cmd
+ (
textwrap.dedent(
@@ -231,7 +231,7 @@ class PortdbCacheTestCase(TestCase):
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index {} failed".format(i))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
diff --git a/lib/portage/tests/dep/testAtom.py b/lib/portage/tests/dep/testAtom.py
index 54f364b1f..447a72915 100644
--- a/lib/portage/tests/dep/testAtom.py
+++ b/lib/portage/tests/dep/testAtom.py
@@ -218,27 +218,27 @@ class TestAtom(TestCase):
self.assertEqual(
op,
a.operator,
- msg="Atom('{}').operator = {} == '{}'".format(atom, a.operator, op),
+ msg=f"Atom('{atom}').operator = {a.operator} == '{op}'",
)
self.assertEqual(
- cp, a.cp, msg="Atom('{}').cp = {} == '{}'".format(atom, a.cp, cp)
+ cp, a.cp, msg=f"Atom('{atom}').cp = {a.cp} == '{cp}'"
)
if ver is not None:
- cpv = "{}-{}".format(cp, ver)
+ cpv = f"{cp}-{ver}"
else:
cpv = cp
self.assertEqual(
- cpv, a.cpv, msg="Atom('{}').cpv = {} == '{}'".format(atom, a.cpv, cpv)
+ cpv, a.cpv, msg=f"Atom('{atom}').cpv = {a.cpv} == '{cpv}'"
)
self.assertEqual(
slot,
a.slot,
- msg="Atom('{}').slot = {} == '{}'".format(atom, a.slot, slot),
+ msg=f"Atom('{atom}').slot = {a.slot} == '{slot}'",
)
self.assertEqual(
repo,
a.repo,
- msg="Atom('{}').repo == {} == '{}'".format(atom, a.repo, repo),
+ msg=f"Atom('{atom}').repo == {a.repo} == '{repo}'",
)
if a.use:
@@ -248,7 +248,7 @@ class TestAtom(TestCase):
self.assertEqual(
use,
returned_use,
- msg="Atom('{}').use = {} == '{}'".format(atom, returned_use, use),
+ msg=f"Atom('{atom}').use = {returned_use} == '{use}'",
)
for atom, allow_wildcard, allow_repo in tests_xfail:
@@ -306,7 +306,7 @@ class TestAtom(TestCase):
self.assertEqual(
v,
getattr(a, k),
- msg="Atom('{}').{} = {} == '{}'".format(atom, k, getattr(a, k), v),
+ msg=f"Atom('{atom}').{k} = {getattr(a, k)} == '{v}'",
)
def test_intersects(self):
@@ -327,7 +327,7 @@ class TestAtom(TestCase):
self.assertEqual(
Atom(atom).intersects(Atom(other)),
expected_result,
- "{} and {} should intersect: {}".format(atom, other, expected_result),
+ f"{atom} and {other} should intersect: {expected_result}",
)
def test_violated_conditionals(self):
diff --git a/lib/portage/tests/dep/testStandalone.py b/lib/portage/tests/dep/testStandalone.py
index b11e2fc38..0140f1410 100644
--- a/lib/portage/tests/dep/testStandalone.py
+++ b/lib/portage/tests/dep/testStandalone.py
@@ -32,12 +32,12 @@ class TestStandalone(TestCase):
self.assertEqual(
cpvequal(cpv1, cpv2),
expected_result,
- "cpvequal('{}', '{}') != {}".format(cpv1, cpv2, expected_result),
+ f"cpvequal('{cpv1}', '{cpv2}') != {expected_result}",
)
for cpv1, cpv2 in test_cases_xfail:
self.assertRaisesMsg(
- "cpvequal({}, {})".format(cpv1, cpv2),
+ f"cpvequal({cpv1}, {cpv2})",
PortageException,
cpvequal,
cpv1,
diff --git a/lib/portage/tests/dep/test_dep_getusedeps.py b/lib/portage/tests/dep/test_dep_getusedeps.py
index 8bd9c2c1e..804618a2b 100644
--- a/lib/portage/tests/dep/test_dep_getusedeps.py
+++ b/lib/portage/tests/dep/test_dep_getusedeps.py
@@ -23,7 +23,7 @@ class DepGetUseDeps(TestCase):
if slot:
cpv += ":" + slot
if isinstance(use, tuple):
- cpv += "[{}]".format(",".join(use))
+ cpv += f"[{','.join(use)}]"
self.assertEqual(dep_getusedeps(cpv), use)
else:
if len(use):
diff --git a/lib/portage/tests/dep/test_get_operator.py b/lib/portage/tests/dep/test_get_operator.py
index c2fc0a397..9cdf6196f 100644
--- a/lib/portage/tests/dep/test_get_operator.py
+++ b/lib/portage/tests/dep/test_get_operator.py
@@ -30,7 +30,7 @@ class GetOperator(TestCase):
self.assertEqual(
result,
test[1],
- msg="get_operator({}) != {}".format(test[0] + atom, test[1]),
+ msg=f"get_operator({test[0] + atom}) != {test[1]}",
)
result = get_operator("sys-apps/portage")
diff --git a/lib/portage/tests/dep/test_get_required_use_flags.py b/lib/portage/tests/dep/test_get_required_use_flags.py
index 02650b5be..b7aea0d18 100644
--- a/lib/portage/tests/dep/test_get_required_use_flags.py
+++ b/lib/portage/tests/dep/test_get_required_use_flags.py
@@ -41,7 +41,7 @@ class TestCheckRequiredUse(TestCase):
for required_use in test_cases_xfail:
self.assertRaisesMsg(
- "REQUIRED_USE: '{}'".format(required_use),
+ f"REQUIRED_USE: '{required_use}'",
InvalidDependString,
get_required_use_flags,
required_use,
diff --git a/lib/portage/tests/dep/test_isjustname.py b/lib/portage/tests/dep/test_isjustname.py
index 3ee0c86c3..9c062f552 100644
--- a/lib/portage/tests/dep/test_isjustname.py
+++ b/lib/portage/tests/dep/test_isjustname.py
@@ -19,10 +19,10 @@ class IsJustName(TestCase):
if len(ver):
self.assertFalse(
isjustname(cat + pkg + ver),
- msg="isjustname(%s) is True!" % (cat + pkg + ver),
+ msg=f"isjustname({cat + pkg + ver}) is True!",
)
else:
self.assertTrue(
isjustname(cat + pkg + ver),
- msg="isjustname(%s) is False!" % (cat + pkg + ver),
+ msg=f"isjustname({cat + pkg + ver}) is False!",
)
diff --git a/lib/portage/tests/dep/test_isvalidatom.py b/lib/portage/tests/dep/test_isvalidatom.py
index 100d9209c..adcf3d92f 100644
--- a/lib/portage/tests/dep/test_isvalidatom.py
+++ b/lib/portage/tests/dep/test_isvalidatom.py
@@ -230,5 +230,5 @@ class IsValidAtom(TestCase):
)
),
test_case.expected,
- msg="isvalidatom({}) != {}".format(test_case.atom, test_case.expected),
+ msg=f"isvalidatom({test_case.atom}) != {test_case.expected}",
)
diff --git a/lib/portage/tests/dep/test_match_from_list.py b/lib/portage/tests/dep/test_match_from_list.py
index 5a405d89f..c1241ab1d 100644
--- a/lib/portage/tests/dep/test_match_from_list.py
+++ b/lib/portage/tests/dep/test_match_from_list.py
@@ -16,7 +16,7 @@ class Package:
self.cp = atom.cp
slot = atom.slot
if atom.sub_slot:
- slot = "{}/{}".format(slot, atom.sub_slot)
+ slot = f"{slot}/{atom.sub_slot}"
if not slot:
slot = "0"
self.cpv = _pkg_str(atom.cpv, slot=slot, repo=atom.repo)
diff --git a/lib/portage/tests/dep/test_use_reduce.py b/lib/portage/tests/dep/test_use_reduce.py
index e17675eda..c60e71d58 100644
--- a/lib/portage/tests/dep/test_use_reduce.py
+++ b/lib/portage/tests/dep/test_use_reduce.py
@@ -54,7 +54,7 @@ class UseReduceTestCase:
subset=self.subset,
)
except InvalidDependString as e:
- raise InvalidDependString("{}: {}".format(e, self.deparray))
+ raise InvalidDependString(f"{e}: {self.deparray}")
class UseReduce(TestCase):
diff --git a/lib/portage/tests/ebuild/test_fetch.py b/lib/portage/tests/ebuild/test_fetch.py
index 859b14fad..47bc2a445 100644
--- a/lib/portage/tests/ebuild/test_fetch.py
+++ b/lib/portage/tests/ebuild/test_fetch.py
@@ -135,7 +135,7 @@ class EbuildFetchTestCase(TestCase):
fetch_bin = portage.process.find_binary(fetchcommand[0])
if fetch_bin is None:
self.skipTest(
- "FETCHCOMMAND not found: {}".format(playground.settings["FETCHCOMMAND"])
+ f"FETCHCOMMAND not found: {playground.settings['FETCHCOMMAND']}"
)
eubin = os.path.join(playground.eprefix, "usr", "bin")
os.symlink(fetch_bin, os.path.join(eubin, os.path.basename(fetch_bin)))
@@ -143,9 +143,7 @@ class EbuildFetchTestCase(TestCase):
resume_bin = portage.process.find_binary(resumecommand[0])
if resume_bin is None:
self.skipTest(
- "RESUMECOMMAND not found: {}".format(
- playground.settings["RESUMECOMMAND"]
- )
+ f"RESUMECOMMAND not found: {playground.settings['RESUMECOMMAND']}"
)
if resume_bin != fetch_bin:
os.symlink(resume_bin, os.path.join(eubin, os.path.basename(resume_bin)))
@@ -162,7 +160,7 @@ class EbuildFetchTestCase(TestCase):
for layout_lines in mirror_layouts:
settings = config(clone=playground.settings)
- layout_data = "".join("{}\n".format(line) for line in layout_lines)
+ layout_data = "".join(f"{line}\n" for line in layout_lines)
mirror_conf = MirrorLayoutConfig()
mirror_conf.read_from_file(io.StringIO(layout_data))
layouts = mirror_conf.get_all_layouts()
@@ -182,7 +180,7 @@ class EbuildFetchTestCase(TestCase):
for layout in layouts:
content["/distfiles/" + layout.get_path(filename)] = v
# upstream path
- content["/distfiles/{}.txt".format(k)] = v
+ content[f"/distfiles/{k}.txt"] = v
shutil.rmtree(settings["DISTDIR"])
os.makedirs(settings["DISTDIR"])
@@ -204,9 +202,7 @@ class EbuildFetchTestCase(TestCase):
# Demonstrate that fetch preserves a stale file in DISTDIR when no digests are given.
foo_uri = {
"foo": (
- "{scheme}://{host}:{port}/distfiles/foo".format(
- scheme=scheme, host=host, port=server.server_port
- ),
+ f"{scheme}://{host}:{server.server_port}/distfiles/foo",
)
}
foo_path = os.path.join(settings["DISTDIR"], "foo")
@@ -457,7 +453,7 @@ class EbuildFetchTestCase(TestCase):
self.assertEqual(f.read(), distfiles[k])
# Test PORTAGE_RO_DISTDIRS
- settings["PORTAGE_RO_DISTDIRS"] = '"{}"'.format(ro_distdir)
+ settings["PORTAGE_RO_DISTDIRS"] = f'"{ro_distdir}"'
orig_fetchcommand = settings["FETCHCOMMAND"]
orig_resumecommand = settings["RESUMECOMMAND"]
try:
@@ -580,13 +576,13 @@ class EbuildFetchTestCase(TestCase):
emdisopts, portdb, asyncio.get_event_loop()
) as emdisconf:
# Copy revisions from bar to foo.
- for revision_key in emdisconf.content_db["filename:{}".format("bar")]:
+ for revision_key in emdisconf.content_db["filename:bar"]:
emdisconf.content_db.add(
DistfileName("foo", digests=dict(revision_key))
)
# Copy revisions from foo to bar.
- for revision_key in emdisconf.content_db["filename:{}".format("foo")]:
+ for revision_key in emdisconf.content_db["filename:foo"]:
emdisconf.content_db.add(
DistfileName("bar", digests=dict(revision_key))
)
diff --git a/lib/portage/tests/ebuild/test_spawn.py b/lib/portage/tests/ebuild/test_spawn.py
index 433b53c81..2c9e50349 100644
--- a/lib/portage/tests/ebuild/test_spawn.py
+++ b/lib/portage/tests/ebuild/test_spawn.py
@@ -22,7 +22,7 @@ class SpawnTestCase(TestCase):
null_fd = os.open("/dev/null", os.O_RDWR)
test_string = 2 * "blah blah blah\n"
proc = SpawnProcess(
- args=[BASH_BINARY, "-c", "echo -n '%s'" % test_string],
+ args=[BASH_BINARY, "-c", f"echo -n '{test_string}'"],
env={},
fd_pipes={0: portage._get_stdin().fileno(), 1: null_fd, 2: null_fd},
scheduler=global_event_loop(),
diff --git a/lib/portage/tests/ebuild/test_use_expand_incremental.py b/lib/portage/tests/ebuild/test_use_expand_incremental.py
index 8421968f5..b09a2ac72 100644
--- a/lib/portage/tests/ebuild/test_use_expand_incremental.py
+++ b/lib/portage/tests/ebuild/test_use_expand_incremental.py
@@ -105,7 +105,7 @@ class UseExpandIncrementalTestCase(TestCase):
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
@@ -121,7 +121,7 @@ class UseExpandIncrementalTestCase(TestCase):
settings.setcpv(pkg)
expected = frozenset(expected_use)
got = frozenset(settings["PORTAGE_USE"].split())
- self.assertEqual(got, expected, "{} != {}".format(got, expected))
+ self.assertEqual(got, expected, f"{got} != {expected}")
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_config_protect.py b/lib/portage/tests/emerge/test_config_protect.py
index d4525f38b..745f1e37f 100644
--- a/lib/portage/tests/emerge/test_config_protect.py
+++ b/lib/portage/tests/emerge/test_config_protect.py
@@ -287,7 +287,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
index 7b07ce0b6..71f018fef 100644
--- a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
+++ b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
@@ -162,7 +162,7 @@ src_install() {
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index {} failed".format(i))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
if isinstance(args[0], dict):
@@ -185,7 +185,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.debug = False
diff --git a/lib/portage/tests/emerge/test_emerge_slot_abi.py b/lib/portage/tests/emerge/test_emerge_slot_abi.py
index 303259049..4dde205eb 100644
--- a/lib/portage/tests/emerge/test_emerge_slot_abi.py
+++ b/lib/portage/tests/emerge/test_emerge_slot_abi.py
@@ -164,7 +164,7 @@ class SlotAbiEmergeTestCase(TestCase):
for i, args in enumerate(test_commands):
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index {} failed".format(i))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
@@ -180,7 +180,7 @@ class SlotAbiEmergeTestCase(TestCase):
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_simple.py b/lib/portage/tests/emerge/test_simple.py
index 650da9a60..86200e35f 100644
--- a/lib/portage/tests/emerge/test_simple.py
+++ b/lib/portage/tests/emerge/test_simple.py
@@ -235,7 +235,7 @@ call_has_and_best_version() {
installed=installed,
debug=debug,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -566,7 +566,7 @@ call_has_and_best_version() {
binrepos_conf_file = os.path.join(os.sep, eprefix, BINREPOS_CONF_FILE)
with open(binrepos_conf_file, "w") as f:
f.write("[test-binhost]\n")
- f.write("sync-uri = {}\n".format(binhost_uri))
+ f.write(f"sync-uri = {binhost_uri}\n")
fetchcommand = portage.util.shlex_split(playground.settings["FETCHCOMMAND"])
fetch_bin = portage.process.find_binary(fetchcommand[0])
if fetch_bin is not None:
@@ -720,7 +720,7 @@ move dev-util/git dev-vcs/git
portage.writemsg(output)
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args {}".format(args)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
binhost_server.__exit__(None, None, None)
diff --git a/lib/portage/tests/env/config/test_PackageKeywordsFile.py b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
index 8f777795a..fb7e0d167 100644
--- a/lib/portage/tests/env/config/test_PackageKeywordsFile.py
+++ b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
@@ -34,7 +34,7 @@ class PackageKeywordsFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for c in self.cpv:
- f.write("{} {}\n".format(c, " ".join(self.keywords)))
+ f.write(f"{c} {' '.join(self.keywords)}\n")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PackageUseFile.py b/lib/portage/tests/env/config/test_PackageUseFile.py
index 2126de0a8..587ac7d04 100644
--- a/lib/portage/tests/env/config/test_PackageUseFile.py
+++ b/lib/portage/tests/env/config/test_PackageUseFile.py
@@ -30,7 +30,7 @@ class PackageUseFileTestCase(TestCase):
def BuildFile(self):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
- f.write("{} {}".format(self.cpv, " ".join(self.useflags)))
+ f.write(f"{self.cpv} {' '.join(self.useflags)}")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PortageModulesFile.py b/lib/portage/tests/env/config/test_PortageModulesFile.py
index fffe87ec9..8033eac47 100644
--- a/lib/portage/tests/env/config/test_PortageModulesFile.py
+++ b/lib/portage/tests/env/config/test_PortageModulesFile.py
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for k, v in self.items.items():
- f.write("{}={}\n".format(k, v))
+ f.write(f"{k}={v}\n")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/lint/test_import_modules.py b/lib/portage/tests/lint/test_import_modules.py
index 1cf277812..d0465f388 100644
--- a/lib/portage/tests/lint/test_import_modules.py
+++ b/lib/portage/tests/lint/test_import_modules.py
@@ -23,7 +23,7 @@ class ImportModulesTestCase(TestCase):
__import__(mod)
except ImportError as e:
if mod not in expected_failures:
- self.assertTrue(False, "failed to import '{}': {}".format(mod, e))
+ self.assertTrue(False, f"failed to import '{mod}': {e}")
del e
def _iter_modules(self, base_dir):
diff --git a/lib/portage/tests/news/test_NewsItem.py b/lib/portage/tests/news/test_NewsItem.py
index 5fca58674..d5fbc10e0 100644
--- a/lib/portage/tests/news/test_NewsItem.py
+++ b/lib/portage/tests/news/test_NewsItem.py
@@ -57,42 +57,42 @@ The revdep-rebuild tool is provided by app-portage/gentoolkit.
def testDisplayIfProfile(self):
tmpItem = self.fakeItem[:].replace(
- "#Display-If-Profile:", "Display-If-Profile: %s" % self.profile
+ "#Display-If-Profile:", f"Display-If-Profile: {self.profile}"
)
item = self._processItem(tmpItem)
try:
self.assertTrue(
item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ msg=f"Expected {tmpItem} to be relevant, but it was not!",
)
finally:
os.unlink(item.path)
def testDisplayIfInstalled(self):
tmpItem = self.fakeItem[:].replace(
- "#Display-If-Installed:", "Display-If-Installed: %s" % "sys-apps/portage"
+ "#Display-If-Installed:", f"Display-If-Installed: {'sys-apps/portage'}"
)
try:
item = self._processItem(tmpItem)
self.assertTrue(
item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ msg=f"Expected {tmpItem} to be relevant, but it was not!",
)
finally:
os.unlink(item.path)
def testDisplayIfKeyword(self):
tmpItem = self.fakeItem[:].replace(
- "#Display-If-Keyword:", "Display-If-Keyword: %s" % self.keywords
+ "#Display-If-Keyword:", f"Display-If-Keyword: {self.keywords}"
)
try:
item = self._processItem(tmpItem)
self.assertTrue(
item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ msg=f"Expected {tmpItem} to be relevant, but it was not!",
)
finally:
os.unlink(item.path)
@@ -106,4 +106,4 @@ The revdep-rebuild tool is provided by app-portage/gentoolkit.
try:
return NewsItem(filename, 0)
except TypeError:
- self.fail("Error while processing news item %s" % filename)
+ self.fail(f"Error while processing news item {filename}")
diff --git a/lib/portage/tests/process/test_PipeLogger.py b/lib/portage/tests/process/test_PipeLogger.py
index eb578758e..d4b5e6175 100644
--- a/lib/portage/tests/process/test_PipeLogger.py
+++ b/lib/portage/tests/process/test_PipeLogger.py
@@ -72,5 +72,5 @@ class PipeLoggerTestCase(TestCase):
self._testPipeLoggerToPipe(test_string, loop)
)
self.assertEqual(
- test_string, output, "x = {}, len(output) = {}".format(x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
diff --git a/lib/portage/tests/process/test_PopenProcess.py b/lib/portage/tests/process/test_PopenProcess.py
index f8cc8fda4..0aa9ce171 100644
--- a/lib/portage/tests/process/test_PopenProcess.py
+++ b/lib/portage/tests/process/test_PopenProcess.py
@@ -93,10 +93,10 @@ class PopenPipeTestCase(TestCase):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = {}, len(output) = {}".format(x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
output = self._testPipeLogger(test_string)
self.assertEqual(
- test_string, output, "x = {}, len(output) = {}".format(x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
diff --git a/lib/portage/tests/process/test_PopenProcessBlockingIO.py b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
index cf30856cd..5d29c8d0b 100644
--- a/lib/portage/tests/process/test_PopenProcessBlockingIO.py
+++ b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
@@ -69,5 +69,5 @@ class PopenPipeBlockingIOTestCase(TestCase):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = {}, len(output) = {}".format(x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
diff --git a/lib/portage/tests/process/test_poll.py b/lib/portage/tests/process/test_poll.py
index c4c330793..e55dc3c9b 100644
--- a/lib/portage/tests/process/test_poll.py
+++ b/lib/portage/tests/process/test_poll.py
@@ -103,7 +103,7 @@ class PipeReaderTestCase(TestCase):
self.assertEqual(
test_string,
output,
- "x = {}, len(output) = {}".format(x, len(output)),
+ f"x = {x}, len(output) = {len(output)}",
)
finally:
if cleanup is not None:
diff --git a/lib/portage/tests/process/test_unshare_net.py b/lib/portage/tests/process/test_unshare_net.py
index a5372434f..1f96bb8b0 100644
--- a/lib/portage/tests/process/test_unshare_net.py
+++ b/lib/portage/tests/process/test_unshare_net.py
@@ -31,7 +31,7 @@ class UnshareNetTestCase(TestCase):
errno_value = portage.process._unshare_validate(CLONE_NEWNET)
if errno_value != 0:
self.skipTest(
- "Unable to unshare: %s" % (errno.errorcode.get(errno_value, "?"))
+ f"Unable to unshare: {errno.errorcode.get(errno_value, '?')}"
)
env = os.environ.copy()
diff --git a/lib/portage/tests/resolver/ResolverPlayground.py b/lib/portage/tests/resolver/ResolverPlayground.py
index 6cd10acda..5323bb01c 100644
--- a/lib/portage/tests/resolver/ResolverPlayground.py
+++ b/lib/portage/tests/resolver/ResolverPlayground.py
@@ -266,7 +266,7 @@ class ResolverPlayground:
repo_name_file = os.path.join(profile_path, "repo_name")
with open(repo_name_file, "w") as f:
- f.write("%s\n" % repo)
+ f.write(f"{repo}\n")
return self._repositories[repo]["location"]
@@ -310,9 +310,9 @@ class ResolverPlayground:
with open(ebuild_path, "w") as f:
if copyright_header is not None:
f.write(copyright_header)
- f.write('EAPI="%s"\n' % eapi)
+ f.write(f'EAPI="{eapi}"\n')
for k, v in metadata.items():
- f.write('{}="{}"\n'.format(k, v))
+ f.write(f'{k}="{v}"\n')
if misc_content is not None:
f.write(misc_content)
@@ -332,7 +332,7 @@ class ResolverPlayground:
portdb = self.trees[self.eroot]["porttree"].dbapi
tmpsettings["O"] = ebuild_dir
if not digestgen(mysettings=tmpsettings, myportdb=portdb):
- raise AssertionError("digest creation failed for %s" % ebuild_path)
+ raise AssertionError(f"digest creation failed for {ebuild_path}")
def _create_binpkgs(self, binpkgs):
# When using BUILD_ID, there can be mutiple instances for the
@@ -369,13 +369,13 @@ class ResolverPlayground:
if "BUILD_ID" in metadata:
if binpkg_format == "xpak":
binpkg_path = os.path.join(
- category_dir, pn, "{}-{}.xpak".format(pf, metadata["BUILD_ID"])
+ category_dir, pn, f"{pf}-{metadata['BUILD_ID']}.xpak"
)
elif binpkg_format == "gpkg":
binpkg_path = os.path.join(
category_dir,
pn,
- "{}-{}.gpkg.tar".format(pf, metadata["BUILD_ID"]),
+ f"{pf}-{metadata['BUILD_ID']}.gpkg.tar",
)
else:
raise InvalidBinaryPackageFormat(binpkg_format)
@@ -433,13 +433,13 @@ class ResolverPlayground:
metadata["repository"] = repo
for k, v in metadata.items():
with open(os.path.join(vdb_pkg_dir, k), "w") as f:
- f.write("%s\n" % v)
+ f.write(f"{v}\n")
ebuild_path = os.path.join(vdb_pkg_dir, a.cpv.split("/")[1] + ".ebuild")
with open(ebuild_path, "w") as f:
- f.write('EAPI="%s"\n' % metadata.pop("EAPI", "0"))
+ f.write(f"EAPI=\"{metadata.pop('EAPI', '0')}\"\n")
for k, v in metadata.items():
- f.write('{}="{}"\n'.format(k, v))
+ f.write(f'{k}="{v}"\n')
env_path = os.path.join(vdb_pkg_dir, "environment.bz2")
with bz2.BZ2File(env_path, mode="w") as f:
@@ -492,7 +492,7 @@ class ResolverPlayground:
fnmatch.fnmatch(config_file, os.path.join(x, "*"))
for x in self.config_files
):
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
if config_file in ("layout.conf",):
file_name = os.path.join(repo_dir, "metadata", config_file)
@@ -504,7 +504,7 @@ class ResolverPlayground:
os.makedirs(os.path.dirname(file_name))
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Temporarily write empty value of masters until it becomes default.
# TODO: Delete all references to "# use implicit masters" when empty value becomes default.
if config_file == "layout.conf" and not any(
@@ -519,12 +519,12 @@ class ResolverPlayground:
for eclass_name, eclass_content in eclasses.items():
with open(
- os.path.join(eclass_dir, "{}.eclass".format(eclass_name)), "w"
+ os.path.join(eclass_dir, f"{eclass_name}.eclass"), "w"
) as f:
if isinstance(eclass_content, str):
eclass_content = [eclass_content]
for line in eclass_content:
- f.write("{}\n".format(line))
+ f.write(f"{line}\n")
# Temporarily write empty value of masters until it becomes default.
if not repo_config or "layout.conf" not in repo_config:
@@ -560,12 +560,12 @@ class ResolverPlayground:
if profile:
for config_file, lines in profile.items():
if config_file not in self.config_files:
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
file_name = os.path.join(sub_profile_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Create profile symlink
os.symlink(
@@ -620,12 +620,12 @@ class ResolverPlayground:
for config_file, lines in configs.items():
if config_file not in self.config_files:
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
file_name = os.path.join(user_config_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Create /usr/share/portage/config/make.globals
make_globals_path = os.path.join(
@@ -661,7 +661,7 @@ class ResolverPlayground:
file_name = os.path.join(set_config_dir, sets_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
def _create_world(self, world, world_sets):
# Create /var/lib/portage/world
@@ -673,11 +673,11 @@ class ResolverPlayground:
with open(world_file, "w") as f:
for atom in world:
- f.write("%s\n" % atom)
+ f.write(f"{atom}\n")
with open(world_set_file, "w") as f:
for atom in world_sets:
- f.write("%s\n" % atom)
+ f.write(f"{atom}\n")
def _load_config(self):
@@ -782,7 +782,7 @@ class ResolverPlayground:
portdb = self.trees[eroot]["porttree"].dbapi
portdb.close_caches()
if self.debug:
- print("\nEROOT=%s" % self.eroot)
+ print(f"\nEROOT={self.eroot}")
else:
shutil.rmtree(self.eroot)
if hasattr(self, "_orig_eprefix"):
@@ -816,7 +816,7 @@ class ResolverPlaygroundTestCase:
checks = dict.fromkeys(result.checks)
for key, value in self._checks.items():
if not key in checks:
- raise KeyError("Not an available check: '%s'" % key)
+ raise KeyError(f"Not an available check: '{key}'")
checks[key] = value
fail_msgs = []
@@ -976,14 +976,14 @@ def _mergelist_str(x, depgraph):
repo_str = _repo_separator + x.repo
build_id_str = ""
if x.type_name == "binary" and x.cpv.build_id is not None:
- build_id_str = "-%s" % x.cpv.build_id
+ build_id_str = f"-{x.cpv.build_id}"
mergelist_str = x.cpv + build_id_str + repo_str
if x.built:
if x.operation == "merge":
desc = x.type_name
else:
desc = x.operation
- mergelist_str = "[{}]{}".format(desc, mergelist_str)
+ mergelist_str = f"[{desc}]{mergelist_str}"
if x.root != depgraph._frozen_config._running_root.root:
mergelist_str += "{targetroot}"
return mergelist_str
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
index 77fdac425..d1ecc078c 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
@@ -148,7 +148,7 @@ class BuildIdProfileFormatTestCase(TestCase):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
_user_config = user_config.copy()
- _user_config["make.conf"] += ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
debug=False,
binpkgs=binpkgs,
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
index d7d70fd5b..3c5d4f7a9 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
@@ -108,7 +108,7 @@ class RebuiltBinariesCase(TestCase):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
_user_config = user_config.copy()
- _user_config["make.conf"] += ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
debug=False,
binpkgs=binpkgs,
diff --git a/lib/portage/tests/resolver/soname/test_autounmask.py b/lib/portage/tests/resolver/soname/test_autounmask.py
index ecf4b61cb..d547ebcf5 100644
--- a/lib/portage/tests/resolver/soname/test_autounmask.py
+++ b/lib/portage/tests/resolver/soname/test_autounmask.py
@@ -99,7 +99,7 @@ class SonameAutoUnmaskTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/soname/test_downgrade.py b/lib/portage/tests/resolver/soname/test_downgrade.py
index d5a951694..da2d90305 100644
--- a/lib/portage/tests/resolver/soname/test_downgrade.py
+++ b/lib/portage/tests/resolver/soname/test_downgrade.py
@@ -133,7 +133,7 @@ class SonameDowngradeTestCase(TestCase):
with self.subTest(binpkg_format=binpkg_format):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
- user_config["make.conf"] = ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
binpkgs=binpkgs,
ebuilds=ebuilds,
@@ -232,7 +232,7 @@ class SonameDowngradeTestCase(TestCase):
with self.subTest(binpkg_format=binpkg_format):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
- user_config["make.conf"] = ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
ebuilds=ebuilds,
binpkgs=binpkgs,
diff --git a/lib/portage/tests/resolver/soname/test_or_choices.py b/lib/portage/tests/resolver/soname/test_or_choices.py
index 5c8f35295..66b02a1b7 100644
--- a/lib/portage/tests/resolver/soname/test_or_choices.py
+++ b/lib/portage/tests/resolver/soname/test_or_choices.py
@@ -97,7 +97,7 @@ class SonameOrChoicesTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/soname/test_reinstall.py b/lib/portage/tests/resolver/soname/test_reinstall.py
index b50105688..f548d619a 100644
--- a/lib/portage/tests/resolver/soname/test_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_reinstall.py
@@ -86,7 +86,7 @@ class SonameReinstallTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/soname/test_skip_update.py b/lib/portage/tests/resolver/soname/test_skip_update.py
index dabbb781a..8b2505e64 100644
--- a/lib/portage/tests/resolver/soname/test_skip_update.py
+++ b/lib/portage/tests/resolver/soname/test_skip_update.py
@@ -85,7 +85,7 @@ class SonameSkipUpdateTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
index 05ffc41db..c44bfb0bb 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
@@ -94,7 +94,7 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
@@ -140,21 +140,21 @@ class SonameSlotConflictReinstallTestCase(TestCase):
expected_mergelist = ["[binary]app-misc/A-1", "[binary]app-misc/B-2"]
for i in range(5):
- binpkgs["app-misc/C%sC-1" % i] = {
+ binpkgs[f"app-misc/C{i}C-1"] = {
"DEPEND": "app-misc/B",
"RDEPEND": "app-misc/B",
"REQUIRES": "x86_32: libB-2.so",
}
- installed["app-misc/C%sC-1" % i] = {
+ installed[f"app-misc/C{i}C-1"] = {
"DEPEND": "app-misc/B",
"RDEPEND": "app-misc/B",
"REQUIRES": "x86_32: libB-1.so",
}
for x in ("DEPEND", "RDEPEND"):
- binpkgs["app-misc/A-1"][x] += " app-misc/C%sC" % i
+ binpkgs["app-misc/A-1"][x] += f" app-misc/C{i}C"
- expected_mergelist.append("[binary]app-misc/C%sC-1" % i)
+ expected_mergelist.append(f"[binary]app-misc/C{i}C-1")
test_cases = (
ResolverPlaygroundTestCase(
@@ -185,7 +185,7 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -281,7 +281,7 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -368,7 +368,7 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
index 0b581cb2d..07874b422 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
@@ -102,7 +102,7 @@ class SonameSlotConflictUpdateTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/soname/test_soname_provided.py b/lib/portage/tests/resolver/soname/test_soname_provided.py
index c2ead9bbb..0ac605ce6 100644
--- a/lib/portage/tests/resolver/soname/test_soname_provided.py
+++ b/lib/portage/tests/resolver/soname/test_soname_provided.py
@@ -77,7 +77,7 @@ class SonameProvidedTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfiable.py b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
index 2b6e07f4a..d66129324 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfiable.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
@@ -71,7 +71,7 @@ class SonameUnsatisfiableTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfied.py b/lib/portage/tests/resolver/soname/test_unsatisfied.py
index 392db4c9e..172a22a7d 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfied.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfied.py
@@ -84,7 +84,7 @@ class SonameUnsatisfiedTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
index 043ca7cea..40dc48a7b 100644
--- a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
+++ b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
@@ -69,7 +69,7 @@ class AutounmaskBinpkgUseTestCase(TestCase):
installed=installed,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_bdeps.py b/lib/portage/tests/resolver/test_bdeps.py
index ded50fc51..ee98c6d7e 100644
--- a/lib/portage/tests/resolver/test_bdeps.py
+++ b/lib/portage/tests/resolver/test_bdeps.py
@@ -198,7 +198,7 @@ class BdepsTestCase(TestCase):
binpkgs=binpkgs,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
index 835c93310..93f0c3f3f 100644
--- a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
+++ b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
@@ -138,7 +138,7 @@ class BinaryPkgEbuildVisibilityTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_changed_deps.py b/lib/portage/tests/resolver/test_changed_deps.py
index f845d4bba..6d7941326 100644
--- a/lib/portage/tests/resolver/test_changed_deps.py
+++ b/lib/portage/tests/resolver/test_changed_deps.py
@@ -118,7 +118,7 @@ class ChangedDepsTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
index ce8eb3b64..54d28f7c9 100644
--- a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
+++ b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
@@ -70,7 +70,7 @@ class CompeteIfNewSubSlotWithoutRevBumpTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_disjunctive_depend_order.py b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
index 1f94386c7..110259465 100644
--- a/lib/portage/tests/resolver/test_disjunctive_depend_order.py
+++ b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
@@ -84,7 +84,7 @@ class DisjunctiveDependOrderTestCase(TestCase):
binpkgs=binpkgs,
ebuilds=ebuilds,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_multirepo.py b/lib/portage/tests/resolver/test_multirepo.py
index 1c0002b27..3ab665e6f 100644
--- a/lib/portage/tests/resolver/test_multirepo.py
+++ b/lib/portage/tests/resolver/test_multirepo.py
@@ -250,7 +250,7 @@ class MultirepoTestCase(TestCase):
installed=installed,
sets=sets,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -403,7 +403,7 @@ class MultirepoTestCase(TestCase):
with self.subTest(binpkg_format=binpkg_format):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
- user_config["make.conf"] = ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
ebuilds=ebuilds, installed=installed, user_config=user_config
)
diff --git a/lib/portage/tests/resolver/test_package_tracker.py b/lib/portage/tests/resolver/test_package_tracker.py
index 5502c688f..93578a596 100644
--- a/lib/portage/tests/resolver/test_package_tracker.py
+++ b/lib/portage/tests/resolver/test_package_tracker.py
@@ -20,7 +20,7 @@ class PackageTrackerTestCase(TestCase):
def make_pkg(self, root, atom, repo="test_repo"):
atom = Atom(atom)
- slot_atom = Atom("{}:{}".format(atom.cp, atom.slot))
+ slot_atom = Atom(f"{atom.cp}:{atom.slot}")
slot = atom.slot
return self.FakePackage(
diff --git a/lib/portage/tests/resolver/test_profile_default_eapi.py b/lib/portage/tests/resolver/test_profile_default_eapi.py
index f85a72df0..f09c5e810 100644
--- a/lib/portage/tests/resolver/test_profile_default_eapi.py
+++ b/lib/portage/tests/resolver/test_profile_default_eapi.py
@@ -113,7 +113,7 @@ class ProfileDefaultEAPITestCase(TestCase):
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
diff --git a/lib/portage/tests/resolver/test_profile_package_set.py b/lib/portage/tests/resolver/test_profile_package_set.py
index ba1fbd863..725e0808f 100644
--- a/lib/portage/tests/resolver/test_profile_package_set.py
+++ b/lib/portage/tests/resolver/test_profile_package_set.py
@@ -103,7 +103,7 @@ class ProfilePackageSetTestCase(TestCase):
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
diff --git a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
index db8cdcb0c..0456e28d3 100644
--- a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
@@ -57,7 +57,7 @@ class RegularSlotChangeWithoutRevBumpTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_simple.py b/lib/portage/tests/resolver/test_simple.py
index 854cf31e0..3c75d9707 100644
--- a/lib/portage/tests/resolver/test_simple.py
+++ b/lib/portage/tests/resolver/test_simple.py
@@ -88,7 +88,7 @@ class SimpleResolverTestCase(TestCase):
binpkgs=binpkgs,
installed=installed,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_slot_abi.py b/lib/portage/tests/resolver/test_slot_abi.py
index 7fa917762..d71f47e30 100644
--- a/lib/portage/tests/resolver/test_slot_abi.py
+++ b/lib/portage/tests/resolver/test_slot_abi.py
@@ -133,7 +133,7 @@ class SlotAbiTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -268,7 +268,7 @@ class SlotAbiTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -482,7 +482,7 @@ class SlotAbiTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_slot_abi_downgrade.py b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
index ca4ce50cf..896215f29 100644
--- a/lib/portage/tests/resolver/test_slot_abi_downgrade.py
+++ b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
@@ -111,7 +111,7 @@ class SlotAbiDowngradeTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
@@ -222,7 +222,7 @@ class SlotAbiDowngradeTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/resolver/test_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
index d324ec3d1..037550e1a 100644
--- a/lib/portage/tests/resolver/test_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
@@ -86,7 +86,7 @@ class SlotChangeWithoutRevBumpTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_slot_conflict_rebuild.py b/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
index a3327d9fa..52f4aa184 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
@@ -100,21 +100,21 @@ class SlotConflictRebuildTestCase(TestCase):
expected_mergelist = ["app-misc/A-1", "app-misc/B-2"]
for i in range(5):
- ebuilds["app-misc/C%sC-1" % i] = {
+ ebuilds[f"app-misc/C{i}C-1"] = {
"EAPI": "5",
"DEPEND": "app-misc/B:=",
"RDEPEND": "app-misc/B:=",
}
- installed["app-misc/C%sC-1" % i] = {
+ installed[f"app-misc/C{i}C-1"] = {
"EAPI": "5",
"DEPEND": "app-misc/B:1/1=",
"RDEPEND": "app-misc/B:1/1=",
}
for x in ("DEPEND", "RDEPEND"):
- ebuilds["app-misc/A-1"][x] += " app-misc/C%sC" % i
+ ebuilds["app-misc/A-1"][x] += f" app-misc/C{i}C"
- expected_mergelist.append("app-misc/C%sC-1" % i)
+ expected_mergelist.append(f"app-misc/C{i}C-1")
test_cases = (
ResolverPlaygroundTestCase(
diff --git a/lib/portage/tests/resolver/test_slot_operator_autounmask.py b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
index 77ba7e2c4..88071fe25 100644
--- a/lib/portage/tests/resolver/test_slot_operator_autounmask.py
+++ b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
@@ -124,7 +124,7 @@ class SlotOperatorAutoUnmaskTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_slot_operator_bdeps.py b/lib/portage/tests/resolver/test_slot_operator_bdeps.py
index 6f0e5f7e1..13d08072d 100644
--- a/lib/portage/tests/resolver/test_slot_operator_bdeps.py
+++ b/lib/portage/tests/resolver/test_slot_operator_bdeps.py
@@ -100,7 +100,7 @@ class SlotOperatorBdependTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
@@ -202,7 +202,7 @@ class SlotOperatorBdependTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_slot_operator_rebuild.py b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
index 3bf9cc497..4e9543747 100644
--- a/lib/portage/tests/resolver/test_slot_operator_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
@@ -87,7 +87,7 @@ class SlotOperatorRebuildTestCase(TestCase):
world=world,
debug=False,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
try:
diff --git a/lib/portage/tests/resolver/test_slot_operator_unsolved.py b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
index 4576eb13b..2933be7f8 100644
--- a/lib/portage/tests/resolver/test_slot_operator_unsolved.py
+++ b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
@@ -80,7 +80,7 @@ class SlotOperatorUnsolvedTestCase(TestCase):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
_user_config = user_config.copy()
- _user_config["make.conf"] += ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
ebuilds=ebuilds,
binpkgs=binpkgs,
diff --git a/lib/portage/tests/resolver/test_useflags.py b/lib/portage/tests/resolver/test_useflags.py
index 2da63a681..0af1cb558 100644
--- a/lib/portage/tests/resolver/test_useflags.py
+++ b/lib/portage/tests/resolver/test_useflags.py
@@ -126,7 +126,7 @@ class UseFlagsTestCase(TestCase):
with self.subTest(binpkg_format=binpkg_format):
print(colorize("HILITE", binpkg_format), end=" ... ")
sys.stdout.flush()
- user_config["make.conf"] = ('BINPKG_FORMAT="%s"' % binpkg_format,)
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
playground = ResolverPlayground(
ebuilds=ebuilds,
binpkgs=binpkgs,
diff --git a/lib/portage/tests/sets/shell/testShell.py b/lib/portage/tests/sets/shell/testShell.py
index dcbdafeb8..01c4831cd 100644
--- a/lib/portage/tests/sets/shell/testShell.py
+++ b/lib/portage/tests/sets/shell/testShell.py
@@ -22,7 +22,7 @@ class CommandOutputSetTestCase(TestCase):
command = find_binary("bash")
command += " -c '"
for a in params:
- command += ' echo -e "%s" ; ' % a
+ command += f' echo -e "{a}" ; '
command += "'"
s = CommandOutputSet(command)
atoms = s.getAtoms()
diff --git a/lib/portage/tests/sync/test_sync_local.py b/lib/portage/tests/sync/test_sync_local.py
index 914014216..b83f88963 100644
--- a/lib/portage/tests/sync/test_sync_local.py
+++ b/lib/portage/tests/sync/test_sync_local.py
@@ -94,9 +94,7 @@ class SyncLocalTestCase(TestCase):
break
else:
raise AssertionError(
- "{} binary not found in {} or {}".format(
- cmd, self.bindir, self.sbindir
- )
+ f"{cmd} binary not found in {self.bindir} or {self.sbindir}"
)
git_binary = find_binary("git")
@@ -141,7 +139,7 @@ class SyncLocalTestCase(TestCase):
) as f:
f.write(
bump_timestamp.timestamp.strftime(
- "%s\n" % TIMESTAMP_FORMAT,
+ f"{TIMESTAMP_FORMAT}\n",
)
)
@@ -324,7 +322,7 @@ class SyncLocalTestCase(TestCase):
def hg_init_global_config():
with open(os.path.join(homedir, ".hgrc"), "w") as f:
f.write(
- "[ui]\nusername = {} <{}>\n".format(committer_name, committer_email)
+ f"[ui]\nusername = {committer_name} <{committer_email}>\n"
)
hg_repo_create = (
@@ -417,7 +415,7 @@ class SyncLocalTestCase(TestCase):
with open(timestamp_path, "w") as f:
f.write(
bump_timestamp.timestamp.strftime(
- "%s\n" % TIMESTAMP_FORMAT,
+ f"{TIMESTAMP_FORMAT}\n",
)
)
@@ -483,11 +481,7 @@ class SyncLocalTestCase(TestCase):
self.assertEqual(
os.EX_OK,
proc.returncode,
- "%s failed in %s"
- % (
- cmd,
- cwd,
- ),
+ f"{cmd} failed in {cwd}",
)
finally:
diff --git a/lib/portage/tests/unicode/test_string_format.py b/lib/portage/tests/unicode/test_string_format.py
index 142ad17a2..528717408 100644
--- a/lib/portage/tests/unicode/test_string_format.py
+++ b/lib/portage/tests/unicode/test_string_format.py
@@ -28,11 +28,11 @@ class StringFormatTestCase(TestCase):
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
dependency_arg = DependencyArg(arg=arg_unicode)
- formatted_str = "{}".format(dependency_arg)
+ formatted_str = f"{dependency_arg}"
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "{}".format(dependency_arg)
+ formatted_str = f"{dependency_arg}"
self.assertEqual(formatted_str, arg_unicode)
def testPortageException(self):
@@ -43,11 +43,11 @@ class StringFormatTestCase(TestCase):
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
e = PortageException(arg_unicode)
- formatted_str = "{}".format(e)
+ formatted_str = f"{e}"
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "{}".format(e)
+ formatted_str = f"{e}"
self.assertEqual(formatted_str, arg_unicode)
def testUseFlagDisplay(self):
@@ -59,9 +59,9 @@ class StringFormatTestCase(TestCase):
for arg_unicode in self.unicode_strings:
e = UseFlagDisplay(arg_unicode, enabled, forced)
- formatted_str = "{}".format(e)
+ formatted_str = f"{e}"
self.assertEqual(isinstance(formatted_str, str), True)
# Test the __str__ method which returns unicode in python3
- formatted_str = "{}".format(e)
+ formatted_str = f"{e}"
self.assertEqual(isinstance(formatted_str, str), True)
diff --git a/lib/portage/tests/update/test_move_ent.py b/lib/portage/tests/update/test_move_ent.py
index d026a82f8..31b1ff4ea 100644
--- a/lib/portage/tests/update/test_move_ent.py
+++ b/lib/portage/tests/update/test_move_ent.py
@@ -59,7 +59,7 @@ class MoveEntTestCase(TestCase):
ebuilds=ebuilds,
installed=installed,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/update/test_move_slot_ent.py b/lib/portage/tests/update/test_move_slot_ent.py
index baa169bc3..a66497d37 100644
--- a/lib/portage/tests/update/test_move_slot_ent.py
+++ b/lib/portage/tests/update/test_move_slot_ent.py
@@ -87,7 +87,7 @@ class MoveSlotEntTestCase(TestCase):
ebuilds=ebuilds,
installed=installed,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/update/test_update_dbentry.py b/lib/portage/tests/update/test_update_dbentry.py
index 695a246a2..ae9d0d66b 100644
--- a/lib/portage/tests/update/test_update_dbentry.py
+++ b/lib/portage/tests/update/test_update_dbentry.py
@@ -236,7 +236,7 @@ class UpdateDbentryTestCase(TestCase):
installed=installed,
world=world,
user_config={
- "make.conf": ('BINPKG_FORMAT="%s"' % binpkg_format,),
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
},
)
diff --git a/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py b/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
index 7806b56d7..eef7ad5b8 100644
--- a/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
+++ b/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
@@ -68,7 +68,7 @@ sys.exit(os.EX_OK)
except Exception:
portage.writemsg(
"".join(
- "{}\n".format(line)
+ f"{line}\n"
for line in out.decode(errors="replace").splitlines()[:50]
),
noiselevel=-1,
diff --git a/lib/portage/tests/util/test_getconfig.py b/lib/portage/tests/util/test_getconfig.py
index 8486938f6..ae79b2105 100644
--- a/lib/portage/tests/util/test_getconfig.py
+++ b/lib/portage/tests/util/test_getconfig.py
@@ -71,9 +71,9 @@ class GetConfigTestCase(TestCase):
# Format like env_update formats /etc/profile.env.
for k, v in cases.items():
if v.startswith("$") and not v.startswith("${"):
- line = "export {}=$'{}'\n".format(k, v[1:])
+ line = f"export {k}=$'{v[1:]}'\n"
else:
- line = "export {}='{}'\n".format(k, v)
+ line = f"export {k}='{v}'\n"
f.write(_unicode_encode(line))
f.flush()
diff --git a/lib/portage/tests/util/test_install_mask.py b/lib/portage/tests/util/test_install_mask.py
index 0ed98b3a5..1bc4461d6 100644
--- a/lib/portage/tests/util/test_install_mask.py
+++ b/lib/portage/tests/util/test_install_mask.py
@@ -188,10 +188,10 @@ class InstallMaskTestCase(TestCase):
install_mask = InstallMask("/foo/")
install_mask_dir(tmp_dir, install_mask)
self.assertFalse(
- os.path.lexists(link_name), "failed to remove {}".format(link_name)
+ os.path.lexists(link_name), f"failed to remove {link_name}"
)
self.assertFalse(
- os.path.lexists(base_dir), "failed to remove {}".format(base_dir)
+ os.path.lexists(base_dir), f"failed to remove {base_dir}"
)
finally:
shutil.rmtree(tmp_dir)
diff --git a/lib/portage/tests/util/test_varExpand.py b/lib/portage/tests/util/test_varExpand.py
index b2cbee009..94b656acc 100644
--- a/lib/portage/tests/util/test_varExpand.py
+++ b/lib/portage/tests/util/test_varExpand.py
@@ -11,7 +11,7 @@ class VarExpandTestCase(TestCase):
varDict = {"a": "5", "b": "7", "c": "-5"}
for key in varDict:
- result = varexpand("$%s" % key, varDict)
+ result = varexpand(f"${key}", varDict)
self.assertFalse(
result != varDict[key],
@@ -56,8 +56,7 @@ class VarExpandTestCase(TestCase):
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandDoubleQuotes(self):
@@ -68,8 +67,7 @@ class VarExpandTestCase(TestCase):
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandSingleQuotes(self):
@@ -80,8 +78,7 @@ class VarExpandTestCase(TestCase):
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandFail(self):
@@ -91,7 +88,7 @@ class VarExpandTestCase(TestCase):
testVars = ["fail"]
for var in testVars:
- result = varexpand("$%s" % var, varDict)
+ result = varexpand(f"${var}", varDict)
self.assertFalse(
len(result),
msg="Got %s == %s, from varexpand(%s, %s)"
diff --git a/lib/portage/tests/util/test_xattr.py b/lib/portage/tests/util/test_xattr.py
index 49d7b756e..59ff7b40f 100644
--- a/lib/portage/tests/util/test_xattr.py
+++ b/lib/portage/tests/util/test_xattr.py
@@ -167,4 +167,4 @@ class StandardTest(TestCase):
"""Make sure the exported API matches"""
for mod in self.MODULES:
for f in self.FUNCS:
- self.assertTrue(hasattr(mod, f), "{} func missing in {}".format(f, mod))
+ self.assertTrue(hasattr(mod, f), f"{f} func missing in {mod}")
diff --git a/lib/portage/tests/versions/test_vercmp.py b/lib/portage/tests/versions/test_vercmp.py
index d8f12a833..45777efd9 100644
--- a/lib/portage/tests/versions/test_vercmp.py
+++ b/lib/portage/tests/versions/test_vercmp.py
@@ -27,7 +27,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) <= 0,
- msg="{} < {}? Wrong!".format(test[0], test[1]),
+ msg=f"{test[0]} < {test[1]}? Wrong!",
)
def testVerCmpLess(self):
@@ -57,7 +57,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) >= 0,
- msg="{} > {}? Wrong!".format(test[0], test[1]),
+ msg=f"{test[0]} > {test[1]}? Wrong!",
)
def testVerCmpEqual(self):
@@ -73,7 +73,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) != 0,
- msg="{} != {}? Wrong!".format(test[0], test[1]),
+ msg=f"{test[0]} != {test[1]}? Wrong!",
)
def testVerNotEqual(self):
@@ -96,5 +96,5 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) == 0,
- msg="{} == {}? Wrong!".format(test[0], test[1]),
+ msg=f"{test[0]} == {test[1]}? Wrong!",
)
diff --git a/lib/portage/update.py b/lib/portage/update.py
index 64dfad7e9..fe9f5a753 100644
--- a/lib/portage/update.py
+++ b/lib/portage/update.py
@@ -430,7 +430,7 @@ def update_config_files(
contents.insert(
pos + 1,
line.replace(
- "{}".format(atom), "{}".format(new_atom), 1
+ f"{atom}", f"{new_atom}", 1
),
)
# we've inserted an additional line, so we need to
@@ -450,10 +450,10 @@ def update_config_files(
try:
write_atomic(updating_file, "".join(file_contents[x]))
except PortageException as e:
- writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"\n!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! An error occurred while updating a config file:")
- + " '%s'\n" % updating_file,
+ + f" '{updating_file}'\n",
noiselevel=-1,
)
continue
--git a/lib/portage/util/ExtractKernelVersion.py b/lib/portage/util/ExtractKernelVersion.py
index 66e275994..5914dd020 100644
--- a/lib/portage/util/ExtractKernelVersion.py
+++ b/lib/portage/util/ExtractKernelVersion.py
@@ -79,7 +79,7 @@ def ExtractKernelVersion(base_dir):
for file_path, file_errors in loader_errors.items():
for error_str in file_errors:
writemsg_level(
- "{}: {}\n".format(file_path, error_str),
+ f"{file_path}: {error_str}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/__init__.py b/lib/portage/util/__init__.py
index b6ced6a4f..4c3c3e44f 100644
--- a/lib/portage/util/__init__.py
+++ b/lib/portage/util/__init__.py
@@ -706,7 +706,7 @@ def writedict(mydict, myfilename, writekey=True):
lines.append(v + "\n")
else:
for k, v in mydict.items():
- lines.append("{} {}\n".format(k, " ".join(v)))
+ lines.append(f"{k} {' '.join(v)}\n")
write_atomic(myfilename, "".join(lines))
@@ -736,7 +736,7 @@ class _getconfig_shlex(shlex.shlex):
if e.errno == PermissionDenied.errno:
raise PermissionDenied(newfile)
if e.errno not in (errno.ENOENT, errno.ENOTDIR):
- writemsg("open('{}', 'r'): {}\n".format(newfile, e), noiselevel=-1)
+ writemsg(f"open('{newfile}', 'r'): {e}\n", noiselevel=-1)
raise
msg = self.error_leader()
@@ -746,7 +746,7 @@ class _getconfig_shlex(shlex.shlex):
msg += _("%s: No such file or directory") % newfile
if self.__portage_tolerant:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
else:
raise ParseError(msg)
return (newfile, io.StringIO())
@@ -801,7 +801,7 @@ def getconfig(
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mycfg)
if e.errno != errno.ENOENT:
- writemsg("open('{}', 'r'): {}\n".format(mycfg, e), noiselevel=-1)
+ writemsg(f"open('{mycfg}', 'r'): {e}\n", noiselevel=-1)
if e.errno not in (errno.EISDIR,):
raise
return None
@@ -864,7 +864,7 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
elif equ != "=":
@@ -872,7 +872,7 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
val = _unicode_decode(lex.get_token())
@@ -883,14 +883,14 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
if _invalid_var_name_re.search(key) is not None:
msg = lex.error_leader() + _("Invalid variable name '%s'") % (key,)
if not tolerant:
raise ParseError(msg)
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
continue
if expand:
@@ -905,8 +905,8 @@ def getconfig(
except Exception as e:
if isinstance(e, ParseError) or lex is None:
raise
- msg = "{}{}".format(lex.error_leader(), e)
- writemsg("%s\n" % msg, noiselevel=-1)
+ msg = f"{lex.error_leader()}{e}"
+ writemsg(f"{msg}\n", noiselevel=-1)
raise
return mykeys
@@ -1082,7 +1082,7 @@ def dump_traceback(msg, noiselevel=1):
stack = traceback.extract_tb(info[2])
error = str(info[1])
writemsg("\n====================================\n", noiselevel=noiselevel)
- writemsg("%s\n\n" % msg, noiselevel=noiselevel)
+ writemsg(f"{msg}\n\n", noiselevel=noiselevel)
for line in traceback.format_list(stack):
writemsg(line, noiselevel=noiselevel)
if error:
@@ -1125,7 +1125,7 @@ class cmp_sort_key:
def __lt__(self, other):
if other.__class__ is not self.__class__:
raise TypeError(
- "Expected type {}, got {}".format(self.__class__, other.__class__)
+ f"Expected type {self.__class__}, got {other.__class__}"
)
return self._cmp_func(self._obj, other._obj) < 0
@@ -1192,7 +1192,7 @@ def _do_stat(filename, follow_links=True):
return os.stat(filename)
return os.lstat(filename)
except OSError as oe:
- func_call = "stat('%s')" % filename
+ func_call = f"stat('{filename}')"
if oe.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
if oe.errno == errno.EACCES:
@@ -1287,7 +1287,7 @@ def apply_permissions(
os.chmod(filename, new_mode)
modified = True
except OSError as oe:
- func_call = "chmod('{}', {})".format(filename, oct(new_mode))
+ func_call = f"chmod('{filename}', {oct(new_mode)})"
if oe.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
elif oe.errno == errno.EACCES:
@@ -1560,7 +1560,7 @@ def write_atomic(file_path, content, **kwargs):
except OSError as e:
if f:
f.abort()
- func_call = "write_atomic('%s')" % file_path
+ func_call = f"write_atomic('{file_path}')"
if e.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
elif e.errno == errno.EACCES:
@@ -1589,7 +1589,7 @@ def ensure_dirs(dir_path, **kwargs):
os.makedirs(dir_path)
created_dir = True
except OSError as oe:
- func_call = "makedirs('%s')" % dir_path
+ func_call = f"makedirs('{dir_path}')"
if oe.errno in (errno.EEXIST,):
pass
else:
@@ -1718,11 +1718,7 @@ class LazyItemsDict(UserDict):
raise TypeError(
"LazyItemsDict "
+ "deepcopy is unsafe with lazy items that are "
- + "not singletons: key=%s value=%s"
- % (
- k,
- lazy_item,
- )
+ + f"not singletons: key={k} value={lazy_item}"
)
UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
return result
@@ -1962,7 +1958,7 @@ def find_updated_config_files(target_root, config_protect):
if stat.S_ISDIR(mymode):
mycommand = (
- "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
+ f"find '{x}' -name '.*' -type d -prune -o -name '._cfg????_*'"
)
else:
mycommand = (
diff --git a/lib/portage/util/_dyn_libs/LinkageMapELF.py b/lib/portage/util/_dyn_libs/LinkageMapELF.py
index 2845e494d..197ebacea 100644
--- a/lib/portage/util/_dyn_libs/LinkageMapELF.py
+++ b/lib/portage/util/_dyn_libs/LinkageMapELF.py
@@ -309,7 +309,7 @@ class LinkageMapELF:
entry = NeededEntry.parse("scanelf", l)
except InvalidData as e:
writemsg_level(
- "\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1
+ f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1
)
continue
try:
@@ -392,7 +392,7 @@ class LinkageMapELF:
try:
entry = NeededEntry.parse(location, l)
except InvalidData as e:
- writemsg_level("\n{}\n\n".format(e), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1)
continue
# If NEEDED.ELF.2 contains the new multilib category field,
@@ -409,7 +409,7 @@ class LinkageMapELF:
expand = {"ORIGIN": os.path.dirname(entry.filename)}
entry.runpaths = frozenset(
normalize_path(
- varexpand(x, expand, error_leader=lambda: "%s: " % location)
+ varexpand(x, expand, error_leader=lambda: f"{location}: ")
)
for x in entry.runpaths
)
@@ -662,15 +662,14 @@ class LinkageMapELF:
if debug:
if not os.path.isfile(lib):
writemsg_level(
- _("Missing library:") + " {}\n".format(lib),
+ _("Missing library:") + f" {lib}\n",
level=logging.DEBUG,
noiselevel=-1,
)
else:
writemsg_level(
_("Possibly missing symlink:")
- + "%s\n"
- % (os.path.join(os.path.dirname(lib), soname)),
+ + f"{os.path.join(os.path.dirname(lib), soname)}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -719,7 +718,7 @@ class LinkageMapELF:
os = _os_merge
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("{} ({}) not in object list".format(obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
basename = os.path.basename(obj)
soname = self._obj_properties[obj_key].soname
return (
@@ -770,10 +769,10 @@ class LinkageMapELF:
else:
obj_key = self._obj_key_cache.get(obj)
if obj_key is None:
- raise KeyError("%s not in object list" % obj)
+ raise KeyError(f"{obj} not in object list")
obj_props = self._obj_properties.get(obj_key)
if obj_props is None:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
if obj_props.owner is None:
return ()
return (obj_props.owner,)
@@ -793,10 +792,10 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
return self._obj_properties[obj_key].soname
if obj not in self._obj_key_cache:
- raise KeyError("%s not in object list" % obj)
+ raise KeyError(f"{obj} not in object list")
return self._obj_properties[self._obj_key_cache[obj]].soname
def findProviders(self, obj):
@@ -831,11 +830,11 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
else:
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("{} ({}) not in object list".format(obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
obj_props = self._obj_properties[obj_key]
arch = obj_props.arch
@@ -910,13 +909,13 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
objs = self._obj_properties[obj_key].alt_paths
else:
objs = {obj}
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("{} ({}) not in object list".format(obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
# If there is another version of this lib with the
# same soname and the soname symlink points to that
diff --git a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
index c60b52156..7b7276778 100644
--- a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
+++ b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
@@ -147,7 +147,7 @@ class PreservedLibsRegistry:
except OSError as e:
if e.errno != PermissionDenied.errno:
writemsg_level(
- "!!! {} {}\n".format(e, self._filename),
+ f"!!! {e} {self._filename}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/_dyn_libs/display_preserved_libs.py b/lib/portage/util/_dyn_libs/display_preserved_libs.py
index 31e2c6285..0deda0f16 100644
--- a/lib/portage/util/_dyn_libs/display_preserved_libs.py
+++ b/lib/portage/util/_dyn_libs/display_preserved_libs.py
@@ -20,7 +20,7 @@ def display_preserved_libs(vardb, verbose=False):
linkmap.rebuild()
except portage.exception.CommandNotFound as e:
portage.util.writemsg_level(
- "!!! Command Not Found: {}\n".format(e), level=logging.ERROR, noiselevel=-1
+ f"!!! Command Not Found: {e}\n", level=logging.ERROR, noiselevel=-1
)
else:
search_for_owners = set()
@@ -54,7 +54,7 @@ def display_preserved_libs(vardb, verbose=False):
all_preserved.update(*plibdata.values())
for cpv in plibdata:
- print(colorize("WARN", ">>>") + " package: %s" % cpv)
+ print(colorize("WARN", ">>>") + f" package: {cpv}")
samefile_map = {}
for f in plibdata[cpv]:
obj_key = linkmap._obj_key(f)
@@ -67,7 +67,7 @@ def display_preserved_libs(vardb, verbose=False):
for alt_paths in samefile_map.values():
alt_paths = sorted(alt_paths)
for p in alt_paths:
- print(colorize("WARN", " * ") + " - {}".format(p))
+ print(colorize("WARN", " * ") + f" - {p}")
f = alt_paths[0]
consumers = consumer_map.get(f, [])
consumers_non_preserved = [c for c in consumers if c not in all_preserved]
@@ -94,7 +94,7 @@ def display_preserved_libs(vardb, verbose=False):
owners_desc = ", ".join(x.mycpv for x in owners.get(c, []))
print(
colorize("WARN", " * ")
- + " used by {} ({})".format(c, owners_desc)
+ + f" used by {c} ({owners_desc})"
)
if not verbose and len(consumers) > max_display:
print(
diff --git a/lib/portage/util/_dyn_libs/soname_deps.py b/lib/portage/util/_dyn_libs/soname_deps.py
index b1d03bad8..7d65209a4 100644
--- a/lib/portage/util/_dyn_libs/soname_deps.py
+++ b/lib/portage/util/_dyn_libs/soname_deps.py
@@ -67,7 +67,7 @@ class SonameDepsProcessor:
if multilib_cat is None:
# This usage is invalid. The caller must ensure that
# the multilib category data is supplied here.
- raise AssertionError("Missing multilib category data: %s" % entry.filename)
+ raise AssertionError(f"Missing multilib category data: {entry.filename}")
self._basename_map.setdefault(os.path.basename(entry.filename), []).append(
entry
@@ -85,7 +85,7 @@ class SonameDepsProcessor:
varexpand(
x,
expand,
- error_leader=lambda: "%s: DT_RUNPATH: " % entry.filename,
+ error_leader=lambda: f"{entry.filename}: DT_RUNPATH: ",
)
)
for x in entry.runpaths
diff --git a/lib/portage/util/_info_files.py b/lib/portage/util/_info_files.py
index b20906f58..99d2bdce7 100644
--- a/lib/portage/util/_info_files.py
+++ b/lib/portage/util/_info_files.py
@@ -77,7 +77,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
proc = subprocess.Popen(
[
"/usr/bin/install-info",
- "--dir-file=%s" % os.path.join(inforoot, "dir"),
+ f"--dir-file={os.path.join(inforoot, 'dir')}",
os.path.join(inforoot, x),
],
env=dict(os.environ, LANG="C", LANGUAGE="C"),
diff --git a/lib/portage/util/_path.py b/lib/portage/util/_path.py
index 82fe95870..d99f15664 100644
--- a/lib/portage/util/_path.py
+++ b/lib/portage/util/_path.py
@@ -12,7 +12,7 @@ def exists_raise_eaccess(path):
os.stat(path)
except OSError as e:
if e.errno == PermissionDenied.errno:
- raise PermissionDenied("stat('%s')" % path)
+ raise PermissionDenied(f"stat('{path}')")
return False
else:
return True
@@ -23,7 +23,7 @@ def isdir_raise_eaccess(path):
st = os.stat(path)
except OSError as e:
if e.errno == PermissionDenied.errno:
- raise PermissionDenied("stat('%s')" % path)
+ raise PermissionDenied(f"stat('{path}')")
return False
else:
return stat.S_ISDIR(st.st_mode)
diff --git a/lib/portage/util/_pty.py b/lib/portage/util/_pty.py
index c70da8511..9372e556c 100644
--- a/lib/portage/util/_pty.py
+++ b/lib/portage/util/_pty.py
@@ -58,7 +58,7 @@ def _create_pty_or_pipe(copy_term_size=None):
got_pty = True
except OSError as e:
_disable_openpty = True
- writemsg("openpty failed: '%s'\n" % str(e), noiselevel=-1)
+ writemsg(f"openpty failed: '{str(e)}'\n", noiselevel=-1)
del e
master_fd, slave_fd = os.pipe()
diff --git a/lib/portage/util/_xattr.py b/lib/portage/util/_xattr.py
index 89eb4a366..41b396d0b 100644
--- a/lib/portage/util/_xattr.py
+++ b/lib/portage/util/_xattr.py
@@ -59,7 +59,7 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def get(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "{}.{}".format(namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["getfattr", "--absolute-names", "-n", name, item]
if nofollow:
cmd += ["-h"]
@@ -75,14 +75,14 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def set(cls, item, name, value, _flags=0, namespace=None):
if namespace:
- name = "{}.{}".format(namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["setfattr", "-n", name, "-v", value, item]
cls._call(cmd)
@classmethod
def remove(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "{}.{}".format(namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["setfattr", "-x", name, item]
if nofollow:
cmd += ["-h"]
@@ -93,12 +93,12 @@ class _XattrSystemCommands(_XattrGetAll):
cmd = ["getfattr", "-d", "--absolute-names", item]
if nofollow:
cmd += ["-h"]
- cmd += ["-m", ("^%s[.]" % namespace) if namespace else "-"]
+ cmd += ["-m", (f"^{namespace}[.]") if namespace else "-"]
proc = cls._call(cmd, stdout=subprocess.PIPE)
ret = []
if namespace:
- namespace = "%s." % namespace
+ namespace = f"{namespace}."
for name, value in cls._parse_output(proc.stdout):
if namespace:
if name.startswith(namespace):
diff --git a/lib/portage/util/changelog.py b/lib/portage/util/changelog.py
index 9aeb6fe1a..bcf90eb99 100644
--- a/lib/portage/util/changelog.py
+++ b/lib/portage/util/changelog.py
@@ -41,7 +41,7 @@ class ChangeLogTypeSort(str):
return second in ("EBUILD", "MISC", "AUX")
if first is None:
return False
- raise ValueError("Unknown file type '%s'" % first)
+ raise ValueError(f"Unknown file type '{first}'")
def __lt__(self, other):
"""
diff --git a/lib/portage/util/configparser.py b/lib/portage/util/configparser.py
index 9f39dffe1..be7d87bc0 100644
--- a/lib/portage/util/configparser.py
+++ b/lib/portage/util/configparser.py
@@ -72,7 +72,5 @@ def read_configs(parser, paths):
read_file(p, **kwargs)
else:
raise TypeError(
- "Unsupported type {!r} of element {!r} of 'paths' argument".format(
- type(p), p
- )
+ f"Unsupported type {type(p)!r} of element {p!r} of 'paths' argument"
)
diff --git a/lib/portage/util/digraph.py b/lib/portage/util/digraph.py
index 0a8307ce4..3e736792c 100644
--- a/lib/portage/util/digraph.py
+++ b/lib/portage/util/digraph.py
@@ -312,18 +312,14 @@ class digraph:
writemsg(s, noiselevel=-1)
for node in self.nodes:
- output("{} ".format(node))
+ output(f"{node} ")
if self.nodes[node][0]:
output("depends on\n")
else:
output("(no children)\n")
for child, priorities in self.nodes[node][0].items():
output(
- " %s (%s)\n"
- % (
- child,
- priorities[-1],
- )
+ f" {child} ({priorities[-1]})\n"
)
def bfs(self, start, ignore_priority=None):
diff --git a/lib/portage/util/env_update.py b/lib/portage/util/env_update.py
index 0e5e346aa..d76042a6b 100644
--- a/lib/portage/util/env_update.py
+++ b/lib/portage/util/env_update.py
@@ -143,12 +143,12 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
try:
myconfig = getconfig(file_path, expand=False)
except ParseError as e:
- writemsg("!!! '%s'\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! '{str(e)}'\n", noiselevel=-1)
del e
continue
if myconfig is None:
# broken symlink or file removed by a concurrent process
- writemsg("!!! File Not Found: '%s'\n" % file_path, noiselevel=-1)
+ writemsg(f"!!! File Not Found: '{file_path}'\n", noiselevel=-1)
continue
config_list.append(myconfig)
@@ -244,7 +244,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
newprelink.write("# contents of /etc/env.d directory\n")
for x in sorted(potential_lib_dirs) + ["bin", "sbin"]:
- newprelink.write("-l /{}\n".format(x))
+ newprelink.write(f"-l /{x}\n")
prelink_paths = set()
prelink_paths |= set(specials.get("LDPATH", []))
prelink_paths |= set(specials.get("PATH", []))
@@ -265,9 +265,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
plmasked = 1
break
if not plmasked:
- newprelink.write("-h {}\n".format(x))
+ newprelink.write(f"-h {x}\n")
for x in prelink_path_mask:
- newprelink.write("-b {}\n".format(x))
+ newprelink.write(f"-b {x}\n")
newprelink.close()
# Migration code path. If /etc/prelink.conf was generated by us, then
@@ -343,7 +343,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
and "CBUILD" in settings
and settings["CHOST"] != settings["CBUILD"]
):
- ldconfig = find_binary("%s-ldconfig" % settings["CHOST"])
+ ldconfig = find_binary(f"{settings['CHOST']}-ldconfig")
else:
ldconfig = os.path.join(eroot, "sbin", "ldconfig")
@@ -363,7 +363,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
writemsg_level(
_(">>> Regenerating %setc/ld.so.cache...\n") % (target_root,)
)
- os.system("cd / ; {} -X -r '{}'".format(ldconfig, target_root))
+ os.system(f"cd / ; {ldconfig} -X -r '{target_root}'")
elif ostype in ("FreeBSD", "DragonFly"):
writemsg_level(
_(">>> Regenerating %svar/run/ld-elf.so.hints...\n") % target_root
@@ -395,9 +395,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
for k in env_keys:
v = env[k]
if v.startswith("$") and not v.startswith("${"):
- outfile.write("export {}=$'{}'\n".format(k, v[1:]))
+ outfile.write(f"export {k}=$'{v[1:]}'\n")
else:
- outfile.write("export {}='{}'\n".format(k, v))
+ outfile.write(f"export {k}='{v}'\n")
# Create the systemd user environment configuration file
# /etc/environment.d/10-gentoo-env.conf with the
@@ -433,5 +433,5 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
outfile = atomic_ofstream(os.path.join(eroot, "etc", "csh.env"))
outfile.write(cenvnotice)
for x in env_keys:
- outfile.write("setenv {} '{}'\n".format(x, env[x]))
+ outfile.write(f"setenv {x} '{env[x]}'\n")
outfile.close()
diff --git a/lib/portage/util/futures/executor/fork.py b/lib/portage/util/futures/executor/fork.py
index 9abe66ac3..231931b2c 100644
--- a/lib/portage/util/futures/executor/fork.py
+++ b/lib/portage/util/futures/executor/fork.py
@@ -91,9 +91,7 @@ class ForkExecutor:
# distinguish between kill and crash
future.set_exception(
Exception(
- "pid {} crashed or killed, exitcode {}".format(
- proc.pid, proc.returncode
- )
+ f"pid {proc.pid} crashed or killed, exitcode {proc.returncode}"
)
)
@@ -122,7 +120,7 @@ class _ExceptionWithTraceback:
tb = traceback.format_exception(type(exc), exc, exc.__traceback__)
tb = "".join(tb)
self.exc = exc
- self.tb = '\n"""\n%s"""' % tb
+ self.tb = f'\n"""\n{tb}"""'
def __reduce__(self):
return _rebuild_exc, (self.exc, self.tb)
diff --git a/lib/portage/util/hooks.py b/lib/portage/util/hooks.py
index c6367118f..cbb15f123 100644
--- a/lib/portage/util/hooks.py
+++ b/lib/portage/util/hooks.py
@@ -46,7 +46,7 @@ def perform_hooks(rel_directory, *argv, prefix="/"):
if retval != portage.os.EX_OK:
writemsg_level(
- " {} Spawn failed for: {}, {}\n".format(bad("*"), name, filepath),
+ f" {bad('*')} Spawn failed for: {name}, {filepath}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/locale.py b/lib/portage/util/locale.py
index 54b1e11a6..0d0c12015 100644
--- a/lib/portage/util/locale.py
+++ b/lib/portage/util/locale.py
@@ -77,19 +77,19 @@ def _check_locale(silent):
if uc != ruc:
msg.extend(
[
- " {} -> {}".format(chars(lc), chars(ruc)),
+ f" {chars(lc)} -> {chars(ruc)}",
" %28s: %s" % ("expected", chars(uc)),
]
)
if lc != rlc:
msg.extend(
[
- " {} -> {}".format(chars(uc), chars(rlc)),
+ f" {chars(uc)} -> {chars(rlc)}",
" %28s: %s" % ("expected", chars(lc)),
]
)
writemsg_level(
- "".join(["!!! %s\n" % l for l in msg]), level=logging.ERROR, noiselevel=-1
+ "".join([f"!!! {l}\n" for l in msg]), level=logging.ERROR, noiselevel=-1
)
return False
diff --git a/lib/portage/util/movefile.py b/lib/portage/util/movefile.py
index b3f186eef..e9971c3fa 100644
--- a/lib/portage/util/movefile.py
+++ b/lib/portage/util/movefile.py
@@ -149,9 +149,9 @@ def movefile(
raise
except Exception as e:
writemsg(
- "!!! %s\n" % _("Stating source file failed... movefile()"), noiselevel=-1
+ f"!!! {_('Stating source file failed... movefile()')}\n", noiselevel=-1
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
destexists = 1
@@ -233,10 +233,10 @@ def movefile(
raise
except Exception as e:
writemsg(
- "!!! %s\n" % _("failed to properly create symlink:"), noiselevel=-1
+ f"!!! {_('failed to properly create symlink:')}\n", noiselevel=-1
)
- writemsg("!!! {} -> {}\n".format(dest, target), noiselevel=-1)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {dest} -> {target}\n", noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
hardlinked = False
@@ -247,7 +247,7 @@ def movefile(
if hardlink_candidates:
head, tail = os.path.split(dest)
hardlink_tmp = os.path.join(
- head, ".{}._portage_merge_.{}".format(tail, portage.getpid())
+ head, f".{tail}._portage_merge_.{portage.getpid()}"
)
try:
os.unlink(hardlink_tmp)
@@ -258,7 +258,7 @@ def movefile(
% (hardlink_tmp,),
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
del e
for hardlink_src in hardlink_candidates:
@@ -274,7 +274,7 @@ def movefile(
_("!!! Failed to rename %s to %s\n") % (hardlink_tmp, dest),
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
hardlinked = True
try:
@@ -297,12 +297,11 @@ def movefile(
if e.errno != errno.EXDEV:
# Some random error.
writemsg(
- "!!! %s\n"
- % _("Failed to move %(src)s to %(dest)s")
+ f"!!! {_('Failed to move %(src)s to %(dest)s')}\n"
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
# Invalid cross-device-link 'bind' mounted or actually Cross-Device
if renamefailed:
@@ -332,19 +331,18 @@ def movefile(
)
msg = textwrap.wrap(msg, 65)
for line in msg:
- writemsg("!!! {}\n".format(line), noiselevel=-1)
+ writemsg(f"!!! {line}\n", noiselevel=-1)
raise
_rename(dest_tmp_bytes, dest_bytes)
_os.unlink(src_bytes)
success = True
except Exception as e:
writemsg(
- "!!! %s\n"
- % _("copy %(src)s -> %(dest)s failed.")
+ f"!!! {_('copy %(src)s -> %(dest)s failed.')}\n"
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! {}\n".format(e), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
finally:
if not success:
@@ -365,7 +363,7 @@ def movefile(
},
noiselevel=-1,
)
- writemsg("!!! %s\n" % a, noiselevel=-1)
+ writemsg(f"!!! {a}\n", noiselevel=-1)
return None # failure
# In Python <3.3 always use stat_obj[stat.ST_MTIME] for the integral timestamp
@@ -395,8 +393,8 @@ def movefile(
newmtime = os.stat(dest).st_mtime_ns
except OSError as e:
writemsg(_("!!! Failed to stat in movefile()\n"), noiselevel=-1)
- writemsg("!!! %s\n" % dest, noiselevel=-1)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {dest}\n", noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
return None
if bsd_chflags:
diff --git a/lib/portage/util/whirlpool.py b/lib/portage/util/whirlpool.py
index d26780604..62fcfda53 100644
--- a/lib/portage/util/whirlpool.py
+++ b/lib/portage/util/whirlpool.py
@@ -78,7 +78,7 @@ class PyWhirlpool:
dig = self.digest()
tempstr = ""
for d in dig:
- xxx = "%02x" % (ord(d))
+ xxx = f"{ord(d):02x}"
tempstr = tempstr + xxx
return tempstr
@@ -116,7 +116,7 @@ class CWhirlpool:
dig = self.digest()
tempstr = ""
for d in dig:
- xxx = "%02x" % (d,)
+ xxx = f"{d:02x}"
tempstr = tempstr + xxx
return tempstr
@@ -2225,7 +2225,7 @@ def WhirlpoolInit(ctx):
def WhirlpoolAdd(source, sourceBits, ctx):
if not isinstance(source, bytes):
- raise TypeError("Expected {}, got {}".format(bytes, type(source)))
+ raise TypeError(f"Expected {bytes}, got {type(source)}")
if sourceBits == 0:
return
diff --git a/lib/portage/versions.py b/lib/portage/versions.py
index c7a247b40..98f6489f8 100644
--- a/lib/portage/versions.py
+++ b/lib/portage/versions.py
@@ -312,7 +312,7 @@ def _pkgsplit(mypkg, eapi=None):
return (m.group("pn"), m.group("ver"), rev)
-_cat_re = re.compile("^%s$" % _cat, re.UNICODE)
+_cat_re = re.compile(f"^{_cat}$", re.UNICODE)
_missing_cat = "null"
@@ -521,7 +521,7 @@ def cpv_getkey(mycpv, eapi=None):
warnings.warn(
"portage.versions.cpv_getkey() "
- + "called with invalid cpv: '{}'".format(mycpv),
+ + f"called with invalid cpv: '{mycpv}'",
DeprecationWarning,
stacklevel=2,
)
diff --git a/lib/portage/xml/metadata.py b/lib/portage/xml/metadata.py
index df3ce8121..33bb977d7 100644
--- a/lib/portage/xml/metadata.py
+++ b/lib/portage/xml/metadata.py
@@ -83,7 +83,7 @@ class _Maintainer:
setattr(self, attr.tag, attr.text)
def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.email)
+ return f"<{self.__class__.__name__} {self.email!r}>"
class _Useflag:
@@ -113,7 +113,7 @@ class _Useflag:
self.description = re.sub(r"\s+", " ", _desc)
def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__} {self.name!r}>"
class _Upstream:
@@ -145,7 +145,7 @@ class _Upstream:
self.remoteids = self.upstream_remoteids()
def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.__dict__)
+ return f"<{self.__class__.__name__} {self.__dict__!r}>"
def upstream_bugtrackers(self):
"""Retrieve upstream bugtracker location from xml node."""
@@ -200,7 +200,7 @@ class MetaDataXML:
except ImportError:
pass
except ExpatError as e:
- raise SyntaxError("{}".format(e))
+ raise SyntaxError(f"{e}")
if isinstance(herds, etree.ElementTree):
herds_etree = herds
@@ -219,7 +219,7 @@ class MetaDataXML:
self._upstream = None
def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.metadata_xml_path)
+ return f"<{self.__class__.__name__} {self.metadata_xml_path!r}>"
def _get_herd_email(self, herd):
"""Get a herd's email address.
diff --git a/runtests b/runtests
index 503c9b713..2fa948ae2 100755
--- a/runtests
+++ b/runtests
@@ -44,14 +44,14 @@ class Colors:
elif nocolors in self._COLORS_NO:
colorize = True
else:
- raise ValueError("$NOCOLORS is invalid: %s" % nocolors)
+ raise ValueError(f"$NOCOLORS is invalid: {nocolors}")
else:
if colorize in self._COLORS_YES:
colorize = True
elif colorize in self._COLORS_NO:
colorize = False
else:
- raise ValueError("--colors is invalid: %s" % colorize)
+ raise ValueError(f"--colors is invalid: {colorize}")
if colorize:
self.WARN = "\033[1;33m"
@@ -138,24 +138,21 @@ def main(argv):
cmd = [prog, "-b", "-Wd", "lib/portage/tests/runTests.py"] + args
if os.access(prog, os.X_OK):
print(
- "{}Testing with Python {}...{}".format(
- colors.GOOD, ver, colors.NORMAL
- )
+ f"{colors.GOOD}Testing with Python {ver}...{colors.NORMAL}"
)
statuses.append((ver, subprocess.call(cmd)))
elif not ignore_missing:
print(
- "%sCould not find requested Python %s%s"
- % (colors.BAD, ver, colors.NORMAL)
+ f"{colors.BAD}Could not find requested Python {ver}{colors.NORMAL}"
)
statuses.append((ver, 1))
else:
- print("{}Skip Python {}...{}".format(colors.WARN, ver, colors.NORMAL))
+ print(f"{colors.WARN}Skip Python {ver}...{colors.NORMAL}")
print()
finally:
if tempdir is not None:
if opts.keep_temp:
- print("Temporary directory left behind:\n%s" % tempdir)
+ print(f"Temporary directory left behind:\n{tempdir}")
else:
# Nuke our tempdir and anything that might be under it.
shutil.rmtree(tempdir, True)
@@ -164,7 +161,7 @@ def main(argv):
print("\nSummary:\n")
width = 10
header = "| %-*s | %s" % (width, "Version", "Status")
- print("{}\n|{}".format(header, "-" * (len(header) - 1)))
+ print(f"{header}\n|{'-' * (len(header) - 1)}")
exit_status = 0
for ver, status in statuses:
exit_status += status
diff --git a/setup.py b/setup.py
index 65a9bd248..1444d77b4 100755
--- a/setup.py
+++ b/setup.py
@@ -132,7 +132,7 @@ class build_man(Command):
if not newer(source, target) and not newer(__file__, target):
continue
- print("copying and updating {} -> {}".format(source, target))
+ print(f"copying and updating {source} -> {target}")
with codecs.open(source, "r", "utf8") as f:
data = f.readlines()
@@ -165,10 +165,10 @@ class docbook(Command):
with open("doc/fragment/date", "w"):
pass
with open("doc/fragment/version", "w") as f:
- f.write("<releaseinfo>%s</releaseinfo>" % self.distribution.get_version())
+ f.write(f"<releaseinfo>{self.distribution.get_version()}</releaseinfo>")
for f in self.doc_formats:
- print("Building docs in %s format..." % f)
+ print(f"Building docs in {f} format...")
subprocess.check_call(
["xmlto", "-o", "doc", "-m", "doc/custom.xsl", f, "doc/portage.docbook"]
)
@@ -331,7 +331,7 @@ class x_clean(clean):
break
for f in get_doc_outfiles():
- print("removing %s" % repr(f))
+ print(f"removing {repr(f)}")
os.remove(f)
if os.path.isdir("doc/fragment"):
@@ -353,12 +353,12 @@ class x_clean(clean):
conf_dir = os.path.join(top_dir, "cnf")
if os.path.islink(conf_dir):
- print("removing %s symlink" % repr(conf_dir))
+ print(f"removing {repr(conf_dir)} symlink")
os.unlink(conf_dir)
pni_file = os.path.join(top_dir, ".portage_not_installed")
if os.path.exists(pni_file):
- print("removing %s" % repr(pni_file))
+ print(f"removing {repr(pni_file)}")
os.unlink(pni_file)
def clean_man(self):
@@ -456,7 +456,7 @@ class x_install_data(install_data):
def run(self):
def re_sub_file(path, pattern, repl):
- print("Rewriting %s" % path)
+ print(f"Rewriting {path}")
with codecs.open(path, "r", "utf-8") as f:
data = f.read()
data = re.sub(pattern, repl, data, flags=re.MULTILINE)
@@ -511,7 +511,7 @@ class x_install_lib(install_lib):
def rewrite_file(path, val_dict):
path = os.path.join(self.install_dir, path)
- print("Rewriting %s" % path)
+ print(f"Rewriting {path}")
with codecs.open(path, "r", "utf-8") as f:
data = f.read()
@@ -533,7 +533,7 @@ class x_install_lib(install_lib):
def re_sub_file(path, pattern_repl_items):
path = os.path.join(self.install_dir, path)
- print("Rewriting %s" % path)
+ print(f"Rewriting {path}")
with codecs.open(path, "r", "utf-8") as f:
data = f.read()
for pattern, repl in pattern_repl_items:
@@ -563,10 +563,7 @@ class x_install_lib(install_lib):
),
(
r"^(EPREFIX\s*=\s*)(.*)",
- lambda m: "{}{}".format(
- m.group(1),
- '__import__("sys").prefix',
- ),
+ lambda m: f"{m.group(1)}__import__(\"sys\").prefix",
),
),
)
@@ -678,11 +675,11 @@ class build_tests(x_build_scripts_custom):
if os.path.exists(conf_dir):
if not os.path.islink(conf_dir):
raise SystemError(
- "%s exists and is not a symlink (collision)" % repr(conf_dir)
+ f"{repr(conf_dir)} exists and is not a symlink (collision)"
)
os.unlink(conf_dir)
conf_src = os.path.relpath("cnf", self.top_dir)
- print("Symlinking {} -> {}".format(conf_dir, conf_src))
+ print(f"Symlinking {conf_dir} -> {conf_src}")
os.symlink(conf_src, conf_dir)
source_path = os.path.realpath(__file__)
@@ -752,7 +749,7 @@ def get_manpages():
topdir = dirpath[len("man/") :]
if not topdir or linguas is None or topdir in linguas:
for g, mans in groups.items():
- yield [os.path.join("$mandir", topdir, "man%s" % g), mans]
+ yield [os.path.join("$mandir", topdir, f"man{g}"), mans]
class build_ext(_build_ext):
@@ -873,9 +870,7 @@ setup(
),
entry_points={
"console_scripts": [
- "{}=portage.util.bin_entry_point:bin_entry_point".format(
- os.path.basename(path)
- )
+ f"{os.path.basename(path)}=portage.util.bin_entry_point:bin_entry_point"
for path in itertools.chain.from_iterable(x_scripts.values())
],
}
^ permalink raw reply related [flat|nested] only message in thread
only message in thread, other threads:[~2023-01-10 15:12 UTC | newest]
Thread overview: (only message) (download: mbox.gz follow: Atom feed
-- links below jump to the message on this page --
2023-01-10 15:12 [gentoo-commits] proj/portage:master commit in: lib/portage/package/ebuild/, lib/portage/_emirrordist/, lib/portage/dep/, Sam James
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox