Skip to content

Commit

Permalink
refactor: convert to f-strings (#3107)
Browse files Browse the repository at this point in the history
  • Loading branch information
scarf005 authored Sep 1, 2023
1 parent 2d75e1b commit 0a8a74a
Show file tree
Hide file tree
Showing 18 changed files with 148 additions and 150 deletions.
6 changes: 3 additions & 3 deletions lang/concat_pot_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@
destination_file = args[2]

if not os.path.isfile(source_file_1):
print("Error: Couldn't find file '{}'.".format(source_file_1))
print(f"Error: Couldn't find file '{source_file_1}'.")
exit(1)
if not os.path.isfile(source_file_2):
print("Error: Couldn't find file '{}'.".format(source_file_2))
print(f"Error: Couldn't find file '{source_file_2}'.")
exit(1)

print("==> Merging '{}' and '{}' into '{}".format(source_file_1, source_file_2, destination_file))
print(f"==> Merging '{source_file_1}' and '{source_file_2}' into '{destination_file}")

pot1 = polib.pofile(source_file_1)
pot2 = polib.pofile(source_file_2)
Expand Down
144 changes: 72 additions & 72 deletions lang/extract_json_strings.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions lang/unicode_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ def print_encode_error(unicode_err, counter):
err_line = counter + chunk.count(b'\n', 0, unicode_err.start)
line_start = chunk.rfind(b'\n', 0, unicode_err.start) + 1
line_end = chunk.find(b'\n', line_start)
print("Unicode error on line {0}:".format(err_line))
print(f"Unicode error on line {err_line}:")
# Use RAW write because this is bytes class
sys.stdout.buffer.write(chunk[line_start:line_end + 1])
x_num = unicode_err.end - unicode_err.start + 2
Expand All @@ -30,7 +30,7 @@ def check(f):

if __name__ == '__main__':
if len(sys.argv) < 2:
print("Usage: {} [FILENAME]".format(sys.argv[0]))
print(f"Usage: {sys.argv[0]} [FILENAME]")
sys.exit(1)
with open(sys.argv[1], encoding="utf-8") as pot_file:
if not check(pot_file):
Expand Down
2 changes: 1 addition & 1 deletion tools/check_po_printf_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def print_message(msg, segments):
for file in sorted(os.listdir("lang/po")):
if file.endswith(".po") and not file.endswith("en.po"):
print("Checking", file, end="", flush=True)
errors = check_po_file("lang/po/" + file)
errors = check_po_file(f"lang/po/{file}")
n = len(errors)
num_errors += n
if n > 0:
Expand Down
6 changes: 3 additions & 3 deletions tools/copy_from.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@ def get_data(argsDict, resource_name):
with open(resource_filename) as resource_file:
resource += json.load(resource_file)
except FileNotFoundError:
exit("Failed: could not find {}".format(resource_filename))
exit(f"Failed: could not find {resource_filename}")
else:
print("Invalid filename {}".format(resource_filename))
print(f"Invalid filename {resource_filename}")
if not resource:
exit("Failed: {} was empty".format(resource_filename))
exit(f"Failed: {resource_filename} was empty")
return resource


Expand Down
10 changes: 5 additions & 5 deletions tools/copy_mac_libs.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,25 +22,25 @@ def rewrite_identity(object):
shutil.chown(object, os.getuid())
st = os.stat(object)
os.chmod(object, st.st_mode | stat.S_IWUSR)
id = "@executable_path/{}".format(os.path.basename(object))
id = f"@executable_path/{os.path.basename(object)}"
ret = subprocess.run(["install_name_tool", "-id", id, object])
if ret.returncode != 0:
print("Error:", ret.stderr.decode('utf-8'))
os.chmod(object, (st.st_mode | stat.S_IWUSR) ^ stat.S_IWUSR)
print("Rewritten identity of {}".format(object))
print(f"Rewritten identity of {object}")


def rewrite_dependency(object, dependency):
shutil.chown(object, os.getuid())
st = os.stat(object)
os.chmod(object, st.st_mode | stat.S_IWUSR)
dest = "@executable_path/{}".format(os.path.basename(dependency))
dest = f"@executable_path/{os.path.basename(dependency)}"
ret = subprocess.run(["install_name_tool", "-change", dependency,
dest, object])
if ret.returncode != 0:
print("Error:", ret.stderr.decode('utf-8'))
os.chmod(object, (st.st_mode | stat.S_IWUSR) ^ stat.S_IWUSR)
print("Rewritten reference from {} to {}".format(dependency, dest))
print(f"Rewritten reference from {dependency} to {dest}")


def copy_and_rewrite(file):
Expand All @@ -64,7 +64,7 @@ def copy_and_rewrite(file):
copied_file = file
if file != executable:
copied_file = shutil.copy2(file, executable_dir)
print("Copied {} to {}".format(file, copied_file))
print(f"Copied {file} to {copied_file}")
for dependency in dependencies:
if dependency == file:
rewrite_identity(copied_file)
Expand Down
14 changes: 7 additions & 7 deletions tools/dialogue_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def get_dialogue_from_json():
arg_path = arg_path[:-1]
for subdir_path, dirnames, filenames in os.walk(arg_path):
for filename in filenames:
path = subdir_path + "/" + filename
            path = f"{subdir_path}/{filename}"
if path == "data/json/npcs/TALK_TEST.json":
continue
if path.endswith(".json"):
Expand Down Expand Up @@ -151,15 +151,15 @@ def validate(dialogue):
if not topic_record.get("valid", False):
all_topics_valid = False
if topic_id in start_topics:
print("talk topic {} referenced in an NPC chat but not defined".format(topic_id))
print(f"talk topic {topic_id} referenced in an NPC chat but not defined")
else:
print("talk topic {} referenced in a response but not defined".format(topic_id))
print(f"talk topic {topic_id} referenced in a response but not defined")
if not topic_record.get("in_response", False):
all_topics_valid = False
print("talk topic {} defined but not referenced in a response".format(topic_id))
print(f"talk topic {topic_id} defined but not referenced in a response")
if topic_id in OBSOLETE_TOPICS:
all_topics_valid = False
print("talk topic {} referenced despite being listed as obsolete.".format(topic_id))
print(f"talk topic {topic_id} referenced despite being listed as obsolete.")

no_change = False
passes = 0
Expand Down Expand Up @@ -208,8 +208,8 @@ def validate(dialogue):
continue
branch_record = topic_branches[topic_id]
if not branch_record["ends"]:
print("{} does not reach TALK_DONE".format(topic_id))
print(f"{topic_id} does not reach TALK_DONE")
if not branch_record["parent"] in start_topics:
print("no path from a start topic to {}".format(topic_id))
print(f"no path from a start topic to {topic_id}")

validate(get_dialogue_from_json())
2 changes: 1 addition & 1 deletion tools/fix-compilation-database.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
else:
match_result = starts_with_drive_letter.match(command[i])
if match_result:
command[i] = "{}:/{}".format(match_result.group(1), match_result.group(2))
command[i] = f"{match_result.group(1)}:/{match_result.group(2)}"
i = i + 1
data[j]["command"] = " ".join([shlex.quote(s) for s in command])

Expand Down
26 changes: 13 additions & 13 deletions tools/gfx_tools/decompose.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,16 +178,16 @@ def parse_tile_entry(self, tile_entry, refs):
pngnum = all_pngnums[i]
if pngnum in refs.pngnum_to_pngname:
continue
pngname = "{}_{}_{}".format(pngnum, tile_id, i + offset)
pngname = f"{pngnum}_{tile_id}_{i + offset}"
while pngname in refs.pngname_to_pngnum:
offset += 1
pngname = "{}_{}_{}".format(pngnum, tile_id, i + offset)
pngname = f"{pngnum}_{tile_id}_{i + offset}"
try:
refs.pngnum_to_pngname.setdefault(pngnum, pngname)
refs.pngname_to_pngnum.setdefault(pngname, pngnum)
refs.add_pngnum_to_tsfilepath(pngnum)
except TypeError:
print("failed to parse {}".format(json.dumps(tile_entry, indent=2)))
print(f"failed to parse {json.dumps(tile_entry, indent=2)}")
raise
return tile_id

Expand All @@ -204,7 +204,7 @@ def summarize(self, tile_info, refs):
if self.pngnum_max > 0:
refs.ts_data[self.ts_filename] = self
ts_tile_info = {
"//": "indices {} to {}".format(self.pngnum_min, self.pngnum_max)
"//": f"indices {self.pngnum_min} to {self.pngnum_max}"
}
if self.write_dim:
ts_tile_info["sprite_offset_x"] = self.sprite_offset_x
Expand All @@ -224,8 +224,8 @@ def __init__(self, ts_filename, refs):
self.valid = True

ts_base = ts_filename.split(".png")[0]
geometry_dim = "{}x{}".format(self.ts_data.sprite_width, self.ts_data.sprite_height)
pngs_dir = "/pngs_" + ts_base + "_{}".format(geometry_dim)
geometry_dim = f"{self.ts_data.sprite_width}x{self.ts_data.sprite_height}"
pngs_dir = f"/pngs_{ts_base}_{geometry_dim}"
self.ts_dir_pathname = refs.tileset_pathname + pngs_dir
find_or_make_dir(self.ts_dir_pathname)
self.tilenum_in_dir = 256
Expand All @@ -237,12 +237,12 @@ def write_expansions(self):
expansion_id = expand_entry.get("id", "expansion")
if not isinstance(expansion_id, str):
continue
expand_entry_pathname = self.ts_dir_pathname + "/" + expansion_id + ".json"
expand_entry_pathname = f"{self.ts_dir_pathname}/{expansion_id}.json"
write_to_json(expand_entry_pathname, expand_entry)

def increment_dir(self):
if self.tilenum_in_dir > 255:
self.subdir_pathname = self.ts_dir_pathname + "/" + "images{}".format(self.dir_count)
self.subdir_pathname = f"{self.ts_dir_pathname}/images{self.dir_count}"
find_or_make_dir(self.subdir_pathname)
self.tilenum_in_dir = 0
self.dir_count += 1
Expand All @@ -268,7 +268,7 @@ def extract_image(self, png_index, refs):
tile_image = tile_data.ts_image.extract_area(tile_off_x, tile_off_y,
tile_data.sprite_width,
tile_data.sprite_height)
tile_png_pathname = self.subdir_pathname + "/" + pngname + ".png"
tile_png_pathname = f"{self.subdir_pathname}/{pngname}.png"
tile_image.pngsave(tile_png_pathname)
refs.extracted_pngnums[png_index] = True

Expand Down Expand Up @@ -303,15 +303,15 @@ def get_all_data(self, tileset_dirname, delete_pathname):
try:
os.stat(self.tileset_pathname)
except KeyError:
print("cannot find a directory {}".format(self.tileset_pathname))
print(f"cannot find a directory {self.tileset_pathname}")
exit -1

tileset_confname = refs.tileset_pathname + "/" + "tile_config.json"

try:
os.stat(tileset_confname)
except KeyError:
print("cannot find a directory {}".format(tileset_confname))
print(f"cannot find a directory {tileset_confname}")
exit -1

if delete_pathname:
Expand Down Expand Up @@ -389,7 +389,7 @@ def convert_pngnum_to_pngname(self, tile_entry):
def report_missing(self):
for pngnum in self.pngnum_to_pngname:
if not self.extracted_pngnums.get(pngnum):
print("missing index {}, {}".format(pngnum, self.pngnum_to_pngname[pngnum]))
print(f"missing index {pngnum}, {self.pngnum_to_pngname[pngnum]}")


args = argparse.ArgumentParser(description="Split a tileset's tile_config.json into a directory per tile containing the tile data and png.")
Expand Down Expand Up @@ -452,7 +452,7 @@ def report_missing(self):
tile_entry_name, tile_entry = refs.convert_pngnum_to_pngname(tile_entry)
if not tile_entry_name:
continue
tile_entry_pathname = subdir_pathname + "/" + tile_entry_name + "_" + str(idx) + ".json"
tile_entry_pathname = f"{subdir_pathname}/{tile_entry_name}_{str(idx)}.json"
#if os.path.isfile(tile_entry_pathname):
# print("overwriting {}".format(tile_entry_pathname))
write_to_json(tile_entry_pathname, tile_entry)
Expand Down
20 changes: 10 additions & 10 deletions tools/gfx_tools/png_update.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,31 +124,31 @@ def convert_tile_entry_file(file_path, old_name, new_name):
if tmp_new_name.endswith(".png"):
new_name = tmp_new_name[:-4]

old_name_json = old_name + ".json"
old_name_png = old_name + ".png"
new_name_json = new_name + ".json"
new_name_png = new_name + ".png"
old_name_json = f"{old_name}.json"
old_name_png = f"{old_name}.png"
new_name_json = f"{new_name}.json"
new_name_png = f"{new_name}.png"

if not tileset_dirname.startswith("gfx/"):
tileset_dirname = "gfx/" + tileset_dirname
tileset_dirname = f"gfx/{tileset_dirname}"
if tileset_dirname.endswith("/"):
tileset_dirname = tileset_dirname[:-1]

print("In " + tileset_dirname + ", renaming " + old_name + " to " + new_name)
print(f"In {tileset_dirname}, renaming {old_name} to {new_name}")
for png_dirname in os.listdir(tileset_dirname):
if not png_dirname.startswith("pngs_"):
continue
png_path = tileset_dirname + "/" + png_dirname
png_path = f"{tileset_dirname}/{png_dirname}"
for subdir_fpath, dirnames, filenames in os.walk(png_path):
for filename in filenames:
old_path = subdir_fpath + "/" + filename
            old_path = f"{subdir_fpath}/{filename}"
if filename.endswith(".json"):
convert_tile_entry_file(old_path, old_name, new_name)
if filename == old_name_png:
new_path = subdir_fpath + "/" + new_name_png
new_path = f"{subdir_fpath}/{new_name_png}"
os.rename(old_path, new_path)
elif filename == old_name_json:
new_path = subdir_fpath + "/" + new_name_json
new_path = f"{subdir_fpath}/{new_name_json}"
os.rename(old_path, new_path)


7 changes: 3 additions & 4 deletions tools/json_tools/cddatags.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
TAGS_FILE = os.path.join(TOP_DIR, "tags")

def make_tags_line(id_key, id, filename):
pattern = '/"{id_key}": "{id}"/'.format(id_key=id_key, id=id)
return '\t'.join((id, filename, pattern)).encode('utf-8')
pattern = f'/"{id_key}": "{id}"/'
    return f"{id}\t{filename}\t{pattern}".encode('utf-8')

def is_json_tag_line(line):
return b'.json\t' in line
Expand Down Expand Up @@ -41,8 +41,7 @@ def main(args):
json_data = json.load(file)
except Exception as err:
sys.stderr.write(
"Problem reading file %s, reason: %s" %
(filename, err))
                    f"Problem reading file {filename}, reason: {err}")
continue
if type(json_data) == dict:
json_data = [json_data]
Expand Down
4 changes: 2 additions & 2 deletions tools/json_tools/keys.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@

if args.human:
title = "Count of keys"
print("\n\n%s" % title)
print("(Data from %s out of %s blobs)" % (num_matches, len(json_data)))
print(f"\n\n{title}")
print(f"(Data from {num_matches} out of {len(json_data)} blobs)")
print("-" * len(title))
ui_counts_to_columns(stats)
else:
Expand Down
6 changes: 3 additions & 3 deletions tools/json_tools/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ def import_data(json_dir=JSON_DIR, json_fmatch=JSON_FNMATCH):
try:
candidates = json.load(file, object_pairs_hook=OrderedDict)
except Exception as err:
errors.append("Problem reading file %s, reason: %s" % (json_file, err))
errors.append(f"Problem reading file {json_file}, reason: {err}")
if type(candidates) != list:
errors.append("Problem parsing data from file %s, reason: expected a list." % json_file)
errors.append(f"Problem parsing data from file {json_file}, reason: expected a list.")
else:
data += candidates
return (data, errors)
Expand Down Expand Up @@ -250,7 +250,7 @@ def indented_write(self, s):
self.buf.write(self.indent*self.indent_multiplier + s)

def write_key(self, k):
self.indented_write("\"%s\": " % k)
self.indented_write(f"\"{k}\": ")

def write_primitive_key_val(self, k, v):
self.write_key(k)
Expand Down
6 changes: 3 additions & 3 deletions tools/json_tools/values.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,9 @@
sys.exit(1)

if args.human:
title = "Count of values from field '%s'" % search_key
print("\n\n%s" % title)
print("(Data from %s out of %s blobs)" % (num_matches, len(json_data)))
title = f"Count of values from field '{search_key}'"
print(f"\n\n{title}")
print(f"(Data from {num_matches} out of {len(json_data)} blobs)")
print("-" * len(title))
ui_counts_to_columns(stats)
else:
Expand Down
Loading

0 comments on commit 0a8a74a

Please sign in to comment.