diff --git a/Makefile b/Makefile index f35c16f..6384ab9 100644 --- a/Makefile +++ b/Makefile @@ -60,6 +60,7 @@ LIBPCCS := $(CURDIR)/PCCS #--------------------------------------------------------------------------------- TARGET := $(notdir $(CURDIR))_mb BUILD := build +GENERATED_DIR := $(BUILD)/generated SOURCES := source INCLUDES := include PCCS/lib/include DATA := data @@ -124,14 +125,15 @@ ifneq ($(BUILD),$(notdir $(CURDIR))) export OUTPUT := $(CURDIR)/$(TARGET) -export VPATH := $(foreach dir,$(SOURCES),$(CURDIR)/$(dir)) \ +export VPATH := $(CURDIR)/$(GENERATED_DIR) \ + $(foreach dir,$(SOURCES),$(CURDIR)/$(dir)) \ $(foreach dir,$(DATA),$(CURDIR)/$(dir)) \ $(foreach dir,$(GRAPHICS),$(CURDIR)/$(dir)) export DEPSDIR := $(CURDIR)/$(BUILD) CFILES := $(foreach dir,$(SOURCES),$(notdir $(wildcard $(dir)/*.c))) -CPPFILES := $(foreach dir,$(SOURCES),$(notdir $(wildcard $(dir)/*.cpp))) translated_text.cpp +CPPFILES := $(sort $(foreach dir,$(SOURCES),$(notdir $(wildcard $(dir)/*.cpp))) translated_text.cpp) SFILES := $(foreach dir,$(SOURCES),$(notdir $(wildcard $(dir)/*.s))) PNGFILES := $(foreach dir,$(GRAPHICS),$(notdir $(wildcard $(dir)/*.png))) @@ -160,25 +162,45 @@ export OFILES_GRAPHICS := $(PNGFILES:.png=.o) export OFILES := $(OFILES_SOURCES) $(OFILES_GRAPHICS) -export HFILES := $(addsuffix .h,$(subst .,_,$(BINFILES))) $(PNGFILES:.png=.h) $(CURDIR)/include/translated_text.h $(CURDIR)/include/fonts.h +export HFILES := $(addsuffix .h,$(subst .,_,$(BINFILES))) $(PNGFILES:.png=.h) \ + $(CURDIR)/$(GENERATED_DIR)/translated_text.h \ + $(CURDIR)/$(GENERATED_DIR)/fonts.h export INCLUDE := $(foreach dir,$(INCLUDES),-iquote $(CURDIR)/$(dir)) \ $(foreach dir,$(LIBDIRS),-I$(dir)/include) \ + -I$(CURDIR)/$(GENERATED_DIR) \ -I$(CURDIR)/$(BUILD) \ -I$(CURDIR)/tools/payload-generator/include export LIBPATHS := $(foreach dir,$(LIBDIRS),-L$(dir)/lib) -.PHONY: $(BUILD) generate_data clean +.PHONY: $(BUILD) generate_data clean text_generated data to_compress generated_dir all: $(BUILD) 
-generate_data: +TEXT_GENERATED := $(CURDIR)/$(GENERATED_DIR)/translated_text.h \ + $(CURDIR)/$(GENERATED_DIR)/translated_text.cpp \ + $(CURDIR)/$(GENERATED_DIR)/fonts.h \ + $(CURDIR)/$(GENERATED_DIR)/output.json + +text_generated: $(TEXT_GENERATED) + +$(TEXT_GENERATED) &: tools/text_helper/main.py | data to_compress generated_dir + @PTGB_GEN_DIR="$(CURDIR)/$(GENERATED_DIR)" python3 tools/text_helper/main.py + +data: + @mkdir -p $@ + +to_compress: + @mkdir -p $@ + +generated_dir: + @mkdir -p $(GENERATED_DIR) + +generate_data: data to_compress text_generated @echo "----------------------------------------------------------------" @echo "Building v$(GIT_VERSION) with parameters: $(BUILD_LANG), $(BUILD_TYPE)" @echo "----------------------------------------------------------------" - mkdir -p data - mkdir -p to_compress @env - \ PATH="$(PATH)" \ TMPDIR=/tmp TMP=/tmp TEMP=/tmp \ @@ -194,7 +216,6 @@ generate_data: @echo "----------------------------------------------------------------" @echo @tools/payload-generator/payload-generator to_compress - @python3 text_helper/main.py @echo "Compressing bin files!" 
@echo -n "[" @find to_compress -name "*.bin" -print0 | xargs -0 -n1 ./compress_lz10.sh @@ -225,10 +246,8 @@ clean: @$(MAKE) -C loader clean @$(MAKE) -C PCCS clean @rm -fr $(BUILD) $(TARGET).elf $(TARGET).gba data/ to_compress/ - @rm -f text_helper/output.json - @rm -f include/fonts.h - @rm -f include/translated_text.h - @rm -f source/translated_text.cpp + @rm -f tools/text_helper/output.json + @rm -rf $(GENERATED_DIR) diff --git a/text_helper/fonts/japanese_normal.png b/tools/text_helper/fonts/japanese_normal.png similarity index 100% rename from text_helper/fonts/japanese_normal.png rename to tools/text_helper/fonts/japanese_normal.png diff --git a/text_helper/fonts/latin_normal.png b/tools/text_helper/fonts/latin_normal.png similarity index 100% rename from text_helper/fonts/latin_normal.png rename to tools/text_helper/fonts/latin_normal.png diff --git a/text_helper/main.py b/tools/text_helper/main.py old mode 100755 new mode 100644 similarity index 95% rename from text_helper/main.py rename to tools/text_helper/main.py index 30458f4..d617c3a --- a/text_helper/main.py +++ b/tools/text_helper/main.py @@ -24,12 +24,26 @@ FIRST_TRANSLATION_COL_INDEX = 10 BASE_DIR = Path(__file__).resolve().parent # read by default 1st sheet of an excel file -textDir = os.curdir + "/text_helper" +textDir = str(BASE_DIR) + +gen_dir_env = os.environ.get("PTGB_GEN_DIR") +if gen_dir_env: + GEN_DIR = Path(gen_dir_env) + GEN_DIR.mkdir(parents=True, exist_ok=True) + TRANSLATED_H_PATH = GEN_DIR / "translated_text.h" + TRANSLATED_CPP_PATH = GEN_DIR / "translated_text.cpp" + FONTS_H_PATH = GEN_DIR / "fonts.h" + OUTPUT_JSON_PATH = GEN_DIR / "output.json" +else: + TRANSLATED_H_PATH = Path(os.curdir) / "include/translated_text.h" + TRANSLATED_CPP_PATH = Path(os.curdir) / "source/translated_text.cpp" + FONTS_H_PATH = Path(os.curdir) / "include/fonts.h" + OUTPUT_JSON_PATH = BASE_DIR / "output.json" url = 
'https://docs.google.com/spreadsheets/d/14LLs5lLqWasFcssBmJdGXjjYxARAJBa_QUOUhXZt4v8/export?format=xlsx' new_file_path = BASE_DIR / 'new_text.xlsx' old_file_path = BASE_DIR / 'text.xlsx' -json_file_path = BASE_DIR / 'output.json' +json_file_path = OUTPUT_JSON_PATH def split_into_sentences(text: str) -> list[str]: # -*- coding: utf-8 -*- @@ -91,7 +105,7 @@ def split_into_sentences(text: str) -> list[str]: if sentences and not sentences[-1]: sentences = sentences[:-1] return sentences -def split_sentence_into_lines(sentence, offset, pixelsPerChar, pixelsInLine, lang): +def split_sentence_into_lines(sentence, offset, pixelsPerChar, pixelsInLine, isCentered, lang): outStr = "" currLine = "" lineCount = 0 @@ -166,7 +180,11 @@ def split_sentence_into_lines(sentence, offset, pixelsPerChar, pixelsInLine, lan lineCount += 1 lineLength = 0 offset = 0 - + if (isCentered): + count = (pixelsInLine - lineLength) // 2 + for i in range(count): + currLine = " " + currLine + lineLength += spaceLength outStr += currLine return lineLength + offset, lineCount, outStr @@ -239,7 +257,7 @@ def convert_item(ogDict, lang): offset = 0 escapeCount = 0 while index < len(split_sents) and escapeCount < 100: - offset, recievedLine, out = split_sentence_into_lines(split_sents[index], offset, pixelsPerChar, pixelsInLine, lang) + offset, recievedLine, out = split_sentence_into_lines(split_sents[index], offset, pixelsPerChar, pixelsInLine, center_text, lang) currLine += recievedLine if (out == "ȼ"): @@ -385,7 +403,7 @@ def download_xlsx_file(): # XML exists (guaranteed here) if json_file_path.exists(): print("Offline mode: trusting cached XML + JSON. 
Skipping parse.") - if os.path.getmtime(f'{textDir}/main.py') > os.path.getmtime(f'{textDir}/output.json'): + if os.path.getmtime(f'{textDir}/main.py') > os.path.getmtime(OUTPUT_JSON_PATH): print("\t...but the python file is new, so we're doing it anyway!") return sys.exit(0) @@ -401,7 +419,7 @@ def download_xlsx_file(): new_file_path.unlink() if json_file_path.exists(): print("Skipping parse") - if os.path.getmtime(f'{textDir}/main.py') > os.path.getmtime(f'{textDir}/output.json'): + if os.path.getmtime(f'{textDir}/main.py') > os.path.getmtime(OUTPUT_JSON_PATH): print("\t...but the python file is new, so we're doing it anyway!") return sys.exit(0) @@ -465,7 +483,7 @@ def transfer_xlsx_to_dict(): def generate_header_file(): print("\tGenerating header file") - with open (os.curdir + '/include/translated_text.h', 'w') as hFile: + with open(TRANSLATED_H_PATH, 'w') as hFile: hFile.write("// THIS FILE HAS BEEN GENERATED BY text_helper/main.py !\n\n#ifndef TRANSLATED_TEXT_H\n#define TRANSLATED_TEXT_H\n\n#include \n\n") sectionEnds = [] @@ -502,7 +520,7 @@ def generate_text_tables(): def generate_cpp_file(): print("\tGenerating cpp file") - with open(os.curdir + '/source/translated_text.cpp', 'w') as cppFile: + with open(TRANSLATED_CPP_PATH, 'w') as cppFile: cppFile.write("// THIS FILE HAS BEEN GENERATED BY text_helper/main.py !\n#include \"translated_text.h\"\n#include \"debug_mode.h\"\n") # generate includes for each language for lang in Languages: @@ -539,7 +557,7 @@ def output_json_file(): outText += str(byte) mainDict[lang.name][section][item]["text"] = outText - with open(textDir + '/output.json', 'w') as jsonFile: + with open(OUTPUT_JSON_PATH, 'w') as jsonFile: jsonFile.write(json.dumps(mainDict)) @@ -569,7 +587,7 @@ class Font: def build_h(): print("Building font.h") - with open(fontDir + "/include/fonts.h", 'w') as f: + with open(FONTS_H_PATH, 'w') as f: f.write(f'''#ifndef PTGB_BUILD_LANGUAGE #error "PTGB_BUILD_LANGUAGE not defined" #endif @@ -629,7 +647,7 @@ 
def generate_tables(): print("Generating font tables") for myFont in fonts.values(): print(f'\t{myFont.fileName}') - reader = png.Reader(f'{fontDir}/text_helper/fonts/{myFont.fileName}.png') + reader = png.Reader(f'{BASE_DIR}/fonts/{myFont.fileName}.png') png_info = reader.read()[3] palette = png_info.get('palette') if (palette is None): @@ -747,4 +765,4 @@ generate_header_file() generate_text_tables() generate_cpp_file() output_json_file() -print("text_helper finished!\n") \ No newline at end of file +print("text_helper finished!\n") diff --git a/tools/text_helper/text.xlsx b/tools/text_helper/text.xlsx new file mode 100644 index 0000000..e02c4e5 Binary files /dev/null and b/tools/text_helper/text.xlsx differ