Modifying build process to ignore unmodified files

This commit is contained in:
The Gears of Progress 2025-08-06 23:08:14 -04:00
parent ed6ee974f8
commit 4a612713ee
8 changed files with 176 additions and 111 deletions

View File

@ -147,9 +147,19 @@ generate_data:
mkdir -p data
mkdir -p to_compress
@env -i "PATH=$(PATH)" $(MAKE) -C tools/data-generator
@echo
@echo "----------------------------------------------------------------"
@echo
@tools/data-generator/data-generator to_compress
@python3 text_helper/main.py
@echo "Compressing bin files!"
@echo -n "["
@find to_compress -name "*.bin" -print0 | xargs -0 -n1 ./compress_lz10.sh
@echo "]"
@echo "Compressing finished!"
@echo
@echo "----------------------------------------------------------------"
@echo
#---------------------------------------------------------------------------------
$(BUILD): generate_data
@ -165,6 +175,8 @@ clean:
@$(MAKE) -C tools/data-generator clean
@$(MAKE) -C loader clean
@rm -fr $(BUILD) $(TARGET).elf $(TARGET).gba data/ to_compress/
@rm text_helper/output.json
#---------------------------------------------------------------------------------

View File

@ -1,4 +1,10 @@
#!/bin/sh
# Compress one .bin file to LZ10 format with gbalzss, writing the result to
# data/<name>_lz10.bin. Performs a make-style mtime check so unmodified inputs
# are not recompressed. Emits a single progress character on stdout:
#   "C" = compressed, "S" = skipped (output already up to date).
infile="$1"
outfile="data/$(basename "$infile" .bin)_lz10.bin"
# -nt is true when infile is newer than outfile (and when outfile is missing),
# so the first run always compresses. The compression must happen ONLY inside
# this branch — an unconditional gbalzss call before the check would defeat
# the skip logic entirely.
if [ "$infile" -nt "$outfile" ]; then
    gbalzss e "$infile" "$outfile"
    # printf instead of `echo -n`: -n is not portable under /bin/sh.
    printf "C"
else
    printf "S"
fi

View File

@ -8,28 +8,51 @@ from collections import defaultdict
import copy
import math
import sys
import filecmp
update = True
print ("\nRunning text_helper:\n\n\n\n---------------")
print ("Running text_helper:")
if (update == True):
url = 'https://docs.google.com/spreadsheets/d/14LLs5lLqWasFcssBmJdGXjjYxARAJBa_QUOUhXZt4v8/export?format=xlsx'
file_Path = 'text_helper/text.xlsx'
new_file_path = 'text_helper/new_text.xlsx'
old_file_path = 'text_helper/text.xlsx'
json_file_path = 'text_helper/output.json'
try:
response = requests.get(url, timeout=10)
response = requests.get(url, timeout=5)
response.raise_for_status()
if response.status_code == 200:
with open(file_Path, 'wb') as file:
with open(new_file_path, 'wb') as file:
file.write(response.content)
print('File downloaded successfully')
except requests.exceptions.ReadTimeout as errrt:
print("Connection Error. Continuing with previously downloaded file.")
if os.path.exists(old_file_path):
print("Connection timed out. Continuing with locally downloaded file.")
else:
print("xlsx file is missing and connection timed out. Exiting...")
except requests.exceptions.ConnectionError as conerr:
print("Connection Error. Continuing with previously downloaded file.")
if os.path.exists(old_file_path):
print("Connection error. Continuing with locally downloaded file.")
else:
print("xlsx file is missing and connection timed out. Exiting...")
if os.path.exists(old_file_path):
new_file = pd.read_excel(new_file_path, sheet_name="Translations")
old_file = pd.read_excel(old_file_path, sheet_name="Translations")
if new_file.equals(old_file):
if os.path.exists(json_file_path):
print("Downloaded file is identical. Skipping parse\n")
os.rename(new_file_path, old_file_path)
exit()
print("json file missing - forcing rebuild.")
os.remove(new_file_path)
else:
print("xlsx file missing - forcing rebuild.")
os.rename(new_file_path, old_file_path)
engCharArray = [
@ -210,13 +233,13 @@ def SplitSentenceIntoLines(sentence, offset, pixelsPerChar, pixelsInLine):
# -*- coding: utf-8 -*-
import re
alphabets= "([A-Za-z])"
prefixes = "(Mr|St|Mrs|Ms|Dr)[.]"
suffixes = "(Inc|Ltd|Jr|Sr|Co)"
starters = "(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\s|She\s|It\s|They\s|Their\s|Our\s|We\s|But\s|However\s|That\s|This\s|Wherever)"
acronyms = "([A-Z][.][A-Z][.](?:[A-Z][.])?)"
websites = "[.](com|net|org|io|gov|edu|me)"
digits = "([0-9])"
alphabets= r"([A-Za-z])"
prefixes = r"(Mr|St|Mrs|Ms|Dr)[.]"
suffixes = r"(Inc|Ltd|Jr|Sr|Co)"
starters = r"(Mr|Mrs|Ms|Dr|Prof|Capt|Cpt|Lt|He\s|She\s|It\s|They\s|Their\s|Our\s|We\s|But\s|However\s|That\s|This\s|Wherever)"
acronyms = r"([A-Z][.][A-Z][.](?:[A-Z][.])?)"
websites = r"[.](com|net|org|io|gov|edu|me)"
digits = r"([0-9])"
multiple_dots = r'\.{2,}'
def split_into_sentences(text: str) -> list[str]:
@ -239,7 +262,7 @@ def split_into_sentences(text: str) -> list[str]:
text = re.sub(digits + "[.]" + digits,"\\1<prd>\\2",text)
text = re.sub(multiple_dots, lambda match: "<prd>" * len(match.group(0)) + "<stop>", text)
if "Ph.D" in text: text = text.replace("Ph.D.","Ph<prd>D<prd>")
text = re.sub("\s" + alphabets + "[.] "," \\1<prd> ",text)
text = re.sub(r"\s" + alphabets + "[.] "," \\1<prd> ",text)
text = re.sub(acronyms+" "+starters,"\\1<stop> \\2",text)
text = re.sub(alphabets + "[.]" + alphabets + "[.]" + alphabets + "[.]","\\1<prd>\\2<prd>\\3<prd>",text)
text = re.sub(alphabets + "[.]" + alphabets + "[.]","\\1<prd>\\2<prd>",text)
@ -416,7 +439,7 @@ def write_enum_to_header_file(hFile, prefix, dictionary):
return num
print("\n\nStarting parse: \n")
print("Starting parse:")
currSheet = pd.read_excel(dir + "/text.xlsx", sheet_name="Translations")
for row in currSheet.iterrows():
@ -548,3 +571,4 @@ for lang in Languages:
with open(dir + '/output.json', 'w') as jsonFile:
jsonFile.write(json.dumps(mainDict))
print("Parse finished!\n")

View File

@ -1,5 +1,5 @@
# # Compiler flags
CXXFLAGS := -std=c++11 -fno-rtti -fno-exceptions -fno-unwind-tables -Wall -Wextra -I $(CURDIR)/include -g
CXXFLAGS := -std=c++20 -fno-rtti -fno-exceptions -fno-unwind-tables -Wall -Wextra -I $(CURDIR)/include -g
# Source files directory
SRC_DIR := ./src

View File

@ -4,6 +4,6 @@
#include <cstdint>
#include <cstddef>
void writeTable(const char *output_path, const char *filename, const char *buffer, size_t buffer_size);
void writeTable(const char *input_path, const char *output_path, const char *filename, const char *buffer, size_t buffer_size);
#endif

View File

@ -2,22 +2,39 @@
#include <cstdio>
#include <cstring>
#include <filesystem>
// NOTE(review): this span is a rendered diff view — lines from both the old
// and the new revision of writeTable() are interleaved below (the +/- markers
// were lost). The checked-in file contains only the post-change lines.
// Post-change behavior: write buffer_size bytes of buffer to
// <output_path>/<filename> (or bare filename when output_path is empty),
// skipping the write when the existing output file is newer than input_path
// (make-style mtime dependency check). Progress output: "S" = skipped,
// "B" = table written.
void writeTable(const char* output_path, const char *filename, const char *buffer, size_t buffer_size)
void writeTable(const char *input_path, const char *output_path, const char *filename, const char *buffer, size_t buffer_size)
{
char full_path[4096];
FILE* f;
char full_output_path[4096];
if(output_path[0] != '\0')
FILE *f;
if (output_path[0] != '\0')
{
snprintf(full_path, sizeof(full_path), "%s/%s", output_path, filename);
snprintf(full_output_path, sizeof(full_output_path), "%s/%s", output_path, filename);
}
else
{
// NOTE(review): strncpy does not NUL-terminate when filename fills the
// entire buffer — snprintf would be safer here.
strncpy(full_path, filename, sizeof(full_path));
strncpy(full_output_path, filename, sizeof(full_output_path));
}
f = fopen(full_path, "wb+");
// Skip rewriting when the output is already newer than its source input.
if (std::filesystem::exists(full_output_path))
{
std::filesystem::file_time_type inf_time = std::filesystem::last_write_time(input_path);
std::filesystem::file_time_type outf_time = std::filesystem::last_write_time(full_output_path);
if (outf_time > inf_time)
{
//printf("File %s is newer than %s, skipping\n", full_output_path, input_path);
printf("S");
return;
}
}
// NOTE(review): fopen's result is not checked before fwrite — assumes the
// output directory always exists (the Makefile mkdirs it); TODO confirm.
f = fopen(full_output_path, "wb+");
fwrite(buffer, 1, buffer_size, f);
fclose(f);
printf("B");
}

View File

@ -9,20 +9,21 @@
#include <cstdio>
#include <cstring>
#include <cstdlib>
#include <filesystem>
// This application holds the various long static data arrays that Poke Transporter GB uses
// and it writes them to .bin files that can be compressed with gbalzss later.
// it's useful to do it this way because it keeps this data easy to view, edit and document
// This function generates a binary file containing the specified list of ROM_DATA structs
// NOTE(review): rendered diff view — each wrapper below shows its pre-change
// signature/call line immediately followed by the post-change one; only the
// post-change lines exist in the checked-in file.
// Serializes an array of ROM_DATA structs as raw bytes into
// <output_path>/<filename> via writeTable(); input_path (new parameter) is
// the source .cpp used for writeTable's mtime skip check.
void generate_gba_rom_value_tables(const char *output_path, const char *filename, const struct ROM_DATA *rom_data_values, u16 num_elements)
void generate_gba_rom_value_tables(const char *input_path, const char *output_path, const char *filename, const struct ROM_DATA *rom_data_values, u16 num_elements)
{
writeTable(output_path, filename, reinterpret_cast<const char*>(rom_data_values), num_elements * sizeof(struct ROM_DATA));
writeTable(input_path, output_path, filename, reinterpret_cast<const char *>(rom_data_values), num_elements * sizeof(struct ROM_DATA));
}
// Same as above, but serializing GB_ROM structs instead of ROM_DATA.
void generate_gb_rom_value_tables(const char *output_path, const char *filename, const struct GB_ROM *rom_data_values, u16 num_elements)
void generate_gb_rom_value_tables(const char *input_path, const char *output_path, const char *filename, const struct GB_ROM *rom_data_values, u16 num_elements)
{
writeTable(output_path, filename, reinterpret_cast<const char*>(rom_data_values), num_elements * sizeof(struct GB_ROM));
writeTable(input_path, output_path, filename, reinterpret_cast<const char *>(rom_data_values), num_elements * sizeof(struct GB_ROM));
}
/**
@ -31,7 +32,7 @@ void generate_gb_rom_value_tables(const char *output_path, const char *filename,
* The reason we single out pokémon yellow, is because the binary patch diff is significant compared to Blue, Red and Green.
* It's easier to compress the data if we generate it into a separate file.
*/
void generate_payloads_for(uint8_t generation, bool yellow_version, const char* output_path, const char* filename)
void generate_payloads_for(uint8_t generation, bool yellow_version, const char *full_path)
{
uint8_t base_payload_buffer[PAYLOAD_SIZE];
uint8_t other_payload_buffer[PAYLOAD_SIZE];
@ -41,35 +42,22 @@ void generate_payloads_for(uint8_t generation, bool yellow_version, const char*
u16 base_payload_index;
u16 index;
char full_path[4096];
if(output_path[0] != '\0')
{
snprintf(full_path, sizeof(full_path), "%s/%s", output_path, filename);
}
else
{
strncpy(full_path, filename, sizeof(full_path));
}
const struct GB_ROM *rom_value_sets[] = {
gb_rom_values_eng,
gb_rom_values_fre
};
gb_rom_values_fre};
const u16 rom_value_sizes[] = {
gb_rom_values_eng_size,
gb_rom_values_fre_size
};
gb_rom_values_fre_size};
const u8 num_elements = sizeof(rom_value_sizes) / sizeof(u16);
// search for the first english GB_ROM struct for the given generation
for(base_payload_index = 0; base_payload_index < gb_rom_values_eng_size; ++base_payload_index)
for (base_payload_index = 0; base_payload_index < gb_rom_values_eng_size; ++base_payload_index)
{
if(gb_rom_values_eng[base_payload_index].generation == generation)
if (gb_rom_values_eng[base_payload_index].generation == generation)
{
if((!yellow_version && gb_rom_values_eng[base_payload_index].version != YELLOW_ID) || (yellow_version && gb_rom_values_eng[base_payload_index].version == YELLOW_ID))
if ((!yellow_version && gb_rom_values_eng[base_payload_index].version != YELLOW_ID) || (yellow_version && gb_rom_values_eng[base_payload_index].version == YELLOW_ID))
{
break;
}
@ -82,14 +70,14 @@ void generate_payloads_for(uint8_t generation, bool yellow_version, const char*
init_payload(base_payload_buffer, gb_rom_values_eng[base_payload_index], TRANSFER, false);
payload_writer.set_base_payload(gb_rom_values_eng[base_payload_index].language, gb_rom_values_eng[base_payload_index].version, base_payload_buffer, gb_rom_values_eng[base_payload_index].payload_size);
for(rom_set_index = 0; rom_set_index < num_elements; ++rom_set_index)
for (rom_set_index = 0; rom_set_index < num_elements; ++rom_set_index)
{
for(index = 0; index < rom_value_sizes[rom_set_index]; ++index)
for (index = 0; index < rom_value_sizes[rom_set_index]; ++index)
{
const struct GB_ROM *curr_rom = &rom_value_sets[rom_set_index][index];
if(curr_rom->generation != generation ||
(rom_set_index == 0 && index == base_payload_index) ||
(yellow_version && curr_rom->version != YELLOW_ID) ||
if (curr_rom->generation != generation ||
(rom_set_index == 0 && index == base_payload_index) ||
(yellow_version && curr_rom->version != YELLOW_ID) ||
(!yellow_version && curr_rom->version == YELLOW_ID))
{
// skip if:
@ -97,7 +85,7 @@ void generate_payloads_for(uint8_t generation, bool yellow_version, const char*
// - if it's the base payload
// - if we specified yellow_version == true and the current rom is not a pokémon yellow rom.
// - if we specified yellow_version == false and the current rom IS a pokémon yellow rom.
continue;
continue;
}
// add the binary patches for this ROM
@ -111,9 +99,8 @@ void generate_payloads_for(uint8_t generation, bool yellow_version, const char*
payload_writer.write_to_file(full_path);
}
void test_payloads(const char* output_path, const char* filename)
void test_payloads(const char *full_path)
{
char full_path[4096];
uint8_t buffer[2048]; // 2048 bytes is enough for the payloads
uint8_t reference_payload_buffer[PAYLOAD_SIZE];
uint8_t reconstructed_payload_buffer[PAYLOAD_SIZE];
@ -122,42 +109,31 @@ void test_payloads(const char* output_path, const char* filename)
u16 rom_set_index;
u16 index;
if(output_path[0] != '\0')
{
snprintf(full_path, sizeof(full_path), "%s/%s", output_path, filename);
}
else
{
strncpy(full_path, filename, sizeof(full_path));
}
file = fopen(full_path,"rb"); /*open file*/
fseek(file, 0, SEEK_END);
size = ftell(file); /*calc the size needed*/
file = fopen(full_path, "rb"); /*open file*/
fseek(file, 0, SEEK_END);
size = ftell(file); /*calc the size needed*/
fseek(file, 0, SEEK_SET);
payload_file_reader payload_reader(buffer, size);
const struct GB_ROM *rom_value_sets[] = {
gb_rom_values_eng,
gb_rom_values_fre
};
gb_rom_values_fre};
const u16 rom_value_sizes[] = {
gb_rom_values_eng_size,
gb_rom_values_fre_size
};
gb_rom_values_fre_size};
const u8 num_elements = sizeof(rom_value_sizes) / sizeof(u16);
fread(&buffer, 1, size, file);
for(rom_set_index = 0; rom_set_index < num_elements; ++rom_set_index)
for (rom_set_index = 0; rom_set_index < num_elements; ++rom_set_index)
{
for(index = 0; index < rom_value_sizes[rom_set_index]; ++index)
for (index = 0; index < rom_value_sizes[rom_set_index]; ++index)
{
const struct GB_ROM *curr_rom = &rom_value_sets[rom_set_index][index];
// first generate the reference payload
memset(reference_payload_buffer, 0, PAYLOAD_SIZE);
init_payload(reference_payload_buffer, *curr_rom, TRANSFER, false);
@ -167,56 +143,86 @@ void test_payloads(const char* output_path, const char* filename)
// okay, so, the given file may or may not contain the desired payload.
// we should just skip if the read call returns false
if(!payload_reader.read_payload(reconstructed_payload_buffer, curr_rom->language, curr_rom->version))
if (!payload_reader.read_payload(reconstructed_payload_buffer, curr_rom->language, curr_rom->version))
{
continue; // skip if the payload was not found
}
printf("Testing payload from file %s for language %u, variant %u: ", full_path, curr_rom->language, curr_rom->version);
if(!memcmp(reference_payload_buffer, reconstructed_payload_buffer, PAYLOAD_SIZE))
// printf("Testing payload from file %s for language %u, variant %u: ", full_path, curr_rom->language, curr_rom->version);
if (!memcmp(reference_payload_buffer, reconstructed_payload_buffer, PAYLOAD_SIZE))
{
printf("PASS!\n");
printf("P");
}
else
{
printf("FAILED!\n");
printf("\n\n!!!Payload from file %s for language %u, variant %u failed the payload test!!!\n", full_path, curr_rom->language, curr_rom->version);
// print the differences
for(size_t i = 0; i < PAYLOAD_SIZE; ++i)
for (size_t i = 0; i < PAYLOAD_SIZE; ++i)
{
if(reference_payload_buffer[i] != reconstructed_payload_buffer[i])
if (reference_payload_buffer[i] != reconstructed_payload_buffer[i])
{
printf("Byte %zu: expected 0x%02X, got 0x%02X\n", i, reference_payload_buffer[i], reconstructed_payload_buffer[i]);
}
}
abort(); // stop execution on failure
}
}
}
}
// Builds the payload file <output_path>/<filename> for the given generation
// (yellow_version selects the Pokémon Yellow variant) and then round-trip
// tests it via test_payloads(). Mirrors make-style dependency checking: the
// whole generate+test step is skipped when the output file already exists and
// is newer than payload_builder.cpp. Progress output: "S" = skipped (up to
// date), "B" = built.
void generate_and_test_payloads_for(uint8_t generation, bool yellow_version, const char *output_path, const char *filename)
{
    char full_output_path[4096];
    // snprintf in both branches: unlike the strncpy it replaces, snprintf
    // always NUL-terminates, so an overlong filename cannot leave the buffer
    // unterminated (it is truncated instead).
    if (output_path[0] != '\0')
    {
        snprintf(full_output_path, sizeof(full_output_path), "%s/%s", output_path, filename);
    }
    else
    {
        snprintf(full_output_path, sizeof(full_output_path), "%s", filename);
    }
    if (std::filesystem::exists(full_output_path))
    {
        // NOTE(review): the dependency is hard-coded; assumes payload output
        // only changes when payload_builder.cpp changes — TODO confirm that
        // the GB_ROM value tables feeding the payloads are covered elsewhere.
        std::filesystem::file_time_type inf_time = std::filesystem::last_write_time("tools/data-generator/src/payloads/payload_builder.cpp");
        std::filesystem::file_time_type outf_time = std::filesystem::last_write_time(full_output_path);
        if (outf_time > inf_time)
        {
            // printf("File %s is newer than %s, skipping\n", full_output_path, input_path);
            printf("S");
            return;
        }
    }
    generate_payloads_for(generation, yellow_version, full_output_path);
    test_payloads(full_output_path);
    printf("B");
}
// Entry point of the data generator: emits every .bin data table into
// output_path (argv[1], defaulting to the current directory when absent).
// NOTE(review): rendered diff view — the pre-change call lists appear
// immediately before their post-change replacements below; only the
// post-change calls (with the input .cpp path / merged generate+test helper)
// exist in the checked-in file.
int main(int argc, char **argv)
{
printf("Converting data into bin files!\n[");
const char *output_path = (argc > 1) ? argv[1] : "";
generate_pokemon_data(output_path);
// generate the ROM_DATA tables for each language
// pre-change calls (old signature, no input_path):
generate_gba_rom_value_tables(output_path, "gba_rom_values_eng.bin", rom_data_values_eng, rom_data_values_eng_size);
generate_gba_rom_value_tables(output_path, "gba_rom_values_fre.bin", rom_data_values_fre, rom_data_values_fre_size);
generate_gba_rom_value_tables(output_path, "gba_rom_values_ger.bin", rom_data_values_ger, rom_data_values_ger_size);
generate_gba_rom_value_tables(output_path, "gba_rom_values_ita.bin", rom_data_values_ita, rom_data_values_ita_size);
generate_gba_rom_value_tables(output_path, "gba_rom_values_jpn.bin", rom_data_values_jpn, rom_data_values_jpn_size);
generate_gba_rom_value_tables(output_path, "gba_rom_values_spa.bin", rom_data_values_spa, rom_data_values_spa_size);
// post-change calls (first argument is the source .cpp for the mtime check):
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_eng.cpp", output_path, "gba_rom_values_eng.bin", rom_data_values_eng, rom_data_values_eng_size);
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_fre.cpp", output_path, "gba_rom_values_fre.bin", rom_data_values_fre, rom_data_values_fre_size);
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_ger.cpp", output_path, "gba_rom_values_ger.bin", rom_data_values_ger, rom_data_values_ger_size);
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_ita.cpp", output_path, "gba_rom_values_ita.bin", rom_data_values_ita, rom_data_values_ita_size);
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_jpn.cpp", output_path, "gba_rom_values_jpn.bin", rom_data_values_jpn, rom_data_values_jpn_size);
generate_gba_rom_value_tables("tools/data-generator/src/gba_rom_values/gba_rom_values_spa.cpp", output_path, "gba_rom_values_spa.bin", rom_data_values_spa, rom_data_values_spa_size);
// pre-change GB table calls:
generate_gb_rom_value_tables(output_path, "gb_rom_values_eng.bin", gb_rom_values_eng, gb_rom_values_eng_size);
generate_gb_rom_value_tables(output_path, "gb_rom_values_fre.bin", gb_rom_values_fre, gb_rom_values_fre_size);
// post-change GB table calls:
generate_gb_rom_value_tables("tools/data-generator/src/gb_rom_values/gb_rom_values_eng.cpp", output_path, "gb_rom_values_eng.bin", gb_rom_values_eng, gb_rom_values_eng_size);
generate_gb_rom_value_tables("tools/data-generator/src/gb_rom_values/gb_rom_values_fre.cpp", output_path, "gb_rom_values_fre.bin", gb_rom_values_fre, gb_rom_values_fre_size);
// pre-change payload generation (separate generate + test calls):
generate_payloads_for(1, false, output_path, "gb_gen1_payloads_RB.bin");
test_payloads(output_path, "gb_gen1_payloads_RB.bin");
generate_payloads_for(1, true, output_path, "gb_gen1_payloads_Y.bin");
test_payloads(output_path, "gb_gen1_payloads_Y.bin");
generate_payloads_for(2, false, output_path, "gb_gen2_payloads.bin");
test_payloads(output_path, "gb_gen2_payloads.bin");
// post-change: merged generate+test helper with the mtime skip check:
generate_and_test_payloads_for(1, false, output_path, "gb_gen1_payloads_RB.bin");
generate_and_test_payloads_for(1, true, output_path, "gb_gen1_payloads_Y.bin");
generate_and_test_payloads_for(2, false, output_path, "gb_gen2_payloads.bin");
printf("]\nConvertion finished!\n\n");
return 0;
}

View File

@ -4705,16 +4705,16 @@ const u8 TYPES[POKEMON_ARRAY_SIZE][2]{
// Writes every static Pokémon data table to its own .bin file under
// output_path via writeTable().
// NOTE(review): rendered diff view — the twelve pre-change writeTable calls
// appear first, followed by their twelve post-change replacements that pass
// pokemon_data.cpp as the input dependency for the mtime skip check; only the
// post-change calls exist in the checked-in file.
void generate_pokemon_data(const char *output_path)
{
// pre-change calls (old writeTable signature, no input path):
writeTable(output_path, "gen_1_charsets.bin", (const char*)gen_1_charsets, sizeof(gen_1_charsets));
writeTable(output_path, "gen_2_charsets.bin", (const char*)gen_2_charsets, sizeof(gen_2_charsets));
writeTable(output_path, "gen_3_charsets.bin", (const char*)gen_3_charsets, sizeof(gen_3_charsets));
writeTable(output_path, "EXP_GROUPS.bin", (const char*)EXP_GROUPS, sizeof(EXP_GROUPS));
writeTable(output_path, "GENDER_RATIO.bin", (const char*)GENDER_RATIO, sizeof(GENDER_RATIO));
writeTable(output_path, "NUM_ABILITIES.bin", (const char*)(NUM_ABILITIES), sizeof(NUM_ABILITIES));
writeTable(output_path, "FIRST_MOVES.bin", (const char*)FIRST_MOVES, sizeof(FIRST_MOVES));
writeTable(output_path, "JPN_NAMES.bin", (const char*)JPN_NAMES, sizeof(JPN_NAMES));
writeTable(output_path, "POWER_POINTS.bin", (const char*)POWER_POINTS, sizeof(POWER_POINTS));
writeTable(output_path, "MENU_SPRITE_PALS.bin", (const char*)MENU_SPRITE_PALS, sizeof(MENU_SPRITE_PALS));
writeTable(output_path, "EVENT_PKMN.bin", (const char*)EVENT_PKMN, sizeof(EVENT_PKMN));
writeTable(output_path, "TYPES.bin", (const char*)TYPES, sizeof(TYPES));
// post-change calls (first argument is the source .cpp for the mtime check):
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "gen_1_charsets.bin", (const char*)gen_1_charsets, sizeof(gen_1_charsets));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "gen_2_charsets.bin", (const char*)gen_2_charsets, sizeof(gen_2_charsets));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "gen_3_charsets.bin", (const char*)gen_3_charsets, sizeof(gen_3_charsets));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "EXP_GROUPS.bin", (const char*)EXP_GROUPS, sizeof(EXP_GROUPS));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "GENDER_RATIO.bin", (const char*)GENDER_RATIO, sizeof(GENDER_RATIO));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "NUM_ABILITIES.bin", (const char*)(NUM_ABILITIES), sizeof(NUM_ABILITIES));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "FIRST_MOVES.bin", (const char*)FIRST_MOVES, sizeof(FIRST_MOVES));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "JPN_NAMES.bin", (const char*)JPN_NAMES, sizeof(JPN_NAMES));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "POWER_POINTS.bin", (const char*)POWER_POINTS, sizeof(POWER_POINTS));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "MENU_SPRITE_PALS.bin", (const char*)MENU_SPRITE_PALS, sizeof(MENU_SPRITE_PALS));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "EVENT_PKMN.bin", (const char*)EVENT_PKMN, sizeof(EVENT_PKMN));
writeTable("tools/data-generator/src/pokemon_data.cpp", output_path, "TYPES.bin", (const char*)TYPES, sizeof(TYPES));
}