mirror of
https://github.com/etienerodri/PRSA---Rom-Manager-.git
synced 2026-05-11 22:45:19 -05:00
Add files via upload
This commit is contained in:
parent
a02bc81024
commit
d65d3a1041
6
gui/__init__.py
Normal file
6
gui/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# gui package: re-export the public GUI classes so callers can write
# `from gui import RomToolGUI` without knowing the submodule layout.
from gui.gui import RomToolGUI
from gui.romselector import ROMSelector
from gui.mapselector import MapSelector, MapPair
from gui.tilesetrender import TilesetRenderer
from gui.layerswap import LayerSwap

# Explicit public API of the package.
__all__ = ['RomToolGUI', 'ROMSelector', 'MapSelector', 'MapPair', 'TilesetRenderer', 'LayerSwap']
|
||||
872
gui/gui.py
Normal file
872
gui/gui.py
Normal file
|
|
@ -0,0 +1,872 @@
|
|||
import sys
|
||||
import os
|
||||
import io
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from PyQt6.QtWidgets import QApplication, QMainWindow, QWidget, QFrame, QLabel, QPushButton, QScrollArea, QVBoxLayout, QHBoxLayout, QSizePolicy, QMessageBox, QFileDialog, QDialog, QStatusBar
|
||||
from PyQt6.QtCore import Qt, QSize
|
||||
from PyQt6.QtGui import QPixmap, QFont, QColor
|
||||
from PIL import Image
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
from gui.romselector import ROMSelector
|
||||
from gui.mapselector import MapSelector
|
||||
from gui.tilesetrender import TilesetRenderer
|
||||
from gui.layerswap import LayerSwap
|
||||
from load.importtileset import import_tileset_auto_detect, get_file_info
|
||||
from load.pngtilesettransfer import transfer_png_to_map, get_png_info
|
||||
from load.saverom import ROMSaver
|
||||
# --- Application color palette (hex RGB) ---
COLOR_BG_MAIN = '#182d55'     # main window background
COLOR_ELEMENT_BG = '#26395e'  # panels, buttons, scroll areas
COLOR_ACCENT = '#6d86a8'      # borders / accent text
COLOR_HOVER = '#354a75'       # button hover state
COLOR_TEXT = '#FFFFFF'        # default foreground text

# Initial main-window size in pixels.
WINDOW_WIDTH = 1200
WINDOW_HEIGHT = 800

# Discrete zoom factors the -/+ scale buttons step through (ascending order).
SCALE_STEPS = [0.25, 0.5, 0.75, 1.0, 1.5, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0]
||||
# Application-wide Qt style sheet applied once on the main window.
# Buttons opt into a variant via setProperty('class', 'primary'|'small'|'scale');
# QFrame panels use setProperty('class', 'panel').
GLOBAL_STYLE = f'\n QMainWindow, QWidget {{\n background-color: {COLOR_BG_MAIN};\n color: {COLOR_TEXT};\n font-family: Arial;\n }}\n\n QPushButton.primary {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 2px solid {COLOR_ACCENT};\n border-radius: 10px;\n font-size: 12px;\n font-weight: bold;\n min-height: 40px;\n padding: 4px 12px;\n }}\n QPushButton.primary:hover {{\n background-color: {COLOR_HOVER};\n }}\n QPushButton.primary:disabled {{\n color: #5a6070;\n border-color: #3a4560;\n background-color: {COLOR_ELEMENT_BG};\n }}\n\n QPushButton.small {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 5px;\n font-size: 10px;\n min-height: 30px;\n padding: 2px 8px;\n text-align: left;\n }}\n QPushButton.small:hover {{\n background-color: {COLOR_HOVER};\n }}\n QPushButton.small:disabled {{\n color: #5a6070;\n border-color: #3a4560;\n }}\n\n QPushButton.scale {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 6px;\n font-size: 13px;\n font-weight: bold;\n min-width: 44px;\n max-width: 44px;\n min-height: 28px;\n max-height: 28px;\n }}\n QPushButton.scale:hover {{\n background-color: {COLOR_HOVER};\n }}\n\n QFrame.panel {{\n background-color: {COLOR_ELEMENT_BG};\n border: 2px solid {COLOR_ACCENT};\n border-radius: 10px;\n }}\n\n QScrollArea {{\n background-color: {COLOR_ELEMENT_BG};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 5px;\n }}\n QScrollArea > QWidget > QWidget {{\n background-color: {COLOR_ELEMENT_BG};\n }}\n QScrollBar:vertical {{\n background: {COLOR_BG_MAIN};\n width: 10px;\n border-radius: 5px;\n }}\n QScrollBar::handle:vertical {{\n background: {COLOR_ACCENT};\n border-radius: 5px;\n min-height: 20px;\n }}\n QScrollBar::handle:vertical:hover {{\n background: {COLOR_HOVER};\n }}\n QScrollBar::add-line:vertical, QScrollBar::sub-line:vertical {{\n height: 0px;\n }}\n\n QStatusBar {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n font-size: 11px;\n padding: 0px 20px;\n min-height: 30px;\n }}\n\n QLabel {{\n background-color: transparent;\n color: {COLOR_TEXT};\n font-size: 11px;\n }}\n\n QDialog {{\n background-color: {COLOR_BG_MAIN};\n }}\n QMessageBox {{\n background-color: {COLOR_BG_MAIN};\n color: {COLOR_TEXT};\n }}\n QMessageBox QLabel {{\n color: {COLOR_TEXT};\n }}\n QMessageBox QPushButton {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 6px;\n min-width: 80px;\n min-height: 28px;\n padding: 4px 12px;\n font-size: 11px;\n }}\n QMessageBox QPushButton:hover {{\n background-color: {COLOR_HOVER};\n }}\n'
|
||||
|
||||
def _primary_btn(text: str, parent: QWidget=None) -> QPushButton:
    """Factory for a large bold action button styled by the 'primary' QSS class."""
    button = QPushButton(text, parent)
    button.setProperty('class', 'primary')
    button.setFont(QFont('Arial', 12, QFont.Weight.Bold))
    button.setMinimumHeight(40)
    return button
||||
|
||||
def _small_btn(text: str, parent: QWidget=None) -> QPushButton:
    """Factory for a compact list-entry button styled by the 'small' QSS class."""
    button = QPushButton(text, parent)
    button.setProperty('class', 'small')
    button.setFont(QFont('Arial', 10))
    button.setMinimumHeight(30)
    return button
||||
|
||||
def _scale_btn(text: str, parent: QWidget=None) -> QPushButton:
    """Factory for a fixed-size zoom (+/-) button styled by the 'scale' QSS class."""
    button = QPushButton(text, parent)
    button.setProperty('class', 'scale')
    button.setFont(QFont('Arial', 13, QFont.Weight.Bold))
    button.setFixedSize(44, 28)
    return button
||||
|
||||
def _panel_frame(parent: QWidget=None) -> QFrame:
    """Factory for a rounded, bordered container frame styled by the 'panel' QSS class."""
    panel = QFrame(parent)
    panel.setObjectName('panel')
    panel.setProperty('class', 'panel')
    return panel
||||
|
||||
def _pil_to_qpixmap(image: Image.Image) -> QPixmap:
    """Convert a PIL image to a QPixmap by round-tripping through an in-memory PNG."""
    png_buffer = io.BytesIO()
    image.save(png_buffer, format='PNG')
    pixmap = QPixmap()
    pixmap.loadFromData(png_buffer.getvalue())
    return pixmap
||||
|
||||
class ScrollContainer(QScrollArea):
    """Vertical scroll area holding a column of widgets above a trailing stretch.

    Widgets are appended just before the stretch so they stack from the top.
    """

    def __init__(self, parent: QWidget=None):
        super().__init__(parent)
        self.setWidgetResizable(True)
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAsNeeded)
        self._inner = QWidget()
        self._inner.setStyleSheet(f'background-color: {COLOR_ELEMENT_BG};')
        self._layout = QVBoxLayout(self._inner)
        self._layout.setContentsMargins(5, 5, 5, 5)
        self._layout.setSpacing(2)
        self._layout.addStretch()
        self.setWidget(self._inner)

    def inner_widget(self) -> QWidget:
        """Return the scrolled content widget."""
        return self._inner

    def inner_layout(self) -> QVBoxLayout:
        """Return the vertical layout of the content widget."""
        return self._layout

    def add_widget(self, widget: QWidget):
        """Append *widget* to the column, keeping the stretch item last."""
        self._layout.insertWidget(self._layout.count() - 1, widget)

    def clear_items(self):
        """Delete every widget, leaving only the trailing stretch item."""
        while self._layout.count() > 1:
            entry = self._layout.takeAt(0)
            taken = entry.widget()
            if taken:
                taken.deleteLater()

    def winfo_children(self):
        """Tkinter-compatibility shim: list the widgets currently in the layout."""
        entries = (self._layout.itemAt(i) for i in range(self._layout.count()))
        return [e.widget() for e in entries if e and e.widget()]
||||
|
||||
class TilesetCanvas(QLabel):
    """Label that displays a rendered tileset pixmap at its exact pixel size."""

    def __init__(self, parent: QWidget=None):
        super().__init__(parent)
        self.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.setStyleSheet(f'background-color: {COLOR_ELEMENT_BG}; border: none;')
        self.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        self.setMinimumSize(200, 100)
        # Last pixmap shown (None when nothing is displayed).
        self._current_pixmap: QPixmap | None = None

    def display_image(self, pixmap: QPixmap):
        """Show *pixmap* and resize the label to match its dimensions."""
        self._current_pixmap = pixmap
        self.setPixmap(pixmap)
        # Resize so the enclosing QScrollArea scrolls instead of cropping.
        self.resize(pixmap.width(), pixmap.height())

    def clear_image(self):
        """Drop the stored pixmap and blank the label."""
        self._current_pixmap = None
        self.clear()
||||
|
||||
class ProgressDialog(QDialog):
    """Small modal dialog with a fixed message and a mutable status line.

    The caller is responsible for closing it when the long-running work ends.
    """

    def __init__(self, parent: QWidget, title: str, message: str):
        super().__init__(parent)
        self.setWindowTitle(title)
        self.setFixedSize(400, 160)
        self.setModal(True)
        # Title bar only — no close/minimize buttons, so it can't be dismissed early.
        self.setWindowFlags(Qt.WindowType.Dialog | Qt.WindowType.CustomizeWindowHint | Qt.WindowType.WindowTitleHint)
        layout = QVBoxLayout(self)
        layout.setContentsMargins(24, 24, 24, 24)
        layout.setSpacing(12)
        # Static description of the operation.
        self._msg_label = QLabel(message)
        self._msg_label.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self._msg_label.setFont(QFont('Arial', 12))
        self._msg_label.setWordWrap(True)
        self._msg_label.setStyleSheet(f'color: {COLOR_TEXT};')
        layout.addWidget(self._msg_label)
        # Live status line, updated via set_status().
        self._status_label = QLabel('Initializing...')
        self._status_label.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self._status_label.setFont(QFont('Arial', 10))
        self._status_label.setStyleSheet(f'color: {COLOR_ACCENT};')
        layout.addWidget(self._status_label)
        # Center the dialog over the parent window.
        if parent:
            pg = parent.geometry()
            self.move(pg.x() + (pg.width() - self.width()) // 2, pg.y() + (pg.height() - self.height()) // 2)

    def set_status(self, text: str):
        """Update the status line and pump the event loop so it repaints immediately."""
        self._status_label.setText(text)
        QApplication.processEvents()
|
||||
|
||||
class RomToolGUI(QMainWindow):
|
||||
|
||||
    def __init__(self):
        """Build the main window: create backend components, wire callbacks, build widgets."""
        super().__init__()
        self.setWindowTitle('Pokemon Ranger: Shadows of Almia - ROM Manager')
        self.resize(WINDOW_WIDTH, WINDOW_HEIGHT)
        self.setStyleSheet(GLOBAL_STYLE)
        # Backend/controller objects.
        self.rom_selector = ROMSelector()
        self.map_selector = MapSelector()
        self.tileset_renderer = TilesetRenderer()
        self.layer_swap = LayerSwap()
        self.rom_saver = ROMSaver()
        # LayerSwap needs access to the selected map and the save cache.
        self.layer_swap.set_map_selector(self.map_selector)
        self.layer_swap.set_rom_saver(self.rom_saver)
        # Current preview zoom factor (always one of SCALE_STEPS).
        self._tileset_scale = 1.0
        # Last PIL image shown on the canvas (None until a tileset renders).
        self._current_pil_image: Image.Image | None = None
        # Backend -> GUI notification callbacks.
        self.map_selector.on_maps_loaded = self._handle_maps_loaded
        self.map_selector.on_map_selected = self._handle_map_selected
        self.map_selector.on_map_data_loaded = self._handle_map_data_loaded
        self.tileset_renderer.on_tileset_rendered = self._handle_tileset_rendered
        self._build_ui()
        self._reset_ui_state()
|
||||
|
||||
    def _build_ui(self):
        """Assemble the three-column layout (left, middle, right) and the status bar."""
        PAD = 20     # outer margin around the window content
        PAD_IN = 10  # spacing between the three columns
        central = QWidget()
        self.setCentralWidget(central)
        main_layout = QHBoxLayout(central)
        main_layout.setContentsMargins(PAD, PAD, PAD, PAD)
        main_layout.setSpacing(PAD_IN)
        self._build_left_sidebar(main_layout)
        self._build_middle_section(main_layout)
        self._build_right_sidebar(main_layout)
        # Permanent status label inside the status bar (updated via _set_status).
        self._status_lbl = QLabel('Ready')
        self._status_lbl.setFont(QFont('Arial', 11))
        self.statusBar().addWidget(self._status_lbl, 1)
|
||||
|
||||
    def _build_left_sidebar(self, parent_layout: QHBoxLayout):
        """Fixed-width column: ROM button, map list panel, tileset list panel."""
        left = QWidget()
        left.setFixedWidth(220)
        vbox = QVBoxLayout(left)
        vbox.setContentsMargins(0, 0, 0, 0)
        vbox.setSpacing(10)
        self.btn_rom = _primary_btn('ROM SELECTOR')
        self.btn_rom.clicked.connect(self._on_rom_selector_clicked)
        vbox.addWidget(self.btn_rom)
        # Disabled until a ROM has been loaded.
        self.btn_map = _primary_btn('MAP SELECT')
        self.btn_map.setEnabled(False)
        vbox.addWidget(self.btn_map)
        # Scrollable list of per-map buttons.
        map_panel = _panel_frame()
        map_panel_vbox = QVBoxLayout(map_panel)
        map_panel_vbox.setContentsMargins(5, 5, 5, 5)
        self.map_list_scroll = ScrollContainer()
        map_panel_vbox.addWidget(self.map_list_scroll)
        vbox.addWidget(map_panel, stretch=3)
        # Disabled until a map has been selected.
        self.btn_tileset = _primary_btn('TILESET SELECTOR')
        self.btn_tileset.setEnabled(False)
        vbox.addWidget(self.btn_tileset)
        # Scrollable list of per-tileset buttons.
        ts_panel = _panel_frame()
        ts_panel_vbox = QVBoxLayout(ts_panel)
        ts_panel_vbox.setContentsMargins(5, 5, 5, 5)
        self.tileset_list_scroll = ScrollContainer()
        ts_panel_vbox.addWidget(self.tileset_list_scroll)
        vbox.addWidget(ts_panel, stretch=2)
        # Dynamically created buttons tracked here so they can be cleared/rebuilt.
        self.map_buttons: list[QPushButton] = []
        self.tileset_buttons: list[QPushButton] = []
        parent_layout.addWidget(left)
|
||||
|
||||
    def _build_middle_section(self, parent_layout: QHBoxLayout):
        """Middle column: tileset preview (with zoom strip) above the layer panel."""
        mid = QWidget()
        vbox = QVBoxLayout(mid)
        vbox.setContentsMargins(0, 0, 0, 0)
        vbox.setSpacing(10)
        rgcn_panel = _panel_frame()
        rgcn_vbox = QVBoxLayout(rgcn_panel)
        rgcn_vbox.setContentsMargins(8, 8, 8, 8)
        rgcn_vbox.setSpacing(4)
        # Zoom strip: [-]  SCALE label  [+]
        scale_strip = QWidget()
        scale_strip.setFixedHeight(38)
        scale_strip.setStyleSheet(f'\n QWidget {{\n background-color: {COLOR_BG_MAIN};\n border-radius: 6px;\n }}\n ')
        scale_hbox = QHBoxLayout(scale_strip)
        scale_hbox.setContentsMargins(6, 4, 6, 4)
        scale_hbox.setSpacing(4)
        self.btn_scale_down = _scale_btn(' — ')
        self.btn_scale_down.clicked.connect(self._on_scale_decrease)
        scale_hbox.addWidget(self.btn_scale_down)
        self.lbl_scale = QLabel('SCALE 1.0x')
        self.lbl_scale.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.lbl_scale.setFont(QFont('Arial', 11, QFont.Weight.Bold))
        scale_hbox.addWidget(self.lbl_scale, stretch=1)
        self.btn_scale_up = _scale_btn(' + ')
        self.btn_scale_up.clicked.connect(self._on_scale_increase)
        scale_hbox.addWidget(self.btn_scale_up)
        rgcn_vbox.addWidget(scale_strip)
        # Scrollable canvas for the rendered tileset; hidden until one renders.
        self.canvas_scroll = QScrollArea()
        self.canvas_scroll.setWidgetResizable(True)
        self.canvas_scroll.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.canvas_scroll.setStyleSheet(f'\n QScrollArea {{\n background-color: {COLOR_ELEMENT_BG};\n border: none;\n }}\n QScrollArea > QWidget > QWidget {{\n background-color: {COLOR_ELEMENT_BG};\n }}\n ')
        self.canvas_rgcn = TilesetCanvas()
        self.canvas_scroll.setWidget(self.canvas_rgcn)
        # Placeholder shown in place of the canvas until a tileset is rendered.
        self.label_rgcn_placeholder = QLabel('Select a tileset to render')
        self.label_rgcn_placeholder.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.label_rgcn_placeholder.setFont(QFont('Arial', 11))
        self.label_rgcn_placeholder.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        rgcn_vbox.addWidget(self.label_rgcn_placeholder, stretch=1)
        rgcn_vbox.addWidget(self.canvas_scroll, stretch=1)
        self.canvas_scroll.hide()
        vbox.addWidget(rgcn_panel, stretch=1)
        # Layer tree panel (populated by LayerSwap once a map loads).
        layer_panel = _panel_frame()
        layer_vbox = QVBoxLayout(layer_panel)
        layer_vbox.setContentsMargins(5, 5, 5, 5)
        self.layer_scroll = ScrollContainer()
        layer_vbox.addWidget(self.layer_scroll)
        vbox.addWidget(layer_panel, stretch=1)
        parent_layout.addWidget(mid, stretch=1)
|
||||
|
||||
def _build_right_sidebar(self, parent_layout: QHBoxLayout):
|
||||
right = QWidget()
|
||||
right.setFixedWidth(220)
|
||||
vbox = QVBoxLayout(right)
|
||||
vbox.setContentsMargins(0, 0, 0, 0)
|
||||
vbox.setSpacing(10)
|
||||
actions = [('SAVE ROM', self._on_save_rom), ('PNG EXPORT', self._on_png_export), ('IMPORT TILESET', self._on_import_tileset), ('PNG TILESET TRANSFER', self._on_png_transfer), ('EXPORT MAP FILES', self._on_export_map_files), ('EXPORT TILESET', self._on_export_tileset)]
|
||||
self.action_buttons: dict[str, QPushButton] = {}
|
||||
for text, handler in actions:
|
||||
btn = _primary_btn(text)
|
||||
btn.setEnabled(False)
|
||||
btn.clicked.connect(handler)
|
||||
vbox.addWidget(btn)
|
||||
self.action_buttons[text.lower().replace(' ', '_')] = btn
|
||||
vbox.addStretch()
|
||||
parent_layout.addWidget(right)
|
||||
|
||||
    def _set_status(self, message: str):
        """Show *message* in the status bar and echo it to stdout for logging."""
        self._status_lbl.setText(message)
        # Keeps the label repainting while long synchronous work runs.
        QApplication.processEvents()
        print(f'Status: {message}')
|
||||
|
||||
def _on_scale_increase(self):
|
||||
current = self._tileset_scale
|
||||
for step in SCALE_STEPS:
|
||||
if step > current + 0.01:
|
||||
self._tileset_scale = step
|
||||
break
|
||||
else:
|
||||
self._tileset_scale = SCALE_STEPS[-1]
|
||||
self._update_scale_label()
|
||||
self._redisplay_tileset()
|
||||
|
||||
def _on_scale_decrease(self):
|
||||
current = self._tileset_scale
|
||||
for step in reversed(SCALE_STEPS):
|
||||
if step < current - 0.01:
|
||||
self._tileset_scale = step
|
||||
break
|
||||
else:
|
||||
self._tileset_scale = SCALE_STEPS[0]
|
||||
self._update_scale_label()
|
||||
self._redisplay_tileset()
|
||||
|
||||
def _update_scale_label(self):
|
||||
scale = self._tileset_scale
|
||||
text = f'SCALE {int(scale)}x' if scale == int(scale) else f'SCALE {scale}x'
|
||||
self.lbl_scale.setText(text)
|
||||
|
||||
    def _redisplay_tileset(self):
        """Re-draw the current tileset at the (possibly changed) zoom level, if any."""
        image = self.tileset_renderer.get_rendered_image()
        if image is not None:
            self._display_image_on_canvas(image)
|
||||
|
||||
def _reset_ui_state(self):
|
||||
self.btn_map.setEnabled(False)
|
||||
self.btn_tileset.setEnabled(False)
|
||||
for btn in self.action_buttons.values():
|
||||
btn.setEnabled(False)
|
||||
self._clear_map_list()
|
||||
self._clear_tileset_list()
|
||||
self._clear_canvas()
|
||||
self._clear_layer_info()
|
||||
self._set_status('Ready')
|
||||
|
||||
    def _on_rom_selector_clicked(self):
        """Ask for a ROM file, extract it, pair up map files, and init the saver."""
        self._set_status('Opening ROM selector...')
        if not self.rom_selector.browse_rom():
            # User cancelled the file dialog.
            self._set_status('Ready')
            return
        self._set_status('Extracting ROM...')
        if self.rom_selector.extract_rom(callback=self._set_status):
            dat_files, tex_files = self.rom_selector.get_map_files()
            self.map_selector.pair_map_files(dat_files, tex_files)
            rom_path = Path(self.rom_selector.rom_path)
            if self.rom_saver.initialize(rom_path):
                print('ROM Saver initialized successfully')
            else:
                # Non-fatal: browsing still works, saving will be unavailable.
                print('Warning: ROM Saver initialization failed')
            self.btn_map.setEnabled(True)
            map_count = len(self.map_selector.get_map_pairs())
            self._set_status(f'ROM loaded! Found {map_count} maps')
            QMessageBox.information(self, 'Success', f'ROM loaded successfully!\n\nFound {map_count} map pairs')
        else:
            self._set_status('ROM extraction failed')
            QMessageBox.critical(self, 'Error', 'Failed to extract ROM')
|
||||
|
||||
def _handle_maps_loaded(self, map_pairs):
|
||||
self._set_status(f'Loading {len(map_pairs)} maps...')
|
||||
self._clear_map_list()
|
||||
for i, map_pair in enumerate(map_pairs):
|
||||
if map_pair.is_complete():
|
||||
text = f'[OK] {map_pair.name}'
|
||||
enabled = True
|
||||
else:
|
||||
text = f'[!] {map_pair.name}'
|
||||
enabled = False
|
||||
btn = _small_btn(text)
|
||||
btn.setEnabled(enabled)
|
||||
btn.clicked.connect(lambda checked, idx=i: self._on_map_clicked(idx))
|
||||
self.map_list_scroll.add_widget(btn)
|
||||
self.map_buttons.append(btn)
|
||||
self._set_status(f'Loaded {len(map_pairs)} maps')
|
||||
|
||||
    def _on_map_clicked(self, index: int):
        """Select the map at *index*; further loading continues via MapSelector callbacks."""
        map_pair = self.map_selector.select_map_by_index(index)
        if map_pair:
            self._set_status(f'Selected: {map_pair.name}')
|
||||
|
||||
def _handle_map_selected(self, map_pair):
|
||||
self._set_status(f'Loading map: {map_pair.name}...')
|
||||
self.layer_scroll.clear_items()
|
||||
loading = QLabel(f'Loading map data...\n\n{map_pair.name}\n\nBuilding layer tree...')
|
||||
loading.setAlignment(Qt.AlignmentFlag.AlignCenter)
|
||||
loading.setFont(QFont('Arial', 11))
|
||||
self.layer_scroll.add_widget(loading)
|
||||
|
||||
    def _handle_map_data_loaded(self, map_data):
        """Populate the tileset list and layer tree once a map's data has been parsed."""
        self._set_status(f'Map data loaded: {map_data.map_name}')
        tilesets = self.map_selector.get_all_tilesets_for_rendering()
        print(f'\n=== Loading {len(tilesets)} tilesets into renderer ===')
        self.tileset_renderer.load_tilesets(tilesets)
        self._populate_tileset_list(tilesets)
        # Tell LayerSwap which files to patch when layers are edited.
        selected_map = self.map_selector.get_selected_map()
        if selected_map and selected_map.dat_path and selected_map.tex_path:
            self.layer_swap.set_map_paths(selected_map.dat_path, selected_map.tex_path)
        self.layer_swap.populate_layers(map_data, self.layer_scroll)
        # Map-level actions become available now that data is loaded.
        self.btn_tileset.setEnabled(True)
        self.action_buttons['save_rom'].setEnabled(True)
        self.action_buttons['import_tileset'].setEnabled(True)
        self.action_buttons['export_map_files'].setEnabled(True)
        self._set_status(f'Ready - {map_data.map_name} ({len(tilesets)} tilesets)')
|
||||
|
||||
    def _clear_map_list(self):
        """Remove all map buttons from the left panel and forget them."""
        for btn in self.map_buttons:
            btn.deleteLater()
        self.map_buttons.clear()
        # Also clears anything else that was added to the scroll column.
        self.map_list_scroll.clear_items()
|
||||
|
||||
    def _populate_tileset_list(self, tilesets):
        """Create one list button per tileset, flagging missing graphics/palette data.

        Each entry in *tilesets* is a dict with 'has_graphics'/'has_palette' flags
        and optional 'error'/'warning' keys.
        """
        self._clear_tileset_list()
        print(f'Creating {len(tilesets)} tileset buttons...')
        for i, tileset in enumerate(tilesets):
            has_gfx = tileset.get('has_graphics', False)
            has_pal = tileset.get('has_palette', False)
            # [OK] = both parts present, [~] = partial, [X] = unusable.
            if has_gfx and has_pal:
                icon = '[OK]'
                enabled = True
            elif has_gfx or has_pal:
                icon = '[~] '
                enabled = True
            else:
                icon = '[X] '
                enabled = False
            text = f'{icon} Tileset {i}'
            if 'error' in tileset:
                text += ' [ERR]'
            elif 'warning' in tileset:
                text += ' [WARN]'
            btn = _small_btn(text)
            btn.setEnabled(enabled)
            # Default-arg binding captures the current index for the callback.
            btn.clicked.connect(lambda checked, idx=i: self._on_tileset_clicked(idx))
            self.tileset_list_scroll.add_widget(btn)
            self.tileset_buttons.append(btn)
        print(f'Created {len(self.tileset_buttons)} tileset buttons')
|
||||
|
||||
def _on_tileset_clicked(self, index: int):
|
||||
print(f'\n=== Tileset {index} clicked ===')
|
||||
self._set_status(f'Rendering tileset {index}...')
|
||||
if self.tileset_renderer.select_tileset(index):
|
||||
self.action_buttons['png_export'].setEnabled(True)
|
||||
self.action_buttons['import_tileset'].setEnabled(True)
|
||||
self.action_buttons['png_tileset_transfer'].setEnabled(True)
|
||||
self.action_buttons['export_tileset'].setEnabled(True)
|
||||
self._set_status(f'Tileset {index} rendered')
|
||||
else:
|
||||
self._set_status(f'Failed to render tileset {index}')
|
||||
QMessageBox.critical(self, 'Render Error', f'Failed to render tileset {index}')
|
||||
|
||||
    def _handle_tileset_rendered(self, image: Image.Image):
        """Swap the placeholder for the canvas and show the freshly rendered image."""
        print(f'Displaying tileset: {image.size}')
        self.label_rgcn_placeholder.hide()
        self.canvas_scroll.show()
        self._display_image_on_canvas(image)
|
||||
|
||||
    def _display_image_on_canvas(self, image: Image.Image):
        """Scale *image* by the current zoom (shrinking to fit the viewport) and show it."""
        self._current_pil_image = image
        canvas_w = self.canvas_scroll.width()
        canvas_h = self.canvas_scroll.height()
        # Before the first layout pass the scroll area reports a ~1px size;
        # fall back to reasonable defaults so scaling still works.
        if canvas_w <= 1:
            canvas_w = 700
        if canvas_h <= 1:
            canvas_h = 350
        img_w, img_h = image.size
        target_w = int(img_w * self._tileset_scale)
        target_h = int(img_h * self._tileset_scale)
        # Shrink further if the zoomed image would overflow 95% of the viewport.
        max_w = int(canvas_w * 0.95)
        max_h = int(canvas_h * 0.95)
        if target_w > max_w or target_h > max_h:
            fit_scale = min(max_w / max(target_w, 1), max_h / max(target_h, 1))
            target_w = max(1, int(target_w * fit_scale))
            target_h = max(1, int(target_h * fit_scale))
        # NEAREST keeps pixel art crisp when scaling.
        scaled_img = image.resize((target_w, target_h), Image.NEAREST)
        pixmap = _pil_to_qpixmap(scaled_img)
        self.canvas_rgcn.display_image(pixmap)
        print(f'Image displayed: {target_w}x{target_h} (scale {self._tileset_scale}x)')
|
||||
|
||||
    def _clear_tileset_list(self):
        """Remove all tileset buttons from the left panel and forget them."""
        for btn in self.tileset_buttons:
            btn.deleteLater()
        self.tileset_buttons.clear()
        # Also clears anything else that was added to the scroll column.
        self.tileset_list_scroll.clear_items()
|
||||
|
||||
    def _clear_canvas(self):
        """Hide the preview canvas and restore the 'select a tileset' placeholder."""
        self.canvas_rgcn.clear_image()
        self.canvas_scroll.hide()
        self.label_rgcn_placeholder.show()
        self._current_pil_image = None
|
||||
|
||||
def _clear_layer_info(self):
|
||||
self.layer_swap.clear()
|
||||
self.layer_scroll.clear_items()
|
||||
placeholder = QLabel('Select a map to view layer tree')
|
||||
placeholder.setAlignment(Qt.AlignmentFlag.AlignCenter)
|
||||
placeholder.setFont(QFont('Arial', 11))
|
||||
self.layer_scroll.add_widget(placeholder)
|
||||
|
||||
    def _on_save_rom(self):
        """Confirm pending modifications, pick a destination, and write the modified ROM."""
        if not self.rom_selector.rom_path:
            QMessageBox.warning(self, 'No ROM Loaded', 'Please load a ROM before saving')
            return
        if not self.rom_saver.cache.has_modifications():
            QMessageBox.information(self, 'No Changes', 'No modifications have been made to the ROM.\n\nMake some changes (import tilesets, swap layers, etc.) before saving.')
            return
        # Build a human-readable summary of pending modifications for the confirm dialog.
        summary = self.rom_saver.get_modification_summary()
        summary_msg = f"Save ROM with modifications?\n\nTotal modifications: {summary['total_count']}\nTotal modified data: {summary['total_size']:,} bytes\n\nModification types:\n"
        for mod_type, count in summary['by_type'].items():
            summary_msg += f' - {mod_type}: {count}\n'
        summary_msg += '\n\nWhere would you like to save the modified ROM?'
        if QMessageBox.question(self, 'Confirm Save', summary_msg, QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No) != QMessageBox.StandardButton.Yes:
            self._set_status('Save cancelled')
            return
        # Default filename: '<original stem>_modified.nds'.
        default_name = Path(self.rom_selector.rom_path).stem + '_modified.nds'
        save_path, _ = QFileDialog.getSaveFileName(self, 'Save Modified ROM', default_name, 'NDS ROM files (*.nds);;All files (*.*)')
        if not save_path:
            self._set_status('Save cancelled')
            return
        # Modal progress dialog; save_rom reports progress through dlg.set_status.
        dlg = ProgressDialog(self, 'Saving ROM...', 'Saving ROM...\n\nThis may take a moment.')
        dlg.show()
        QApplication.processEvents()
        self._set_status('Saving ROM...')
        try:
            success, message = self.rom_saver.save_rom(Path(save_path), progress_callback=dlg.set_status)
            dlg.close()
            if success:
                self._set_status(f'ROM saved successfully to {Path(save_path).name}')
                QMessageBox.information(self, 'Save Complete', f"ROM Saved Successfully!\n\nLocation: {save_path}\n\nModifications applied: {summary['total_count']}\nFile size: {Path(save_path).stat().st_size:,} bytes\n\nYour modified ROM is ready to use!")
                # Optionally reset modification tracking after a successful save.
                if QMessageBox.question(self, 'Clear Modifications?', 'ROM saved successfully!\n\nWould you like to clear the modification cache?\n\n(This will reset modification tracking, but your saved ROM is safe)', QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No) == QMessageBox.StandardButton.Yes:
                    self.rom_saver.clear_modifications()
                    self._set_status('Modifications cleared - ROM saved')
            else:
                self._set_status('ROM save failed')
                QMessageBox.critical(self, 'Save Failed', f'Failed to save ROM:\n\n{message}')
        except Exception as e:
            # Keep the UI alive on unexpected errors; full traceback goes to stdout.
            dlg.close()
            QMessageBox.critical(self, 'Save Error', f'ROM save operation failed:\n\n{str(e)}')
            self._set_status('ROM save failed')
            print('\n=== ROM SAVE ERROR ===')
            traceback.print_exc()
            print('======================\n')
|
||||
|
||||
    def _on_png_export(self):
        """Export the currently rendered tileset image to a user-chosen PNG file."""
        if not self.tileset_renderer.get_rendered_image():
            QMessageBox.warning(self, 'No Tileset', 'No tileset rendered to export')
            return
        filepath, _ = QFileDialog.getSaveFileName(self, 'Export Tileset as PNG', '', 'PNG files (*.png);;All files (*.*)')
        if filepath:
            self._set_status('Exporting PNG...')
            if self.tileset_renderer.export_png(filepath):
                self._set_status('PNG exported successfully')
                QMessageBox.information(self, 'Success', f'Tileset exported to:\n{filepath}')
            else:
                self._set_status('PNG export failed')
                QMessageBox.critical(self, 'Error', 'Failed to export PNG')
        else:
            # Dialog cancelled.
            self._set_status('Ready')
|
||||
|
||||
    def _on_import_tileset(self):
        """Import a RGCN/RLCN tileset file pair into the selected map's dat/tex files."""
        if not self.map_selector.get_selected_map():
            QMessageBox.warning(self, 'No Map Selected', 'Please select a map before importing a tileset')
            return
        selected_map = self.map_selector.get_selected_map()
        dat_path = str(selected_map.dat_path)
        tex_path = str(selected_map.tex_path)
        # Ask for the two files; which is graphics vs palette is auto-detected later.
        self._set_status('Select tileset files to import...')
        file1_path, _ = QFileDialog.getOpenFileName(self, 'Select First Tileset File (RGCN or RLCN)', '', 'RGCN Graphics (*.rgcn *.ncgr);;RLCN Palette (*.rlcn *.nclr);;Binary files (*.bin);;All files (*.*)')
        if not file1_path:
            self._set_status('Ready')
            return
        file2_path, _ = QFileDialog.getOpenFileName(self, 'Select Second Tileset File (RGCN or RLCN)', '', 'RGCN Graphics (*.rgcn *.ncgr);;RLCN Palette (*.rlcn *.nclr);;Binary files (*.bin);;All files (*.*)')
        if not file2_path:
            self._set_status('Ready')
            return
        # Show detected type/format/size of each file before committing.
        self._set_status('Analyzing files...')
        try:
            file1_info = get_file_info(file1_path)
            file2_info = get_file_info(file2_path)
            confirm_msg = f"Import these files?\n\nFile 1: {file1_info['name']}\n Type: {file1_info['type']} ({file1_info['format']})\n Size: {file1_info['size']:,} bytes\n\nFile 2: {file2_info['name']}\n Type: {file2_info['type']} ({file2_info['format']})\n Size: {file2_info['size']:,} bytes\n\nTarget Map: {selected_map.name}"
            if QMessageBox.question(self, 'Confirm Import', confirm_msg, QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No) != QMessageBox.StandardButton.Yes:
                self._set_status('Ready')
                return
        except Exception as e:
            QMessageBox.critical(self, 'File Analysis Error', f'Failed to analyze files:\n{str(e)}')
            self._set_status('Ready')
            return
        self._set_status('Importing tileset (this may take a moment)...')
        try:
            success, message = import_tileset_auto_detect(dat_path=dat_path, tex_path=tex_path, file1_path=file1_path, file2_path=file2_path)
            if success:
                # Record the touched files so SAVE ROM knows what changed.
                print('Tracking modifications in ROM saver...')
                self.rom_saver.add_modified_map_files(Path(dat_path), Path(tex_path))
                QMessageBox.information(self, 'Import Successful', message)
                # Re-select the map so it re-renders with the new tileset.
                self._set_status('Reloading map with new tileset...')
                self.map_selector.select_map(selected_map.name)
                self._set_status(f'Tileset imported successfully into {selected_map.name}')
            else:
                QMessageBox.critical(self, 'Import Failed', message)
                self._set_status('Import failed')
        except Exception as e:
            # Keep the UI alive on unexpected errors; full traceback goes to stdout.
            QMessageBox.critical(self, 'Import Error', f'Import operation failed:\n\n{str(e)}')
            self._set_status('Import failed')
            print('\n=== IMPORT ERROR ===')
            traceback.print_exc()
            print('====================\n')
|
||||
|
||||
def _on_png_transfer(self):
    """Convert a user-chosen PNG into tileset data and integrate it into the selected map.

    Workflow: pick a PNG -> analyze it (the unique-color count selects the
    conversion mode) -> confirm with the user -> convert via
    transfer_png_to_map -> on success, track the modified files in the ROM
    saver and reload the map so the new tileset is displayed.
    """
    # Fetch the selection once instead of querying the selector twice.
    selected_map = self.map_selector.get_selected_map()
    if not selected_map:
        QMessageBox.warning(self, 'No Map Selected', 'Please select a map before transferring a PNG tileset')
        return
    dat_path = str(selected_map.dat_path)
    tex_path = str(selected_map.tex_path)
    self._set_status('Select PNG image to convert...')
    png_path, _ = QFileDialog.getOpenFileName(self, 'Select PNG Image for Tileset', '', 'PNG Images (*.png);;All files (*.*)')
    if not png_path:
        self._set_status('Ready')
        return
    self._set_status('Analyzing PNG image...')
    try:
        png_info = get_png_info(png_path)
        if 'error' in png_info:
            QMessageBox.critical(self, 'PNG Error', f"Failed to read PNG file:\n{png_info['error']}")
            self._set_status('Ready')
            return
        unique_colors = png_info.get('unique_colors', 256)
        # <=16 colors fits a single 4bpp palette bank; more falls back to 256-color mode.
        if unique_colors <= 16:
            auto_mode = 'Tile Banking Mode (15 colors + transparency)'
            mode_desc = 'Optimal for Pokemon Ranger - Best compatibility'
        else:
            auto_mode = 'Standard Mode (256 colors)'
            mode_desc = 'More colors, good compatibility'
        info_msg = f"PNG Image Information:\n\nFile: {png_info['name']}\nSize: {png_info['width']}x{png_info['height']} pixels\nColors: {unique_colors} unique colors\nTransparency: {('Yes' if png_info['has_transparency'] else 'No')}\nFile size: {png_info['file_size']:,} bytes\n\nAuto-Selected Mode: {auto_mode}\n{mode_desc}\n\n"
        if unique_colors > 256:
            info_msg += 'WARNING: >256 colors detected!\nImage will be quantized, quality may be reduced.\n\n'
        info_msg += 'Proceed with conversion?'
        if QMessageBox.question(self, 'PNG Image Info', info_msg, QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No) != QMessageBox.StandardButton.Yes:
            self._set_status('Ready')
            return
    except Exception as e:
        QMessageBox.critical(self, 'Analysis Error', f'Failed to analyze PNG:\n{str(e)}')
        self._set_status('Ready')
        return
    self._set_status('Converting PNG (auto-detecting best mode)...')
    dlg = ProgressDialog(self, 'Converting...', 'Converting and integrating PNG tileset...\n\nMode: Auto-Detecting\nPlease wait...')
    dlg.show()
    QApplication.processEvents()  # let the progress dialog paint before the blocking conversion
    try:
        # use_tile_banks=None lets the converter auto-select based on the color count.
        success, message = transfer_png_to_map(png_path=png_path, dat_path=dat_path, tex_path=tex_path, use_tile_banks=None)
        dlg.close()
        if success:
            print('Tracking modifications in ROM saver...')
            self.rom_saver.add_modified_map_files(Path(dat_path), Path(tex_path))
            QMessageBox.information(self, 'Success', f'PNG Tileset Transfer Complete!\n\n{message}\n\nMap: {selected_map.name}\n\nThe map will now reload to display the new tileset.')
            self._set_status('Reloading map with new tileset...')
            # Re-selecting the map forces a full reload with the new tileset.
            self.map_selector.select_map(selected_map.name)
            self._set_status(f'PNG tileset successfully integrated into {selected_map.name}')
        else:
            QMessageBox.critical(self, 'Transfer Failed', f'Failed to transfer PNG tileset:\n\n{message}')
            self._set_status('PNG transfer failed')
    except Exception as e:
        dlg.close()
        QMessageBox.critical(self, 'Transfer Error', f'PNG transfer operation failed:\n\n{str(e)}')
        self._set_status('PNG transfer failed')
        print('\n=== PNG TRANSFER ERROR ===')
        traceback.print_exc()
        print('==========================\n')
|
||||
def _on_export_map_files(self):
    """Export the selected map's compressed .map.dat/.map.tex binaries verbatim.

    Shows a checkbox dialog (DAT/TEX), asks for a destination folder, then
    copies the chosen files as-is (still LZ10 compressed). Reports a combined
    success/failure summary at the end.
    """
    selected_map = self.map_selector.get_selected_map()
    if not selected_map:
        QMessageBox.warning(self, 'No Map Selected', 'Please select a map first.')
        return
    # NOTE: a preview message string that was never displayed has been removed
    # (it was immediately shadowed by the summary message below).
    export_dialog = _ExportMapDialog(self, selected_map.name)
    export_dialog.exec()
    choices = export_dialog.get_choices()
    if not choices:
        self._set_status('Export cancelled')
        return
    out_dir = QFileDialog.getExistingDirectory(self, 'Select Export Folder', '')
    if not out_dir:
        self._set_status('Export cancelled')
        return
    out_path = Path(out_dir)
    exported = []
    failed = []
    # Both files follow the same copy procedure; iterate instead of duplicating.
    for key, label, src_path in (('dat', 'DAT', selected_map.dat_path), ('tex', 'TEX', selected_map.tex_path)):
        if not (choices.get(key) and src_path):
            continue
        try:
            src = Path(src_path)
            dst = out_path / src.name
            dst.write_bytes(src.read_bytes())
            exported.append(src.name)
            print(f'Exported {label}: {dst}')
        except Exception as e:
            failed.append(f'{label}: {e}')
    if exported:
        msg = f'Exported {len(exported)} file(s) to:\n{out_dir}\n\n'
        msg += '\n'.join((f' {f}' for f in exported))
        if failed:
            msg += f'\n\nFailed:\n' + '\n'.join((f' {f}' for f in failed))
        self._set_status(f'Exported {len(exported)} map file(s)')
        QMessageBox.information(self, 'Export Complete', msg)
    else:
        err_msg = '\n'.join(failed) if failed else 'No files were selected.'
        QMessageBox.critical(self, 'Export Failed', f'No files exported.\n\n{err_msg}')
        self._set_status('Export failed')
|
||||
def _on_export_tileset(self):
    """Export the currently rendered tileset's RGCN/RLCN blobs as raw .bin files."""
    idx = self.tileset_renderer.selected_tileset_index
    if idx is None:
        QMessageBox.warning(self, 'No Tileset Selected', 'Please select and render a tileset first.')
        return
    tilesets = self.tileset_renderer.get_tilesets()
    if idx >= len(tilesets):
        QMessageBox.warning(self, 'Error', 'Selected tileset index is out of range.')
        return
    tileset = tilesets[idx]
    # Accept either naming convention for the graphics/palette chunks.
    rgcn_data = tileset.get('RGCN') or tileset.get('NCGR')
    rlcn_data = tileset.get('RLCN') or tileset.get('NCLR')
    if not rgcn_data and not rlcn_data:
        QMessageBox.warning(self, 'No Data', f'Tileset {idx} has no RGCN or RLCN data to export.')
        return
    selected_map = self.map_selector.get_selected_map()
    map_name = selected_map.name if selected_map else 'map'
    export_dialog = _ExportTilesetDialog(self, idx, bool(rgcn_data), bool(rlcn_data))
    export_dialog.exec()
    choices = export_dialog.get_choices()
    if not choices:
        self._set_status('Export cancelled')
        return
    out_dir = QFileDialog.getExistingDirectory(self, 'Select Export Folder', '')
    if not out_dir:
        self._set_status('Export cancelled')
        return
    out_path = Path(out_dir)
    exported = []
    failed = []
    # Both components share the same write procedure; iterate over them.
    for key, label, blob in (('rgcn', 'RGCN', rgcn_data), ('rlcn', 'RLCN', rlcn_data)):
        if not (choices.get(key) and blob):
            continue
        try:
            filename = f'{map_name}_tileset{idx}_{label}.bin'
            dst = out_path / filename
            dst.write_bytes(blob)
            exported.append(filename)
            print(f'Exported {label}: {dst} ({len(blob):,} bytes)')
        except Exception as e:
            failed.append(f'{label}: {e}')
    if exported:
        msg = f'Exported {len(exported)} file(s) to:\n{out_dir}\n\n'
        msg += '\n'.join((f' {f}' for f in exported))
        if failed:
            msg += f'\n\nFailed:\n' + '\n'.join((f' {f}' for f in failed))
        self._set_status(f'Exported tileset {idx} ({len(exported)} file(s))')
        QMessageBox.information(self, 'Export Complete', msg)
    else:
        err_msg = '\n'.join(failed) if failed else 'No files were selected.'
        QMessageBox.critical(self, 'Export Failed', f'No files exported.\n\n{err_msg}')
        self._set_status('Export failed')
|
||||
class _ExportMapDialog(QDialog):
    """Modal dialog letting the user tick which map binaries (DAT/TEX) to export."""

    def __init__(self, parent: QWidget, map_name: str):
        """Build the fixed-size dialog for *map_name* with both boxes pre-checked."""
        super().__init__(parent)
        self.setWindowTitle('Export Map Files')
        self.setFixedSize(340, 220)
        self.setModal(True)
        # Filled by _on_ok; remains empty when the dialog is cancelled.
        self._choices = {}
        layout = QVBoxLayout(self)
        layout.setContentsMargins(20, 20, 20, 20)
        layout.setSpacing(10)
        title = QLabel(f'Export binary files for:\n{map_name}')
        title.setFont(QFont('Arial', 11, QFont.Weight.Bold))
        title.setAlignment(Qt.AlignmentFlag.AlignCenter)
        title.setStyleSheet(f'color: {COLOR_TEXT};')
        layout.addWidget(title)
        from PyQt6.QtWidgets import QCheckBox
        self._chk_dat = QCheckBox(' DAT file (LZ10 compressed map data)')
        self._chk_dat.setChecked(True)
        self._chk_dat.setStyleSheet(f'color: {COLOR_TEXT}; font-size: 11px;')
        layout.addWidget(self._chk_dat)
        self._chk_tex = QCheckBox(' TEX file (LZ10 compressed tileset data)')
        self._chk_tex.setChecked(True)
        self._chk_tex.setStyleSheet(f'color: {COLOR_TEXT}; font-size: 11px;')
        layout.addWidget(self._chk_tex)
        layout.addStretch()
        btn_row = QHBoxLayout()
        btn_ok = _primary_btn('Export')
        btn_ok.setFixedHeight(36)
        btn_ok.clicked.connect(self._on_ok)
        btn_cancel = _primary_btn('Cancel')
        btn_cancel.setFixedHeight(36)
        btn_cancel.clicked.connect(self.reject)
        btn_row.addWidget(btn_ok)
        btn_row.addWidget(btn_cancel)
        layout.addLayout(btn_row)

    def _on_ok(self):
        """Capture the checkbox states, then accept the dialog."""
        self._choices = {'dat': self._chk_dat.isChecked(), 'tex': self._chk_tex.isChecked()}
        self.accept()

    def get_choices(self):
        """Return {'dat': bool, 'tex': bool} when accepted, else an empty dict."""
        if self.result() != QDialog.DialogCode.Accepted:
            return {}
        return self._choices
||||
|
||||
class _ExportTilesetDialog(QDialog):
    """Modal dialog letting the user tick which tileset components (RGCN/RLCN) to export."""

    def __init__(self, parent: QWidget, tileset_idx: int, has_rgcn: bool, has_rlcn: bool):
        """Build the dialog for tileset *tileset_idx*; unavailable components are disabled."""
        super().__init__(parent)
        self.setWindowTitle('Export Tileset Files')
        self.setFixedSize(340, 220)
        self.setModal(True)
        # Filled by _on_ok; remains empty when the dialog is cancelled.
        self._choices = {}
        layout = QVBoxLayout(self)
        layout.setContentsMargins(20, 20, 20, 20)
        layout.setSpacing(10)
        title = QLabel(f'Export binary files for:\nTileset {tileset_idx}')
        title.setFont(QFont('Arial', 11, QFont.Weight.Bold))
        title.setAlignment(Qt.AlignmentFlag.AlignCenter)
        title.setStyleSheet(f'color: {COLOR_TEXT};')
        layout.addWidget(title)
        from PyQt6.QtWidgets import QCheckBox
        # Check+enable each box only when that component actually has data.
        rgcn_label = ' RGCN (graphics / tile data)' + ('' if has_rgcn else ' [unavailable]')
        self._chk_rgcn = QCheckBox(rgcn_label)
        self._chk_rgcn.setChecked(has_rgcn)
        self._chk_rgcn.setEnabled(has_rgcn)
        self._chk_rgcn.setStyleSheet(f'color: {COLOR_TEXT}; font-size: 11px;')
        layout.addWidget(self._chk_rgcn)
        rlcn_label = ' RLCN (palette data)' + ('' if has_rlcn else ' [unavailable]')
        self._chk_rlcn = QCheckBox(rlcn_label)
        self._chk_rlcn.setChecked(has_rlcn)
        self._chk_rlcn.setEnabled(has_rlcn)
        self._chk_rlcn.setStyleSheet(f'color: {COLOR_TEXT}; font-size: 11px;')
        layout.addWidget(self._chk_rlcn)
        layout.addStretch()
        btn_row = QHBoxLayout()
        btn_ok = _primary_btn('Export')
        btn_ok.setFixedHeight(36)
        btn_ok.clicked.connect(self._on_ok)
        btn_cancel = _primary_btn('Cancel')
        btn_cancel.setFixedHeight(36)
        btn_cancel.clicked.connect(self.reject)
        btn_row.addWidget(btn_ok)
        btn_row.addWidget(btn_cancel)
        layout.addLayout(btn_row)

    def _on_ok(self):
        """Capture the checkbox states, then accept the dialog."""
        self._choices = {'rgcn': self._chk_rgcn.isChecked(), 'rlcn': self._chk_rlcn.isChecked()}
        self.accept()

    def get_choices(self):
        """Return {'rgcn': bool, 'rlcn': bool} when accepted, else an empty dict."""
        if self.result() != QDialog.DialogCode.Accepted:
            return {}
        return self._choices
||||
if __name__ == '__main__':
    # Manual launch for development: spin up the Qt event loop hosting the main window.
    app = QApplication(sys.argv)
    window = RomToolGUI()
    window.show()
    sys.exit(app.exec())
||||
715
gui/layerswap.py
Normal file
715
gui/layerswap.py
Normal file
|
|
@ -0,0 +1,715 @@
|
|||
import struct
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, List, Tuple
|
||||
from PyQt6.QtWidgets import QWidget, QFrame, QLabel, QPushButton, QHBoxLayout, QVBoxLayout, QSizePolicy, QMessageBox, QFileDialog
|
||||
from PyQt6.QtCore import Qt
|
||||
from PyQt6.QtGui import QFont
|
||||
# Known .dat layer types: raw type byte -> (hex label, human-readable role name).
LAYER_TYPE_INFO: Dict[int, Tuple[str, str]] = {1: ('0x01', 'PRIORITY / BASE'), 2: ('0x02', 'COMBINED TILEMAP'), 3: ('0x03', 'COLLISION'), 4: ('0x04', 'OBJECTS'), 5: ('0x05', 'STATIC ALT TILEMAP'), 6: ('0x06', 'ATTRIBUTES'), 7: ('0x07', 'TRIGGERS'), 8: ('0x08', 'NPCs'), 9: ('0x09', 'POKEMON SPAWNS'), 10: ('0x0A', 'MULTI-SCROLL'), 11: ('0x0B', 'UV SCROLL'), 12: ('0x0C', 'COLOR ANIMATION'), 13: ('0x0D', 'SHADOW / OVERLAY'), 14: ('0x0E', 'CHARACTER TRANSFORM')}
# Shared UI palette (same values as gui.gui, repeated so this module stands alone).
COLOR_BG_MAIN = '#182d55'
COLOR_ELEMENT_BG = '#26395e'
COLOR_ACCENT = '#6d86a8'
COLOR_HOVER = '#354a75'
COLOR_TEXT = '#FFFFFF'
COLOR_MODIFIED = '#FFD700'  # gold highlight for user-modified entries
COLOR_INFO = '#a8c4e0'  # secondary caption text next to node names
COLOR_TYPE_BADGE = '#1e4d7a'
# Tree-row background shades by depth (root / section / leaf).
COLOR_ROOT = '#1a3a6e'
COLOR_SECTION = '#26395e'
COLOR_LEAF = '#2d4470'
|
||||
def _make_frame(color: str, border: bool=False, radius: int=5, parent: QWidget=None) -> QFrame:
    """Create a rounded QFrame filled with *color*.

    When *border* is True an accent-colored 2px border is added; otherwise no
    border CSS is emitted at all. (The original expression also contained an
    unreachable '1px' branch inside the border string — the inner conditional
    could only run when *border* was already True, so it always produced '2px'.)
    """
    frame = QFrame(parent)
    border_css = f'border: 2px solid {COLOR_ACCENT};' if border else ''
    frame.setStyleSheet(f'\n QFrame {{\n background-color: {color};\n border-radius: {radius}px;\n {border_css}\n }}\n ')
    return frame
||||
|
||||
def _make_label(text: str, color: str=COLOR_TEXT, bold: bool=False, size: int=10, parent: QWidget=None) -> QLabel:
    """Build a transparent, borderless Arial QLabel in the given color/size."""
    label = QLabel(text, parent)
    label.setFont(QFont('Arial', size, QFont.Weight.Bold if bold else QFont.Weight.Normal))
    label.setStyleSheet(f'color: {color}; background: transparent; border: none;')
    return label
||||
|
||||
def _make_button(text: str, width: int=80, height: int=25, size: int=9, parent: QWidget=None) -> QPushButton:
    """Build a fixed-size push button styled to match the app's dark theme."""
    button = QPushButton(text, parent)
    button.setFont(QFont('Arial', size))
    button.setFixedSize(width, height)
    # Shared hover/idle styling for all small action buttons.
    button.setStyleSheet(f'\n QPushButton {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 5px;\n }}\n QPushButton:hover {{\n background-color: {COLOR_HOVER};\n }}\n ')
    return button
||||
|
||||
def _make_expand_btn(text: str, level: int, parent: QWidget=None) -> QPushButton:
    """Build the square +/- expander button; root-level (level 0) buttons are larger."""
    is_root = level == 0
    side = 30 if is_root else 25
    button = QPushButton(text, parent)
    button.setFont(QFont('Arial', 14 if is_root else 12, QFont.Weight.Bold))
    button.setFixedSize(side, side)
    button.setStyleSheet(f'\n QPushButton {{\n background-color: {COLOR_ELEMENT_BG};\n color: {COLOR_TEXT};\n border: 1px solid {COLOR_ACCENT};\n border-radius: 5px;\n }}\n QPushButton:hover {{\n background-color: {COLOR_HOVER};\n }}\n ')
    return button
||||
|
||||
def _read_mpif_info(data: bytes) -> str:
    """Summarize an MPIF chunk as 'W x H px (w x h tiles)' (16px tiles).

    Returns 'no data' for missing/short input, 'invalid MPIF' on a bad magic,
    and 'parse error' if the header fields cannot be unpacked.
    """
    if not data or len(data) < 12:
        return 'no data'
    if not data.startswith(b'MPIF'):
        return 'invalid MPIF'
    try:
        width, height = struct.unpack_from('<II', data, 4)
    except Exception:
        return 'parse error'
    return f'{width} x {height} px ({width // 16} x {height // 16} tiles)'
||||
|
||||
def _read_txif_info(data: bytes) -> str:
    """Summarize a TXIF chunk as its tileset-entry count.

    Returns 'no data' for missing/short input, 'invalid TXIF' on a bad magic,
    and 'parse error' if the count field cannot be unpacked.
    """
    if not data or len(data) < 8:
        return 'no data'
    if not data.startswith(b'TXIF'):
        return 'invalid TXIF'
    try:
        count = struct.unpack_from('<H', data, 4)[0]
    except Exception:
        return 'parse error'
    plural = 'entry' if count == 1 else 'entries'
    return f'{count} tileset {plural}'
||||
|
||||
def _read_layer_entry_info(layer_type: int, data: bytes) -> str:
    """Produce a short caption for a layer's contents, or '' when nothing sensible applies.

    Types 7/8/9 carry an entry count at offset 4 (shown only when it is below a
    per-type sanity cap); type 3 (collision) carries a grid size at offset 12.
    Any parse failure silently yields ''.
    """
    if not data or len(data) < 8:
        return ''
    # type byte -> (sanity cap, singular noun) for count-style layers.
    count_rules = {9: (50, 'spawn'), 8: (50, 'NPC'), 7: (200, 'trigger')}
    try:
        if layer_type in count_rules:
            limit, noun = count_rules[layer_type]
            count = struct.unpack_from('<I', data, 4)[0]
            if count <= limit:
                return f"{count} {noun}{'s' if count != 1 else ''}"
        elif layer_type == 3 and len(data) >= 20:
            gw, gh = struct.unpack_from('<II', data, 12)
            if 0 < gw < 2000 and 0 < gh < 2000:
                return f'grid {gw} x {gh}'
    except Exception:
        pass
    return ''
||||
|
||||
def _detect_tileset_type(rgcn: Optional[bytes], rlcn: Optional[bytes]) -> str:
    """Heuristically classify a tileset from its graphics (RGCN) and palette (RLCN) blobs.

    Order of checks: missing component -> 'INCOMPLETE'; a signature pair of
    palette words (14233, 30720) at offset 40 -> 'SHADOW'; very low byte
    diversity in the first 64 pixel-data bytes -> 'COLOR FILL' / 'PATTERN
    FILL'; otherwise 'NORMAL'.
    """
    if not (rgcn and rlcn):
        return 'INCOMPLETE'
    if len(rlcn) >= 44:
        try:
            if struct.unpack_from('<HH', rlcn, 40) == (14233, 30720):
                return 'SHADOW'
        except Exception:
            pass
    data_start = 48  # pixel data begins after the RGCN header
    if len(rgcn) > data_start + 32:
        distinct = len(set(rgcn[data_start:data_start + 64]))
        if distinct <= 2:
            return 'COLOR FILL'
        if distinct <= 8:
            return 'PATTERN FILL'
    return 'NORMAL'
||||
|
||||
class LayerNode:
    """One row of the DAT/TEX layer tree: a named data blob plus its attached UI widgets."""

    def __init__(self, name: str, data: bytes=None, node_type: str='layer', parent=None, level: int=0):
        """Create a node; widgets and identity fields are filled in later by LayerSwap."""
        self.name = name
        self.data = data  # raw bytes of this section/layer/component (may be None)
        self.node_type = node_type  # e.g. 'root', 'section', 'layer', 'tileset', 'component'
        self.parent = parent
        self.children: List['LayerNode'] = []
        self.is_expanded = False
        self.level = level  # tree depth; drives indentation and styling
        # Qt widgets, attached by LayerSwap's render methods.
        self.frame: Optional[QFrame] = None
        self.children_container: Optional[QWidget] = None
        self.expand_button: Optional[QPushButton] = None
        self.name_label: Optional[QLabel] = None
        self.info_label: Optional[QLabel] = None
        self.swap_button: Optional[QPushButton] = None
        self.export_button: Optional[QPushButton] = None
        # Identity within the parsed map (set only where applicable).
        self.layer_index: Optional[int] = None
        self.layer_type: Optional[int] = None
        self.tileset_index: Optional[int] = None
        self.component: Optional[str] = None  # 'RGCN' or 'RLCN' for tileset components
        self.info_text: str = ''  # secondary caption shown next to the name

    def add_child(self, child: 'LayerNode'):
        """Attach *child* beneath this node, fixing its parent link and depth."""
        child.parent = self
        child.level = self.level + 1
        self.children.append(child)

    def can_expand(self) -> bool:
        """True when the node has children and therefore shows an expander button."""
        return len(self.children) > 0

    def get_path(self) -> str:
        """Human-readable 'ROOT > SECTION > NODE' breadcrumb for logs and dialogs."""
        parts, node = ([], self)
        while node:
            parts.insert(0, node.name)
            node = node.parent
        return ' > '.join(parts)

    def get_bg_color(self) -> str:
        """Row background shade by depth: root, section, then leaf."""
        if self.level == 0:
            return COLOR_ROOT
        if self.level == 1:
            return COLOR_SECTION
        return COLOR_LEAF
||||
|
||||
class LayerSwap:
|
||||
|
||||
def __init__(self):
    """Initialize an empty layer-swap controller; collaborators are wired in via setters."""
    # Tree model: one root per source file ('DAT MAP' / 'TEX MAP').
    self.root_nodes: List[LayerNode] = []
    self.current_map_name: Optional[str] = None
    # Layer path -> replacement bytes applied by the user this session.
    self.modified_layers: Dict[str, bytes] = {}
    self._map_data = None
    # On-disk paths of the currently loaded map's binaries.
    self._dat_path: Optional[Path] = None
    self._tex_path: Optional[Path] = None
    self._rom_saver = None  # wired via set_rom_saver
    self.map_selector = None  # wired via set_map_selector
    self.on_layer_modified = None  # optional callback invoked after a swap
    self._parent_widget: Optional[QWidget] = None  # host widget for dialogs/tree
||||
|
||||
def set_rom_saver(self, rom_saver):
    """Wire in the shared ROMSaver used to track modified map files."""
    self._rom_saver = rom_saver
    print('[LayerSwap] ROMSaver wired')
||||
|
||||
def set_map_paths(self, dat_path: Path, tex_path: Path):
    """Record the on-disk .dat/.tex paths of the currently loaded map."""
    # Coerce to Path so str arguments from callers are accepted too.
    self._dat_path = Path(dat_path)
    self._tex_path = Path(tex_path)
    print(f'[LayerSwap] Map paths: {self._dat_path.name}, {self._tex_path.name}')
||||
|
||||
def set_map_data(self, map_data):
    """Store the parsed map data used by swap/export operations."""
    self._map_data = map_data
||||
|
||||
def set_map_selector(self, map_selector):
    """Wire in the MapSelector; when present it is the preferred source of tilesets."""
    self.map_selector = map_selector
||||
|
||||
def populate_layers(self, map_data, parent_frame):
    """Rebuild the entire DAT/TEX layer tree for *map_data* inside *parent_frame*.

    Clears any previous tree, constructs the model (DAT sections then TEX
    tilesets), and renders the root rows; children render lazily on expand.
    """
    self._parent_widget = parent_frame
    self._clear_tree(parent_frame)
    self.root_nodes = []
    self.current_map_name = map_data.map_name
    self._map_data = map_data
    print(f'\n=== Populating Layer Tree for {map_data.map_name} ===')
    self._build_dat_tree(map_data)
    self._build_tex_tree(map_data)
    self._render_tree(parent_frame)
    print(f'Layer tree: {len(self.root_nodes)} root nodes')
||||
|
||||
def _build_dat_tree(self, map_data):
    """Build the 'DAT MAP' root node from the map's parsed .dat sections.

    Children in display order: MPIF (map dimensions), TXIF (tileset index),
    LYR (one child per layer, labeled via LAYER_TYPE_INFO), CTA (tile
    animations). Sections the map does not contain are simply omitted.
    """
    dat_root = LayerNode('DAT MAP', node_type='root', level=0)
    if map_data.has_mpif():
        mpif_data = map_data.dat_data.get('mpif')
        info = _read_mpif_info(mpif_data)
        size = len(mpif_data) if mpif_data else 0
        node = LayerNode('MPIF', data=mpif_data, node_type='section', parent=dat_root, level=1)
        node.info_text = f'{info} • {size} bytes'
        dat_root.add_child(node)
        print(f' MPIF: {size} bytes [{info}]')
    if map_data.has_txif():
        txif_data = map_data.dat_data.get('txif')
        info = _read_txif_info(txif_data)
        size = len(txif_data) if txif_data else 0
        node = LayerNode('TXIF', data=txif_data, node_type='section', parent=dat_root, level=1)
        node.info_text = f'{info} • {size} bytes'
        dat_root.add_child(node)
        print(f' TXIF: {size} bytes [{info}]')
    layers = map_data.dat_data.get('layers', [])
    if layers:
        lyr_root = LayerNode('LYR', node_type='section', parent=dat_root, level=1)
        lyr_root.info_text = f"{len(layers)} layer{('s' if len(layers) != 1 else '')}"
        for i, layer in enumerate(layers):
            raw_type = layer.get('type', -1)
            layer_data = layer.get('data', b'')
            # Unknown type bytes still get a hex label so nothing is hidden from the user.
            label, desc = LAYER_TYPE_INFO.get(raw_type, (f'0x{raw_type:02X}', 'UNKNOWN'))
            display_name = f'{label} — {desc}'
            entry_info = _read_layer_entry_info(raw_type, layer_data)
            size = len(layer_data) if layer_data else 0
            # Drop empty caption parts so we never render a dangling separator.
            info_parts = [p for p in (entry_info, f'{size:,} bytes') if p]
            child = LayerNode(display_name, data=layer_data, node_type='layer', parent=lyr_root, level=2)
            child.layer_index = i
            child.layer_type = raw_type
            child.info_text = ' • '.join(info_parts)
            lyr_root.add_child(child)
        dat_root.add_child(lyr_root)
        print(f' LYR: {len(layers)} layers')
    if map_data.has_cta():
        cta_data = map_data.dat_data.get('cta')
        size = len(cta_data) if cta_data else 0
        node = LayerNode('CTA', data=cta_data, node_type='section', parent=dat_root, level=1)
        node.info_text = f'tile animations • {size} bytes'
        dat_root.add_child(node)
        print(f' CTA: {size} bytes')
    self.root_nodes.append(dat_root)
||||
|
||||
def _build_tex_tree(self, map_data):
    """Build the 'TEX MAP' root node: one child per tileset with RGCN/RLCN components."""
    tex_root = LayerNode('TEX MAP', node_type='root', level=0)
    # Prefer the selector's tilesets when available, else fall back to raw tex_data.
    if self.map_selector:
        tilesets = self.map_selector.get_tilesets()
    elif map_data.tex_data:
        tilesets = map_data.tex_data.get('tilesets', [])
    else:
        tilesets = []
    tex_root.info_text = f"{len(tilesets)} tileset{('s' if len(tilesets) != 1 else '')}"
    for i, tileset in enumerate(tilesets):
        # Accept both Nitro naming conventions for the graphics/palette chunk keys.
        rgcn_data = tileset.get('RGCN') or tileset.get('NCGR')
        rlcn_data = tileset.get('RLCN') or tileset.get('NCLR')
        ts_type = _detect_tileset_type(rgcn_data, rlcn_data)
        ts_node = LayerNode(f'TILESET {i}', node_type='tileset', parent=tex_root, level=1)
        ts_node.tileset_index = i
        ts_node.info_text = ts_type
        if rgcn_data:
            size = len(rgcn_data)
            rgcn_node = LayerNode('RGCN', data=rgcn_data, node_type='component', parent=ts_node, level=2)
            rgcn_node.tileset_index = i
            rgcn_node.component = 'RGCN'
            rgcn_node.info_text = f'graphics • {size:,} bytes'
            ts_node.add_child(rgcn_node)
        if rlcn_data:
            size = len(rlcn_data)
            rlcn_node = LayerNode('RLCN', data=rlcn_data, node_type='component', parent=ts_node, level=2)
            rlcn_node.tileset_index = i
            rlcn_node.component = 'RLCN'
            rlcn_node.info_text = f'palette • {size:,} bytes'
            ts_node.add_child(rlcn_node)
        tex_root.add_child(ts_node)
        print(f' TILESET {i} [{ts_type}]: {len(ts_node.children)} components')
    self.root_nodes.append(tex_root)
||||
|
||||
def _render_tree(self, parent_frame):
    """Render every root node into *parent_frame*, separated by thin accent dividers."""
    first = True
    for root in self.root_nodes:
        if not first:
            divider = QFrame()
            divider.setFixedHeight(2)
            divider.setStyleSheet(f'background-color: {COLOR_ACCENT}; border: none;')
            parent_frame.add_widget(divider)
        first = False
        self._render_node(root, parent_frame)
||||
|
||||
def _render_node(self, node: LayerNode, parent_frame):
    """Build the Qt row widget for *node* and append it to *parent_frame*.

    Depth controls presentation: level-0 rows get a border and larger
    controls, deeper rows get progressively more left padding. The node's
    children container is created hidden; children are rendered lazily on
    first expand (see _toggle_expand).

    Fix: the name-size expression previously read
    `12 if level == 0 else 10 if level == 1 else 10` — both non-root branches
    yielded 10, so the redundant inner conditional has been removed.
    """
    # Per-depth presentation parameters.
    if node.level == 0:
        left_pad, vpad = (10, 3)
        border = True
        radius = 5
    elif node.level == 1:
        left_pad, vpad = (30, 2)
        border = False
        radius = 3
    else:
        left_pad, vpad = (50, 2)
        border = False
        radius = 3
    node.frame = QFrame()
    outer_vbox = QVBoxLayout(node.frame)
    outer_vbox.setContentsMargins(0, vpad, 0, vpad)
    outer_vbox.setSpacing(0)
    border_css = f'border: 2px solid {COLOR_ACCENT};' if border else 'border: none;'
    node.frame.setStyleSheet(f'\n QFrame {{\n background-color: {node.get_bg_color()};\n border-radius: {radius}px;\n {border_css}\n }}\n ')
    inner_widget = QWidget()
    inner_widget.setStyleSheet('background: transparent; border: none;')
    inner_hbox = QHBoxLayout(inner_widget)
    inner_hbox.setContentsMargins(left_pad, 4 if node.level == 0 else 3, 10, 4 if node.level == 0 else 3)
    inner_hbox.setSpacing(6)
    spacer_size = 30 if node.level == 0 else 25
    if node.can_expand():
        node.expand_button = _make_expand_btn('+', node.level)
        # Default-arg binding captures this node for the shared click handler.
        node.expand_button.clicked.connect(lambda checked, n=node: self._toggle_expand(n))
        inner_hbox.addWidget(node.expand_button)
    else:
        # Leaf rows get an invisible spacer so labels align with expandable rows.
        spacer = QWidget()
        spacer.setFixedSize(spacer_size, spacer_size)
        spacer.setStyleSheet('background: transparent; border: none;')
        inner_hbox.addWidget(spacer)
    # Root rows use a larger font; levels 1+ share the same size.
    name_size = 12 if node.level == 0 else 10
    node.name_label = _make_label(node.name, COLOR_TEXT, bold=node.level <= 1, size=name_size)
    inner_hbox.addWidget(node.name_label)
    if node.info_text:
        node.info_label = _make_label(node.info_text, COLOR_INFO, size=8)
        node.info_label.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Preferred)
        inner_hbox.addWidget(node.info_label, stretch=1)
    else:
        inner_hbox.addStretch(1)
    node.export_button = _make_button('EXPORT')
    node.export_button.clicked.connect(lambda checked, n=node: self._export_layer(n))
    inner_hbox.addWidget(node.export_button)
    node.swap_button = _make_button('SWAP')
    node.swap_button.clicked.connect(lambda checked, n=node: self._swap_layer(n))
    inner_hbox.addWidget(node.swap_button)
    outer_vbox.addWidget(inner_widget)
    # Children container starts hidden; populated lazily on first expand.
    node.children_container = QWidget()
    node.children_container.setStyleSheet('background: transparent; border: none;')
    children_vbox = QVBoxLayout(node.children_container)
    children_vbox.setContentsMargins(0, 0, 0, 0)
    children_vbox.setSpacing(0)
    node.children_container.hide()
    outer_vbox.addWidget(node.children_container)
    parent_frame.add_widget(node.frame)
||||
|
||||
def _toggle_expand(self, node: LayerNode):
    """Expand or collapse *node*, lazily rendering its children on first expand."""
    node.is_expanded = not node.is_expanded
    if node.is_expanded:
        node.expand_button.setText('−')
        layout = node.children_container.layout()
        # First expansion only: the container layout is still empty, so build child rows now.
        if layout.count() == 0:
            # NOTE(review): _ChildProxy is defined elsewhere in this file;
            # presumably it primes/wraps the layout — confirm its purpose.
            _ChildProxy(layout)
            for child in node.children:
                self._render_node_into_layout(child, layout)
        node.children_container.show()
        print(f'Expanded {node.name}: {len(node.children)} children')
    else:
        node.expand_button.setText('+')
        node.children_container.hide()
        print(f'Collapsed {node.name}')
||||
|
||||
def _render_node_into_layout(self, node: LayerNode, layout: QVBoxLayout):
    """Build the Qt row widget for *node* and add it to an explicit QVBoxLayout.

    Mirrors _render_node (which appends to a parent frame instead); used when
    lazily expanding children. Differences: vertical label margins are a fixed
    3px here regardless of level.
    """
    # Per-depth presentation parameters (padding, border, corner radius).
    if node.level == 0:
        left_pad, vpad, border, radius = (10, 3, True, 5)
    elif node.level == 1:
        left_pad, vpad, border, radius = (30, 2, False, 3)
    else:
        left_pad, vpad, border, radius = (50, 2, False, 3)
    node.frame = QFrame()
    outer_vbox = QVBoxLayout(node.frame)
    outer_vbox.setContentsMargins(0, vpad, 0, vpad)
    outer_vbox.setSpacing(0)
    border_css = f'border: 2px solid {COLOR_ACCENT};' if border else 'border: none;'
    node.frame.setStyleSheet(f'\n QFrame {{\n background-color: {node.get_bg_color()};\n border-radius: {radius}px;\n {border_css}\n }}\n ')
    inner_widget = QWidget()
    inner_widget.setStyleSheet('background: transparent; border: none;')
    inner_hbox = QHBoxLayout(inner_widget)
    inner_hbox.setContentsMargins(left_pad, 3, 10, 3)
    inner_hbox.setSpacing(6)
    spacer_size = 30 if node.level == 0 else 25
    if node.can_expand():
        node.expand_button = _make_expand_btn('+', node.level)
        # Default-arg binding captures this node for the shared click handler.
        node.expand_button.clicked.connect(lambda checked, n=node: self._toggle_expand(n))
        inner_hbox.addWidget(node.expand_button)
    else:
        # Invisible spacer keeps leaf labels aligned with expandable rows.
        sp = QWidget()
        sp.setFixedSize(spacer_size, spacer_size)
        sp.setStyleSheet('background: transparent; border: none;')
        inner_hbox.addWidget(sp)
    name_size = 12 if node.level == 0 else 10
    node.name_label = _make_label(node.name, COLOR_TEXT, bold=node.level <= 1, size=name_size)
    inner_hbox.addWidget(node.name_label)
    if node.info_text:
        node.info_label = _make_label(node.info_text, COLOR_INFO, size=8)
        node.info_label.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Preferred)
        inner_hbox.addWidget(node.info_label, stretch=1)
    else:
        inner_hbox.addStretch(1)
    node.export_button = _make_button('EXPORT')
    node.export_button.clicked.connect(lambda checked, n=node: self._export_layer(n))
    inner_hbox.addWidget(node.export_button)
    node.swap_button = _make_button('SWAP')
    node.swap_button.clicked.connect(lambda checked, n=node: self._swap_layer(n))
    inner_hbox.addWidget(node.swap_button)
    outer_vbox.addWidget(inner_widget)
    # Children container starts hidden; populated lazily on first expand.
    node.children_container = QWidget()
    node.children_container.setStyleSheet('background: transparent; border: none;')
    cc_layout = QVBoxLayout(node.children_container)
    cc_layout.setContentsMargins(0, 0, 0, 0)
    cc_layout.setSpacing(0)
    node.children_container.hide()
    outer_vbox.addWidget(node.children_container)
    layout.addWidget(node.frame)
||||
|
||||
def _swap_layer(self, node: LayerNode):
    """Replace the binary payload of *node* with a user-chosen .bin file.

    Prompts for a replacement file, validates its magic against the node's
    expected layer/component type, rebuilds and rewrites the owning DAT or
    TEX container on disk, and registers the change with the ROMSaver so it
    is included in the next ROM save. Shows a dialog and aborts on any
    validation or write failure.
    """
    print(f'\n=== Swap: {node.get_path()} ===')
    parent = self._parent_widget
    # Both a connected ROMSaver and a loaded map are prerequisites.
    if not self._rom_saver:
        QMessageBox.critical(parent, 'Not Ready', 'ROMSaver is not connected.\nLoad a ROM before swapping layers.')
        return
    if not self._map_data:
        QMessageBox.critical(parent, 'Not Ready', 'No map is loaded.\nSelect a map first.')
        return
    file_path, _ = QFileDialog.getOpenFileName(parent, f"Select Binary File to Replace '{node.name}'", '', 'Binary files (*.bin);;All files (*.*)')
    if not file_path:
        print('Swap cancelled')
        return
    try:
        new_data = Path(file_path).read_bytes()
    except Exception as e:
        QMessageBox.critical(parent, 'Read Error', f'Could not read file:\n{e}')
        return
    old_size = len(node.data) if node.data else 0
    print(f' Replacement: {len(new_data):,} bytes (was {old_size:,})')
    # Sanity-check the replacement's type word / magic before committing.
    if node.node_type == 'layer' and node.layer_type is not None:
        ok, msg = self._validate_layer_magic(node.layer_type, new_data)
        if not ok:
            QMessageBox.critical(parent, 'Invalid Layer File', f'The selected file does not match layer type {node.name}.\n\n{msg}\n\nMake sure you are replacing with the correct layer type.')
            return
    if node.node_type == 'component':
        ok, msg = self._validate_component_magic(node.component, new_data)
        if not ok:
            QMessageBox.critical(parent, 'Invalid Component File', f'The file does not appear to be a valid {node.component}.\n\n{msg}')
            return
    # Route the swap to the owning container based on the tree root's name.
    root_name = self._get_root_name(node)
    success = False
    if root_name == 'DAT MAP':
        success = self._apply_dat_swap(node, new_data)
    elif root_name == 'TEX MAP':
        success = self._apply_tex_swap(node, new_data)
    else:
        QMessageBox.critical(parent, 'Swap Error', f"Cannot determine file type for node '{node.get_path()}'")
        return
    if not success:
        return
    # Record the modification and repaint the tree row as modified.
    node.data = new_data
    self.modified_layers[node.get_path()] = new_data
    if node.name_label:
        node.name_label.setText(f'{node.name} [MODIFIED]')
        node.name_label.setStyleSheet(f'color: {COLOR_MODIFIED}; background: transparent; border: none;')
    if node.info_label:
        new_size_str = f'{len(new_data):,} bytes'
        # Keep the descriptive prefix (text before the bullet), refresh the size.
        base = node.info_text.split('•')[0].strip() if '•' in node.info_text else ''
        node.info_label.setText(f'{base} • {new_size_str}' if base else new_size_str)
        node.info_label.setStyleSheet(f'color: {COLOR_MODIFIED}; background: transparent; border: none;')
    if self.on_layer_modified:
        self.on_layer_modified(node.get_path(), new_data)
    QMessageBox.information(parent, 'Swap Successful', f"'{node.name}' swapped and queued for ROM save.\n\nOld: {old_size:,} bytes\nNew: {len(new_data):,} bytes")
    print(f' Swap complete: {node.get_path()}')
|
||||
|
||||
def _validate_layer_magic(self, layer_type: int, data: bytes) -> Tuple[bool, str]:
|
||||
if len(data) < 4:
|
||||
return (False, 'File is too small (< 4 bytes) to be a valid layer.')
|
||||
found_type = struct.unpack_from('<I', data, 0)[0]
|
||||
if found_type != layer_type:
|
||||
label, desc = LAYER_TYPE_INFO.get(layer_type, (f'0x{layer_type:02X}', 'UNKNOWN'))
|
||||
fl, fd = LAYER_TYPE_INFO.get(found_type, (f'0x{found_type:02X}', 'UNKNOWN'))
|
||||
return (False, f"Expected layer type {label} ({desc})\nFound type {fl} ({fd})\n(first 4 bytes: {data[:4].hex(' ')})")
|
||||
return (True, 'OK')
|
||||
|
||||
def _validate_component_magic(self, component: str, data: bytes) -> Tuple[bool, str]:
|
||||
if len(data) < 4:
|
||||
return (False, 'File is too small (< 4 bytes).')
|
||||
expected = {'RGCN': b'RGCN', 'RLCN': b'RLCN'}.get(component)
|
||||
if not expected:
|
||||
return (True, 'OK')
|
||||
if data[:4] != expected:
|
||||
found = data[:4]
|
||||
return (False, f"Expected magic '{expected.decode('ascii')}' (hex: {expected.hex(' ')})\nFound: {found.hex(' ')} ('{found.decode('ascii', errors='replace')}')")
|
||||
return (True, 'OK')
|
||||
|
||||
def _apply_dat_swap(self, node: LayerNode, new_data: bytes) -> bool:
    """Apply *new_data* to the in-memory DAT structure and persist it.

    Routes the payload to the matching section (MPIF/TXIF/CTA or a layer
    by index), rebuilds the compressed .map.dat container, writes it to
    disk and registers the file with the ROMSaver.

    Returns True on success; shows an error dialog and returns False on
    any validation, rebuild, write or registration failure.
    """
    parent = self._parent_widget
    if not self._dat_path:
        QMessageBox.critical(parent, 'Swap Error', 'DAT file path not set.\nCall set_map_paths() before swapping.')
        return False
    dat_data = self._map_data.dat_data
    node_name = node.name.upper()
    # Dispatch on the node name; layer nodes are addressed by index instead.
    if node_name == 'MPIF':
        dat_data['mpif'] = new_data
    elif node_name == 'TXIF':
        dat_data['txif'] = new_data
    elif node_name == 'CTA':
        dat_data['cta'] = new_data
    elif node.layer_index is not None:
        layers = dat_data.get('layers', [])
        if node.layer_index >= len(layers):
            QMessageBox.critical(parent, 'Swap Error', f'Layer index {node.layer_index} out of range ({len(layers)} layers).')
            return False
        layers[node.layer_index]['data'] = new_data
        print(f' Updated layer[{node.layer_index}] in MapData')
    else:
        QMessageBox.critical(parent, 'Swap Error', f"Unknown DAT node type for '{node.name}'.\nNode type: {node.node_type}")
        return False
    rebuilt = self._rebuild_dat(dat_data)
    if rebuilt is None:
        QMessageBox.critical(parent, 'Rebuild Error', 'Failed to rebuild DAT file.\nCheck console for details.')
        return False
    try:
        self._dat_path.write_bytes(rebuilt)
        print(f' DAT written: {self._dat_path.name} ({len(rebuilt):,} bytes)')
    except Exception as e:
        QMessageBox.critical(parent, 'Write Error', f'Could not write DAT to disk:\n{e}')
        return False
    # Queue the rebuilt file so it is patched into the ROM on the next save.
    ok = self._rom_saver.register_modification(self._dat_path, rebuilt, 'layer_swap')
    if not ok:
        QMessageBox.warning(parent, 'Registration Warning', 'File written but ROMSaver registration failed.\nThe change may not appear in the saved ROM.')
        return False
    print(f' Registered with ROMSaver: {self._dat_path.name}')
    return True
|
||||
|
||||
def _rebuild_dat(self, dat_data: dict) -> Optional[bytes]:
    """Re-serialize the parsed DAT structure back into its on-disk form.

    Layers are packed into a NARC nested inside another NARC and prefixed
    with the 'LYR\\x00' tag; the outer sections are then concatenated in
    the fixed order MPIF, TXIF, LYR, CTA, wrapped in a final NARC and
    LZ10-compressed.

    Returns the compressed bytes, or None on any failure (logged to the
    console).
    """
    try:
        from load.narcutil import build_narc
        from load.lz10util import compress_lz10
        layers = dat_data.get('layers', [])
        # Only layers that still carry data are serialized.
        layer_blobs = [l['data'] for l in layers if l.get('data')]
        lyr_blob = None
        if layer_blobs:
            # Layers live in a double-nested NARC under the LYR tag.
            inner_narc = build_narc(layer_blobs)
            outer_narc = build_narc([inner_narc])
            lyr_blob = b'LYR\x00' + outer_narc
        outer_sections = []
        mpif = dat_data.get('mpif')
        txif = dat_data.get('txif')
        cta = dat_data.get('cta')
        # Section order matters: MPIF, TXIF, LYR, CTA. Missing sections
        # are simply omitted.
        if mpif:
            outer_sections.append(mpif)
        if txif:
            outer_sections.append(txif)
        if lyr_blob:
            outer_sections.append(lyr_blob)
        if cta:
            outer_sections.append(cta)
        if not outer_sections:
            print('[LayerSwap] ERROR: no sections to rebuild DAT')
            return None
        outer_narc_final = build_narc(outer_sections)
        compressed = compress_lz10(outer_narc_final)
        print(f' [rebuild_dat] sections={len(outer_sections)} layers={len(layer_blobs)} uncompressed={len(outer_narc_final):,} compressed={len(compressed):,}')
        return compressed
    except Exception as e:
        traceback.print_exc()
        print(f'[LayerSwap] ERROR rebuilding DAT: {e}')
        return None
|
||||
|
||||
def _apply_tex_swap(self, node: LayerNode, new_data: bytes) -> bool:
    """Apply *new_data* to one tileset component and persist the TEX file.

    Updates the RGCN (graphics) or RLCN (palette) entry of the addressed
    tileset — both the 4CC key and its reversed alias are kept in sync —
    then rebuilds the compressed .map.tex container, writes it to disk
    and registers it with the ROMSaver.

    Returns True on success; shows an error dialog and returns False on
    any failure.
    """
    parent = self._parent_widget
    if not self._tex_path:
        QMessageBox.critical(parent, 'Swap Error', 'TEX file path not set.\nCall set_map_paths() before swapping.')
        return False
    if node.tileset_index is None or node.component is None:
        QMessageBox.critical(parent, 'Swap Error', f"Node '{node.name}' is missing tileset_index or component tag.")
        return False
    tex_data = self._map_data.tex_data
    tilesets = tex_data.get('tilesets', [])
    if node.tileset_index >= len(tilesets):
        QMessageBox.critical(parent, 'Swap Error', f'Tileset index {node.tileset_index} out of range ({len(tilesets)} tilesets).')
        return False
    tileset = tilesets[node.tileset_index]
    # Store under both key spellings so either lookup direction sees the
    # updated payload.
    if node.component == 'RGCN':
        tileset['RGCN'] = new_data
        tileset['NCGR'] = new_data
    elif node.component == 'RLCN':
        tileset['RLCN'] = new_data
        tileset['NCLR'] = new_data
    else:
        QMessageBox.critical(parent, 'Swap Error', f"Unknown component '{node.component}'.")
        return False
    rebuilt = self._rebuild_tex(tex_data)
    if rebuilt is None:
        QMessageBox.critical(parent, 'Rebuild Error', 'Failed to rebuild TEX file.\nCheck console for details.')
        return False
    try:
        self._tex_path.write_bytes(rebuilt)
        print(f' TEX written: {self._tex_path.name} ({len(rebuilt):,} bytes)')
    except Exception as e:
        QMessageBox.critical(parent, 'Write Error', f'Could not write TEX to disk:\n{e}')
        return False
    # Queue the rebuilt file so it is patched into the ROM on the next save.
    ok = self._rom_saver.register_modification(self._tex_path, rebuilt, 'layer_swap')
    if not ok:
        QMessageBox.warning(parent, 'Registration Warning', 'File written but ROMSaver registration failed.\nThe change may not appear in the saved ROM.')
        return False
    print(f' Registered with ROMSaver: {self._tex_path.name}')
    return True
|
||||
|
||||
def _rebuild_tex(self, tex_data: dict) -> Optional[bytes]:
    """Re-serialize the parsed TEX structure back into its on-disk form.

    Each tileset's graphics (RGCN/NCGR) and palette (RLCN/NCLR) blobs are
    packed into an inner NARC; all tileset NARCs are then wrapped in an
    outer NARC, prefixed with the 'TEX\\x00' tag and LZ10-compressed.

    Returns the compressed bytes, or None on any failure (logged to the
    console). Empty tilesets are skipped with a warning.
    """
    try:
        from load.narcutil import build_narc
        from load.lz10util import compress_lz10
        tilesets = tex_data.get('tilesets', [])
        outer_blobs = []
        for i, ts in enumerate(tilesets):
            # Accept either key spelling for each component.
            rgcn = ts.get('RGCN') or ts.get('NCGR')
            rlcn = ts.get('RLCN') or ts.get('NCLR')
            components = [c for c in (rgcn, rlcn) if c]
            if not components:
                print(f' [rebuild_tex] WARNING: tileset {i} empty, skipping')
                continue
            inner_narc = build_narc(components)
            outer_blobs.append(inner_narc)
        if not outer_blobs:
            print('[LayerSwap] ERROR: no tilesets to rebuild TEX')
            return None
        outer_narc = build_narc(outer_blobs)
        tex_body = b'TEX\x00' + outer_narc
        compressed = compress_lz10(tex_body)
        print(f' [rebuild_tex] tilesets={len(outer_blobs)} uncompressed={len(outer_narc):,} compressed={len(compressed):,}')
        return compressed
    except Exception as e:
        traceback.print_exc()
        print(f'[LayerSwap] ERROR rebuilding TEX: {e}')
        return None
|
||||
|
||||
def _export_layer(self, node: LayerNode):
    """Save the raw bytes of *node* to a user-chosen .bin file.

    Builds a filesystem-safe default filename from the current map and
    node name, prompts via a save dialog, and reports success or failure
    with a message box.
    """
    print(f'\n=== Export: {node.get_path()} ===')
    parent = self._parent_widget
    if not node.data:
        QMessageBox.warning(parent, 'No Data', f"'{node.name}' has no data to export.")
        return
    # Strip characters that are awkward in filenames (spaces, slashes, the
    # em-dash and hex prefixes used in tree labels).
    safe_name = node.name.replace(' ', '_').replace('/', '_').replace('>', '').replace('—', '-').replace('0x', '').strip('_')
    default_file = f'{self.current_map_name}_{safe_name}.bin'
    file_path, _ = QFileDialog.getSaveFileName(parent, f'Export {node.name}', default_file, 'Binary files (*.bin);;All files (*.*)')
    if not file_path:
        print('Export cancelled')
        return
    try:
        Path(file_path).write_bytes(node.data)
        print(f' Exported {len(node.data):,} bytes → {file_path}')
        QMessageBox.information(parent, 'Export Successful', f"'{node.name}' exported.\n\nFile: {Path(file_path).name}\nSize: {len(node.data):,} bytes")
    except Exception as e:
        print(f' Export error: {e}')
        QMessageBox.critical(parent, 'Export Failed', f"Could not export '{node.name}':\n{e}")
|
||||
|
||||
def _get_root_name(self, node: LayerNode) -> Optional[str]:
|
||||
cur = node
|
||||
while cur.parent:
|
||||
cur = cur.parent
|
||||
return cur.name
|
||||
|
||||
def _clear_tree(self, parent_frame):
|
||||
parent_frame.clear_items()
|
||||
self.root_nodes = []
|
||||
self.modified_layers = {}
|
||||
|
||||
def has_modifications(self) -> bool:
    """Return True when at least one layer swap is pending a ROM save."""
    return bool(self.modified_layers)
|
||||
|
||||
def get_modified_layers(self) -> Dict[str, bytes]:
    """Return a shallow-copy snapshot of the pending modifications."""
    return dict(self.modified_layers)
|
||||
|
||||
def get_modification_count(self) -> int:
    """Return the number of layer paths with pending modifications."""
    return len(self.modified_layers)
|
||||
|
||||
def clear_modifications(self):
    """Forget every pending modification without touching node data."""
    self.modified_layers = {}
    print('[LayerSwap] Modifications cleared')
|
||||
|
||||
def get_layer_data(self, node_path: str) -> Optional[bytes]:
    """Look up the payload stored at *node_path* across every root tree.

    Returns the matching node's bytes, or None when no tree contains a
    node with that path.
    """
    hits = (self._find_node_by_path(tree, node_path) for tree in self.root_nodes)
    return next((h for h in hits if h is not None), None)
|
||||
|
||||
def _find_node_by_path(self, node: LayerNode, target: str) -> Optional[bytes]:
|
||||
if node.get_path() == target:
|
||||
return node.data
|
||||
for child in node.children:
|
||||
r = self._find_node_by_path(child, target)
|
||||
if r is not None:
|
||||
return r
|
||||
return None
|
||||
|
||||
def clear(self):
    """Reset the widget to its initial, map-less state."""
    self._map_data = None
    self._dat_path = None
    self._tex_path = None
    self.current_map_name = None
    self.root_nodes = []
    self.modified_layers = {}
    print('[LayerSwap] Cleared')
|
||||
|
||||
class _ChildProxy:
|
||||
|
||||
def __init__(self, layout: QVBoxLayout):
|
||||
self._layout = layout
|
||||
|
||||
def add_widget(self, widget: QWidget):
|
||||
self._layout.addWidget(widget)
|
||||
164
gui/mapselector.py
Normal file
164
gui/mapselector.py
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
from pathlib import Path
|
||||
from tkinter import messagebox
|
||||
from load.maploader import MapLoader, MapData
|
||||
|
||||
class MapPair:
    """A map's pair of companion files: the .map.dat and .map.tex paths.

    Either path may still be None while the two halves are being paired
    up; is_complete() reports whether both have been found.
    """

    def __init__(self, name, dat_path=None, tex_path=None):
        self.name = name          # map identifier shared by both files
        self.dat_path = dat_path  # path to the .map.dat file, if found
        self.tex_path = tex_path  # path to the .map.tex file, if found

    def is_complete(self):
        """Return True when both the DAT and the TEX file are present."""
        return None not in (self.dat_path, self.tex_path)

    def __str__(self):
        marker = '✓' if self.is_complete() else '⚠'
        return f'{marker} {self.name}'

    def __repr__(self):
        has_dat = self.dat_path is not None
        has_tex = self.tex_path is not None
        return f"MapPair(name='{self.name}', dat={has_dat}, tex={has_tex})"
|
||||
|
||||
class MapSelector:
    """Pairs .map.dat/.map.tex files into MapPair objects and drives loading.

    Exposes three optional callbacks: on_maps_loaded(pairs) after pairing,
    on_map_selected(pair) when a map is chosen, and on_map_data_loaded(data)
    once the MapLoader has parsed the selected map's files.
    """

    def __init__(self):
        self.map_pairs = []            # all discovered MapPair objects, sorted by name
        self.selected_map = None       # the MapPair chosen via select_map()
        self.map_loader = MapLoader()  # parses DAT/TEX files into MapData
        self.on_maps_loaded = None     # callback(list[MapPair])
        self.on_map_selected = None    # callback(MapPair)
        self.on_map_data_loaded = None # callback(MapData)
        # Forward the loader's completion event through our own callback.
        self.map_loader.on_map_loaded = self._on_map_data_loaded

    def pair_map_files(self, dat_files, tex_files):
        """Group DAT and TEX files by map name into MapPair objects.

        Prints a pairing summary (including which halves are missing),
        fires on_maps_loaded, and returns the sorted list of pairs.
        """
        self.map_pairs = []
        maps_dict = {}
        for dat_file in dat_files:
            map_name = self._extract_map_name(dat_file.name, '.map.dat')
            if map_name:
                if map_name not in maps_dict:
                    maps_dict[map_name] = MapPair(map_name)
                maps_dict[map_name].dat_path = dat_file
        for tex_file in tex_files:
            map_name = self._extract_map_name(tex_file.name, '.map.tex')
            if map_name:
                if map_name not in maps_dict:
                    maps_dict[map_name] = MapPair(map_name)
                maps_dict[map_name].tex_path = tex_file
        self.map_pairs = sorted(maps_dict.values(), key=lambda x: x.name)
        complete_maps = sum((1 for m in self.map_pairs if m.is_complete()))
        incomplete_maps = len(self.map_pairs) - complete_maps
        print(f'\n=== Map Pairing Results ===')
        print(f'Total maps found: {len(self.map_pairs)}')
        print(f'Complete pairs (DAT + TEX): {complete_maps}')
        print(f'Incomplete pairs: {incomplete_maps}')
        if incomplete_maps > 0:
            print('\nWarning: Incomplete map pairs found:')
            for map_pair in self.map_pairs:
                if not map_pair.is_complete():
                    missing = []
                    if map_pair.dat_path is None:
                        missing.append('DAT')
                    if map_pair.tex_path is None:
                        missing.append('TEX')
                    print(f" - {map_pair.name}: Missing {', '.join(missing)}")
        print('===========================\n')
        if self.on_maps_loaded:
            self.on_maps_loaded(self.map_pairs)
        return self.map_pairs

    def _extract_map_name(self, filename, suffix):
        """Return the map name preceding *suffix* in *filename*, or None.

        Case-insensitive; a trailing '.lz' extension is stripped first so
        compressed files pair under the same name.
        """
        filename_lower = filename.lower()
        suffix_lower = suffix.lower()
        if filename_lower.endswith('.lz'):
            filename_lower = filename_lower[:-3]
            filename = filename[:-3]
        if suffix_lower in filename_lower:
            idx = filename_lower.find(suffix_lower)
            map_name = filename[:idx]
            return map_name
        return None

    def get_map_pairs(self):
        """Return every discovered MapPair (complete and incomplete)."""
        return self.map_pairs

    def get_complete_maps(self):
        """Return only the pairs that have both DAT and TEX files."""
        return [m for m in self.map_pairs if m.is_complete()]

    def get_incomplete_maps(self):
        """Return the pairs that are missing their DAT or TEX half."""
        return [m for m in self.map_pairs if not m.is_complete()]

    def select_map(self, map_name):
        """Select the pair named *map_name* and start loading it.

        Fires on_map_selected; complete pairs are handed to the MapLoader
        (which later fires on_map_data_loaded). Returns the MapPair, or
        None when the name is unknown.
        """
        for map_pair in self.map_pairs:
            if map_pair.name == map_name:
                self.selected_map = map_pair
                print(f'\nMap selected: {map_name}')
                print(f'  DAT file: {map_pair.dat_path}')
                print(f'  TEX file: {map_pair.tex_path}')
                if self.on_map_selected:
                    self.on_map_selected(map_pair)
                if map_pair.is_complete():
                    self.map_loader.load_map(map_pair.dat_path, map_pair.tex_path, map_pair.name)
                else:
                    print(f"Warning: Cannot load incomplete map '{map_name}'")
                return map_pair
        print(f"Warning: Map '{map_name}' not found")
        return None

    def select_map_by_index(self, index):
        """Select a map by its position in the sorted pair list."""
        if 0 <= index < len(self.map_pairs):
            map_pair = self.map_pairs[index]
            return self.select_map(map_pair.name)
        print(f'Warning: Invalid map index {index}')
        return None

    def get_selected_map(self):
        """Return the currently selected MapPair (or None)."""
        return self.selected_map

    def get_loaded_map_data(self) -> MapData:
        """Return the MapData parsed by the loader for the current map."""
        return self.map_loader.get_current_map()

    def get_map_count(self):
        """Return the total number of discovered map pairs."""
        return len(self.map_pairs)

    def get_map_names(self):
        """Return the names of every discovered map pair."""
        return [m.name for m in self.map_pairs]

    def get_complete_map_names(self):
        """Return the names of pairs that have both DAT and TEX files."""
        return [m.name for m in self.map_pairs if m.is_complete()]

    def get_layers(self):
        """Delegate to the loader: layers of the current map."""
        return self.map_loader.get_layers()

    def get_tilesets(self):
        """Delegate to the loader: tilesets of the current map."""
        return self.map_loader.get_tilesets()

    def get_tileset(self, index: int):
        """Delegate to the loader: a single tileset by index."""
        return self.map_loader.get_tileset(index)

    def get_tileset_for_rendering(self, index: int):
        """Delegate to the loader: one tileset prepared for rendering."""
        return self.map_loader.get_tileset_for_rendering(index)

    def get_all_tilesets_for_rendering(self):
        """Delegate to the loader: every tileset prepared for rendering."""
        return self.map_loader.get_all_tilesets_for_rendering()

    def _on_map_data_loaded(self, map_data: MapData):
        """Loader completion hook: forward the MapData to our own callback."""
        print(f'Map data loaded callback triggered for: {map_data.map_name}')
        if self.on_map_data_loaded:
            self.on_map_data_loaded(map_data)

    def read_map_files(self, map_pair):
        """Read both raw files of *map_pair* from disk.

        Returns (dat_bytes, tex_bytes), or (None, None) when the pair is
        incomplete or a read fails (an error dialog is shown on failure).
        """
        if not map_pair.is_complete():
            print(f"Error: Cannot read incomplete map '{map_pair.name}'")
            return (None, None)
        try:
            with open(map_pair.dat_path, 'rb') as f:
                dat_data = f.read()
            with open(map_pair.tex_path, 'rb') as f:
                tex_data = f.read()
            print(f"Read map '{map_pair.name}': DAT={len(dat_data)} bytes, TEX={len(tex_data)} bytes")
            return (dat_data, tex_data)
        except Exception as e:
            print(f'Error reading map files: {e}')
            messagebox.showerror('Error', f'Failed to read map files: {str(e)}')
            return (None, None)
|
||||
274
gui/romselector.py
Normal file
274
gui/romselector.py
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
import os
|
||||
import struct
|
||||
from pathlib import Path
|
||||
from tkinter import filedialog, messagebox
|
||||
import shutil
|
||||
import mmap
|
||||
|
||||
class NDSHeader:
    """Parsed view of the leading bytes of a Nintendo DS cartridge header.

    Exposes the title/codes, the ARM9/ARM7 binary descriptors, the file
    name table (FNT) and file allocation table (FAT) locations, and the
    declared ROM/header sizes.
    """

    def __init__(self, data):
        # Fixed-width ASCII fields, NUL-padded.
        self.game_title = data[0:12].decode('ascii', errors='ignore').strip('\x00')
        self.game_code = data[12:16].decode('ascii', errors='ignore').strip('\x00')
        self.maker_code = data[16:18]
        # Eight consecutive little-endian u32s describe the two CPU binaries.
        (self.arm9_rom_addr, self.arm9_entry_addr, self.arm9_ram_addr,
         self.arm9_size, self.arm7_rom_addr, self.arm7_entry_addr,
         self.arm7_ram_addr, self.arm7_size) = struct.unpack_from('<8I', data, 32)
        # FNT and FAT offset/size pairs follow at offset 0x40.
        (self.filename_table_addr, self.filename_size,
         self.fat_addr, self.fat_size) = struct.unpack_from('<4I', data, 64)
        # Declared total ROM size and header size at offsets 0x80/0x84.
        self.rom_size, self.header_size = struct.unpack_from('<2I', data, 128)
|
||||
|
||||
class FatRange:
    """Half-open [start, end) byte range of one file in the ROM's FAT."""

    def __init__(self, start_addr, end_addr):
        self.start_addr = start_addr  # offset of the file's first byte
        self.end_addr = end_addr      # offset one past the file's last byte

    @property
    def size(self):
        """Length of the file in bytes (end minus start)."""
        return self.end_addr - self.start_addr
|
||||
|
||||
class FileIndexEntry:
    """Associates a ROM-internal file path with its FAT slot index."""

    def __init__(self, path, fat_index):
        self.path = path            # slash-separated path inside the ROM
        self.fat_index = fat_index  # index into the FAT entry table

    def __repr__(self):
        return f"FileIndexEntry(path='{self.path}', fat_index={self.fat_index})"
|
||||
|
||||
class ROMSelector:
    """Selects a Pokemon Ranger NDS ROM and extracts only its map files.

    Parses the cartridge header to locate the FAT and FNT, builds a path
    index of the ROM's virtual filesystem, and extracts just the
    .map.dat/.map.tex files under data/field/map into a sibling
    '<rom>_extracted' folder.

    Fix over the previous revision: ``_index_directory`` used a mutable
    default argument (``fat_offset=[0]``) whose state leaked between
    successive extractions, corrupting FAT indices on any second run.
    """

    def __init__(self):
        self.rom_path = None             # path of the chosen .nds file
        self.extracted_path = None       # root folder extraction writes into
        self.map_folder_path = None      # .../data/field/map output folder
        self.dat_files = []              # extracted .map.dat paths
        self.tex_files = []              # extracted .map.tex paths
        self._cancel_extraction = False  # set by cancel_extraction()

    def browse_rom(self):
        """Ask the user for an .nds file; return True when one was chosen."""
        file_path = filedialog.askopenfilename(title='Select Pokemon Ranger: Shadows of Almia ROM', filetypes=[('NDS ROM files', '*.nds'), ('All files', '*.*')])
        if file_path:
            self.rom_path = file_path
            print(f'ROM selected: {self.rom_path}')
            return True
        return False

    def extract_rom(self, callback=None):
        """Extract the map files from the selected ROM.

        *callback*, when given, receives progress strings. Returns True on
        success, False on error or cancellation (errors are also shown in
        a message box).
        """
        if not self.rom_path:
            messagebox.showerror('Error', 'No ROM file selected!')
            return False
        self._cancel_extraction = False
        try:
            rom_name = Path(self.rom_path).stem
            self.extracted_path = Path(self.rom_path).parent / f'{rom_name}_extracted'
            if callback:
                callback('Reading ROM header...')
            header = self._parse_nds_header()
            if not header:
                messagebox.showerror('Error', 'Invalid NDS ROM file!')
                return False
            print(f'\n=== ROM Information ===')
            print(f'Game Title: {header.game_title}')
            print(f'Game Code: {header.game_code}')
            print(f'FAT Address: 0x{header.fat_addr:08X}, Size: {header.fat_size}')
            print(f'FNT Address: 0x{header.filename_table_addr:08X}, Size: {header.filename_size}')
            print(f'======================\n')
            if callback:
                callback('Building file index...')
            success = self._extract_map_files_targeted(header, callback)
            if not success or self._cancel_extraction:
                if self._cancel_extraction:
                    print('ROM extraction cancelled')
                else:
                    messagebox.showerror('Error', 'Failed to extract map files!')
                return False
            if callback:
                callback(f'Found {len(self.dat_files)} .map.dat files and {len(self.tex_files)} .map.tex files')
            print(f'\nExtraction complete. Found {len(self.dat_files)} DAT maps and {len(self.tex_files)} TEX maps')
            return True
        except Exception as e:
            messagebox.showerror('Error', f'ROM extraction failed: {str(e)}')
            print(f'Error during extraction: {e}')
            import traceback
            traceback.print_exc()
            return False

    def cancel_extraction(self):
        """Request that a running extraction stop at its next checkpoint."""
        self._cancel_extraction = True

    def _parse_nds_header(self):
        """Read the first 512 bytes of the ROM; return an NDSHeader or None."""
        try:
            with open(self.rom_path, 'rb') as f:
                header_data = f.read(512)
                if len(header_data) < 512:
                    return None
                return NDSHeader(header_data)
        except Exception as e:
            print(f'Error reading ROM header: {e}')
            return None

    def _extract_map_files_targeted(self, header, callback=None):
        """Extract only data/field/map files using the FAT and FNT tables."""
        try:
            # Start from a clean output folder each run.
            if self.extracted_path.exists():
                shutil.rmtree(self.extracted_path)
            self.extracted_path.mkdir(parents=True, exist_ok=True)
            self.map_folder_path = self.extracted_path / 'data' / 'field' / 'map'
            # Memory-map the ROM so FAT-addressed slices are cheap.
            with open(self.rom_path, 'rb') as rom_file:
                with mmap.mmap(rom_file.fileno(), 0, access=mmap.ACCESS_READ) as rom_data:
                    if callback:
                        callback('Loading FAT entries...')
                    fat_data = rom_data[header.fat_addr:header.fat_addr + header.fat_size]
                    fat_entries = []
                    # Each FAT entry is two little-endian u32s: start, end.
                    for i in range(0, len(fat_data), 8):
                        if i + 8 <= len(fat_data):
                            start_addr = struct.unpack('<I', fat_data[i:i + 4])[0]
                            end_addr = struct.unpack('<I', fat_data[i + 4:i + 8])[0]
                            fat_entries.append(FatRange(start_addr, end_addr))
                    print(f'Loaded {len(fat_entries)} FAT entries')
                    if callback:
                        callback('Reading file name table...')
                    fnt_data = rom_data[header.filename_table_addr:header.filename_table_addr + header.filename_size]
                    if callback:
                        callback('Building file index...')
                    file_index = self._build_file_index(fnt_data)
                    print(f'Built index of {len(file_index)} files')
                    if callback:
                        callback('Filtering for map files...')
                    map_files = self._filter_map_files(file_index)
                    print(f'Found {len(map_files)} map files to extract')
                    if len(map_files) == 0:
                        print('Warning: No map files found in ROM!')
                        return False
                    if callback:
                        callback('Extracting map files...')
                    success = self._extract_filtered_files(rom_data, fat_entries, map_files, callback)
                    if not success:
                        return False
                    self._scan_map_files()
                    return True
        except Exception as e:
            print(f'Error in targeted extraction: {e}')
            import traceback
            traceback.print_exc()
            return False

    def _build_file_index(self, fnt_data):
        """Walk the FNT from the root directory (0xF000) into a flat index."""
        file_index = []
        try:
            # Pass a fresh counter explicitly so no state survives between
            # extractions (see the mutable-default bug noted on the class).
            self._index_directory(fnt_data, 61440, '', file_index, [0])
        except Exception as e:
            print(f'Error building file index: {e}')
        return file_index

    def _index_directory(self, fnt_data, folder_id, current_path, file_index, fat_offset=None):
        """Recursively index one FNT directory into *file_index*.

        *fat_offset* is a single-element list acting as a mutable running
        file-ID counter shared across the recursion. It defaults to a
        fresh ``[0]`` per call; the previous mutable default argument
        (``fat_offset=[0]``) leaked the counter between separate
        extractions.
        """
        if fat_offset is None:
            fat_offset = [0]
        try:
            # Directory main-table entries are 8 bytes each; 0xF000-based IDs.
            current_offset = 8 * (folder_id & 4095)
            if current_offset + 8 > len(fnt_data):
                return
            entry_offset = struct.unpack('<I', fnt_data[current_offset:current_offset + 4])[0]
            first_file_id = struct.unpack('<H', fnt_data[current_offset + 4:current_offset + 6])[0]
            # Seed the running file-ID counter from the first directory seen.
            if isinstance(fat_offset, list) and len(fat_offset) == 1 and (fat_offset[0] == 0):
                fat_offset[0] = first_file_id
            offset = entry_offset
            while offset < len(fnt_data):
                # Control byte: 0 terminates; bit 7 marks a subdirectory;
                # low 7 bits give the name length.
                control_byte = fnt_data[offset]
                if control_byte == 0:
                    break
                offset += 1
                name_length = control_byte & 127
                is_directory = bool(control_byte & 128)
                if offset + name_length > len(fnt_data):
                    break
                name = fnt_data[offset:offset + name_length].decode('utf-8', errors='replace')
                offset += name_length
                if current_path:
                    new_path = f'{current_path}/{name}'
                else:
                    new_path = name
                if is_directory:
                    # Subdirectory entries carry a 2-byte child folder ID.
                    if offset + 2 > len(fnt_data):
                        break
                    sub_folder_id = struct.unpack('<H', fnt_data[offset:offset + 2])[0]
                    offset += 2
                    self._index_directory(fnt_data, sub_folder_id, new_path, file_index, fat_offset)
                else:
                    file_index.append(FileIndexEntry(new_path, fat_offset[0]))
                    fat_offset[0] += 1
        except Exception as e:
            print(f'Error indexing directory: {e}')

    def _filter_map_files(self, file_index):
        """Return index entries for .map.dat/.map.tex files under data/field/map."""
        map_files = []
        for entry in file_index:
            path_lower = entry.path.lower()
            if 'data/field/map/' in path_lower or 'data/field/map\\' in path_lower:
                if '.map.dat' in path_lower or '.map.tex' in path_lower:
                    map_files.append(entry)
                    print(f'  Map file: {entry.path} (FAT index: {entry.fat_index})')
        return map_files

    def _extract_filtered_files(self, rom_data, fat_entries, filtered_files, callback=None):
        """Write each filtered file's FAT-addressed bytes into the map folder."""
        try:
            self.map_folder_path.mkdir(parents=True, exist_ok=True)
            total_files = len(filtered_files)
            for i, file_entry in enumerate(filtered_files):
                if self._cancel_extraction:
                    return False
                if file_entry.fat_index >= len(fat_entries):
                    print(f'Warning: FAT index {file_entry.fat_index} out of range for {file_entry.path}')
                    continue
                fat_entry = fat_entries[file_entry.fat_index]
                if fat_entry.size <= 0:
                    continue
                # Flatten the ROM's directory structure: keep the filename only.
                filename = Path(file_entry.path).name
                output_path = self.map_folder_path / filename
                file_data = rom_data[fat_entry.start_addr:fat_entry.end_addr]
                try:
                    with open(output_path, 'wb') as f:
                        f.write(file_data)
                    # Throttle progress updates to every 10th file.
                    if callback and (i % 10 == 0 or i == total_files - 1):
                        callback(f'Extracted {i + 1}/{total_files} map files...')
                except Exception as e:
                    print(f'Error writing file {output_path}: {e}')
            print(f'Extracted {total_files} map files to {self.map_folder_path}')
            return True
        except Exception as e:
            print(f'Error extracting filtered files: {e}')
            import traceback
            traceback.print_exc()
            return False

    def _scan_map_files(self):
        """Re-scan the map folder and rebuild the sorted DAT/TEX file lists."""
        self.dat_files = []
        self.tex_files = []
        if not self.map_folder_path or not self.map_folder_path.exists():
            return
        print(f'\nScanning map folder: {self.map_folder_path}')
        file_count = 0
        for file_path in self.map_folder_path.iterdir():
            if file_path.is_file():
                file_name = file_path.name.lower()
                file_count += 1
                if '.map.dat' in file_name:
                    self.dat_files.append(file_path)
                elif '.map.tex' in file_name:
                    self.tex_files.append(file_path)
        self.dat_files.sort(key=lambda x: x.name)
        self.tex_files.sort(key=lambda x: x.name)
        print(f'Scan complete: {len(self.dat_files)} DAT files, {len(self.tex_files)} TEX files')

    def get_map_files(self):
        """Return the (dat_files, tex_files) lists from the last scan."""
        return (self.dat_files, self.tex_files)

    def get_map_folder(self):
        """Return the extracted data/field/map folder path (or None)."""
        return self.map_folder_path
|
||||
313
gui/tilesetrender.py
Normal file
313
gui/tilesetrender.py
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
from PIL import Image, ImageTk, ImageDraw
|
||||
from typing import List, Tuple, Optional, Dict
|
||||
import customtkinter as ctk
|
||||
|
||||
def u16(b: bytes, o: int) -> int:
    """Read a little-endian unsigned 16-bit value at offset *o* of *b*.

    Returns 0 when fewer than two bytes remain at the offset.
    """
    if o + 1 >= len(b):
        return 0
    return b[o] + (b[o + 1] << 8)
|
||||
|
||||
def u32(b: bytes, o: int) -> int:
    """Read a little-endian unsigned 32-bit value at offset *o* of *b*.

    Returns 0 when fewer than four bytes remain at the offset.
    """
    if o + 3 >= len(b):
        return 0
    return b[o] | (b[o + 1] << 8) | (b[o + 2] << 16) | (b[o + 3] << 24)
|
||||
|
||||
def is_valid_palette_magic(magic: bytes) -> bool:
    """Return True when *magic* looks like an NDS palette file signature.

    Accepts RLCN/NCLR/RTFN and anything sharing their first three bytes
    (which also covers the byte-order-swapped spellings), exactly as the
    original full-match-or-prefix check did: every full match implies a
    prefix match, so the prefix test alone is equivalent.
    """
    if len(magic) < 4:
        return False
    return magic[:3] in (b'RLC', b'NCL', b'RTF')
|
||||
|
||||
def is_valid_graphics_magic(magic: bytes) -> bool:
    """Return True when *magic* looks like an NDS graphics file signature.

    Accepts RGCN/NCGR/NCBR/NCER and anything sharing their first three
    bytes (which also covers the byte-order-swapped spellings); as in the
    original, the prefix test subsumes the full-match test.
    """
    if len(magic) < 4:
        return False
    return magic[:3] in (b'RGC', b'NCG', b'NCB', b'NCE')
|
||||
|
||||
def parse_palette(rlcn_data: bytes) -> List[Tuple[int, int, int, int]]:
    """Parse an NCLR/RLCN palette blob into a 256-entry list of RGBA tuples.

    BGR555 entries are expanded to 8-bit RGB; palette index 0 is treated as
    transparent.  On any parse failure a grayscale fallback palette is
    returned instead of raising.
    """
    if not rlcn_data or len(rlcn_data) < 24:
        # Too small for a header — grayscale fallback, index 0 transparent.
        return [(i, i, i, 255 if i > 0 else 0) for i in range(256)]
    magic = rlcn_data[0:4]
    if not is_valid_palette_magic(magic):
        print(f'Warning: Unusual palette magic {magic.hex()}, attempting to parse anyway')
    # Locate the TTLP/PLTT palette section: try the common offsets first...
    ttlp_off = None
    for offset in [20, 16, 24, 28, 32]:
        if offset + 4 <= len(rlcn_data):
            section_magic = rlcn_data[offset:offset + 4]
            if section_magic in [b'TTLP', b'PLTT', b'PLTL', b'TLTP']:
                ttlp_off = offset
                break
    if ttlp_off is None:
        # ...then brute-force scan the first 128 bytes.
        for offset in range(0, min(len(rlcn_data) - 4, 128)):
            section_magic = rlcn_data[offset:offset + 4]
            if section_magic in [b'TTLP', b'PLTT', b'PLTL', b'TLTP']:
                ttlp_off = offset
                break
    if ttlp_off is None:
        print('Warning: No palette section found, using default grayscale')
        return [(i, i, i, 255 if i > 0 else 0) for i in range(256)]
    # Color data normally starts 24 bytes into the section.
    pal_data_off = ttlp_off + 24
    if pal_data_off >= len(rlcn_data):
        # Header shorter than expected — probe smaller header sizes.
        for try_offset in [16, 20, 24, 28, 32]:
            test_off = ttlp_off + try_offset
            if test_off < len(rlcn_data) - 32:
                pal_data_off = test_off
                break
    if pal_data_off >= len(rlcn_data):
        print('Warning: Palette data offset out of range, using default')
        return [(i, i, i, 255 if i > 0 else 0) for i in range(256)]
    palette = []
    pal_offset = pal_data_off
    for i in range(256):
        if pal_offset + 2 > len(rlcn_data):
            break
        # Each entry is a little-endian BGR555 value.
        bgr555 = u16(rlcn_data, pal_offset)
        pal_offset += 2
        r = (bgr555 & 31) << 3
        g = (bgr555 >> 5 & 31) << 3
        b = (bgr555 >> 10 & 31) << 3
        # Replicate the top bits so 0x1F maps to 0xFF rather than 0xF8.
        r = r | r >> 5
        g = g | g >> 5
        b = b | b >> 5
        a = 0 if i == 0 else 255  # index 0 is the transparent color
        palette.append((r, g, b, a))
    # Pad short palettes with magenta so out-of-range indices are visible.
    while len(palette) < 256:
        palette.append((255, 0, 255, 255))
    return palette
|
||||
|
||||
def parse_graphics(rgcn_data: bytes) -> Tuple[bytes, int, int, int]:
    """Parse an NCGR/RGCN graphics blob.

    Returns (pixel_data, width, height, bpp) where bpp is 4 or 8.  On any
    failure an empty pixel buffer with default 256x256 dimensions is
    returned instead of raising.
    """
    if not rgcn_data or len(rgcn_data) < 48:
        print('Warning: Graphics data too small')
        return (b'', 256, 256, 4)
    magic = rgcn_data[0:4]
    if not is_valid_graphics_magic(magic):
        print(f'Warning: Unusual graphics magic {magic.hex()}, attempting to parse anyway')
    # Locate the RAHC/CHAR character-data section: common offsets, then scan.
    rahc_off = None
    for offset in [20, 16, 24, 28, 32]:
        if offset + 4 <= len(rgcn_data):
            section_magic = rgcn_data[offset:offset + 4]
            if section_magic in [b'RAHC', b'CHAR', b'CRAH', b'RHAC']:
                rahc_off = offset
                break
    if rahc_off is None:
        for offset in range(16, min(len(rgcn_data) - 4, 128), 4):
            section_magic = rgcn_data[offset:offset + 4]
            if section_magic in [b'RAHC', b'CHAR', b'CRAH', b'RHAC']:
                rahc_off = offset
                break
    if rahc_off is None:
        print('Warning: No graphics section found')
        return (b'', 256, 256, 4)
    if rahc_off + 32 > len(rgcn_data):
        # Truncated section header: fall back to default dimensions and
        # return whatever pixel data follows a minimal 16-byte header.
        print('Warning: Graphics section header incomplete')
        width, height, bpp = (256, 256, 4)
        data_off = rahc_off + 16
        if data_off < len(rgcn_data):
            gfx_data = rgcn_data[data_off:]
            return (gfx_data, width, height, bpp)
        return (b'', width, height, bpp)
    # Section header fields: dimensions, pixel format flag, data size.
    height_value = u16(rgcn_data, rahc_off + 8)
    width_value = u16(rgcn_data, rahc_off + 10)
    bit_depth_flag = u32(rgcn_data, rahc_off + 12)
    tile_data_size = u32(rgcn_data, rahc_off + 24)
    bpp = 4 if bit_depth_flag == 3 else 8  # flag 3 means 4bpp, otherwise 8bpp
    bytes_per_pixel = 0.5 if bpp == 4 else 1.0
    expected_size = int(width_value * height_value * bytes_per_pixel)
    # The header may count pixels or 8x8 tiles; pick whichever matches the
    # declared data size, otherwise guess dimensions from the size itself.
    if abs(expected_size - tile_data_size) < 16:
        width = width_value
        height = height_value
    elif abs(expected_size * 64 - tile_data_size) < 16:
        width = width_value * 8
        height = height_value * 8
    else:
        if tile_data_size > 0 and bytes_per_pixel > 0:
            total_pixels = int(tile_data_size / bytes_per_pixel)
        else:
            total_pixels = 65536
        common_sizes = [(256, 256), (256, 128), (128, 256), (128, 128), (256, 64), (64, 256), (512, 256), (256, 512), (512, 512), (64, 64), (32, 32), (16, 16)]
        width, height = (256, 256)
        for w, h in common_sizes:
            if w * h == total_pixels:
                width, height = (w, h)
                break
        else:
            # No common size matched: nearest square, rounded up to 8x8 tiles.
            width = int(total_pixels ** 0.5)
            height = total_pixels // width if width > 0 else 8
            width = max(8, (width + 7) // 8 * 8)
            height = max(8, (height + 7) // 8 * 8)
    # Clamp to a sane range regardless of which branch ran.
    width = max(8, min(512, width))
    height = max(8, min(512, height))
    # Pixel data normally starts 32 bytes into the section.
    data_off = rahc_off + 32
    if data_off >= len(rgcn_data):
        for try_offset in [16, 20, 24, 28, 32, 36, 40]:
            test_off = rahc_off + try_offset
            if test_off < len(rgcn_data):
                data_off = test_off
                break
    if data_off >= len(rgcn_data):
        print('Warning: Graphics data offset out of range')
        return (b'', width, height, bpp)
    if tile_data_size > 0 and data_off + tile_data_size <= len(rgcn_data):
        gfx_data = rgcn_data[data_off:data_off + tile_data_size]
    else:
        gfx_data = rgcn_data[data_off:]
    return (gfx_data, width, height, bpp)
|
||||
|
||||
def create_error_tileset(width: int, height: int, error_msg: str) -> Image.Image:
    """Return a placeholder image shown when a tileset cannot be rendered.

    The placeholder is a dark panel with a red border and diagonal hatching;
    when the image is large enough, 'ERROR' plus the first 20 characters of
    *error_msg* are drawn in the middle.

    Args:
        width: placeholder width in pixels.
        height: placeholder height in pixels.
        error_msg: short failure description to overlay.
    """
    img = Image.new('RGBA', (width, height), (60, 60, 80, 255))
    draw = ImageDraw.Draw(img)
    draw.rectangle([0, 0, width - 1, height - 1], outline=(255, 0, 0, 255), width=2)
    # Diagonal hatch lines across the whole image.
    for i in range(0, max(width, height), 16):
        draw.line([(i, 0), (i - height, height)], fill=(255, 0, 0, 128), width=1)
    if width >= 100 and height >= 50:
        try:
            text_lines = ['ERROR', error_msg[:20]]
            y_offset = height // 2 - 20
            for line in text_lines:
                bbox = draw.textbbox((0, 0), line)
                text_width = bbox[2] - bbox[0]
                x = (width - text_width) // 2
                draw.text((x, y_offset), line, fill=(255, 255, 0, 255))
                y_offset += 20
        except Exception:
            # Text rendering is best-effort (font metrics may be unavailable);
            # the hatched placeholder is still returned.  Narrowed from a bare
            # `except:`, which also swallowed KeyboardInterrupt/SystemExit.
            pass
    return img
|
||||
|
||||
def render_tileset(rgcn_data: bytes, rlcn_data: bytes) -> Image.Image:
    """Render a tileset (RGCN graphics + RLCN palette) into an RGBA image.

    Never raises: parse or render failures produce a visible placeholder via
    create_error_tileset() instead.
    """
    width, height = (256, 256)
    if not rlcn_data or len(rlcn_data) < 32:
        print('Warning: No valid palette data, using default grayscale')
        palette = [(i, i, i, 255 if i > 0 else 0) for i in range(256)]
    else:
        palette = parse_palette(rlcn_data)
        # Pad with visible magenta so bad indices stand out.
        while len(palette) < 256:
            palette.append((255, 0, 255, 255))
    if not rgcn_data or len(rgcn_data) < 32:
        print('Warning: No valid graphics data')
        return create_error_tileset(width, height, 'No RGCN data')
    gfx_data, width, height, bpp = parse_graphics(rgcn_data)
    if not gfx_data or len(gfx_data) == 0:
        print('Warning: No graphics data found in RGCN')
        return create_error_tileset(width, height, 'Empty graphics')
    img = Image.new('RGBA', (width, height), (0, 0, 0, 0))
    pixels = img.load()
    try:
        # Expand the raw bytes into one palette index per pixel.
        indices = []
        if bpp == 4:
            # 4bpp: low nibble is the left pixel, high nibble the right.
            for byte in gfx_data:
                indices.append(byte & 15)
                indices.append(byte >> 4 & 15)
        else:
            indices.extend(gfx_data)
        pixels_rendered = 0
        # Linear row-major layout — assumes the data is not 8x8-tiled;
        # TODO(review): confirm against real game assets.
        for i, idx in enumerate(indices):
            if i >= width * height:
                break
            y, x = divmod(i, width)
            if x < width and y < height:
                color = palette[idx % len(palette)]
                pixels[x, y] = color
                pixels_rendered += 1
        if pixels_rendered == 0:
            print('Warning: No pixels rendered')
            return create_error_tileset(width, height, 'Render failed')
    except Exception as e:
        print(f'Error rendering tileset: {e}')
        return create_error_tileset(width, height, str(e)[:20])
    return img
|
||||
|
||||
class TilesetRenderer:
    """Holds the tilesets of the loaded map and renders the selected one.

    Toolkit-agnostic except for get_tk_image(); the GUI is notified through
    the on_tileset_selected / on_tileset_rendered callbacks.
    """

    def __init__(self):
        self.tilesets = []                  # list of dicts carrying RGCN/RLCN (or NCGR/NCLR) blobs
        self.selected_tileset_index = None  # index into self.tilesets, or None
        self.rendered_image = None          # PIL image of the last render
        self.tk_image = None                # kept referenced so Tk does not GC the PhotoImage
        self.on_tileset_selected = None     # callback(index, tileset_dict)
        self.on_tileset_rendered = None     # callback(PIL.Image)

    def load_tilesets(self, tilesets: List[Dict]):
        """Replace the tileset list and reset all selection/render state."""
        self.tilesets = tilesets
        self.selected_tileset_index = None
        self.rendered_image = None
        self.tk_image = None
        print(f'\n=== TilesetRenderer: Loaded {len(tilesets)} tilesets ===')
        for i, ts in enumerate(tilesets):
            has_rgcn = 'Yes' if ts.get('RGCN') or ts.get('NCGR') else 'No'
            has_rlcn = 'Yes' if ts.get('RLCN') or ts.get('NCLR') else 'No'
            print(f'  Tileset {i}: RGCN={has_rgcn}, RLCN={has_rlcn}')

    def get_tilesets(self) -> List[Dict]:
        """Return the currently loaded tileset list."""
        return self.tilesets

    def get_tileset_count(self) -> int:
        """Return the number of loaded tilesets."""
        return len(self.tilesets)

    def select_tileset(self, index: int) -> bool:
        """Select tileset *index*, fire the callback and render it.

        Returns False for an out-of-range index or a tileset with neither
        graphics nor palette data.
        """
        if index < 0 or index >= len(self.tilesets):
            print(f'Warning: Invalid tileset index {index}')
            return False
        self.selected_tileset_index = index
        tileset = self.tilesets[index]
        print(f'\n=== Tileset {index} selected ===')
        # Accept both scrambled (RGCN/RLCN) and canonical (NCGR/NCLR) keys.
        rgcn_data = tileset.get('RGCN') or tileset.get('NCGR')
        rlcn_data = tileset.get('RLCN') or tileset.get('NCLR')
        if not rgcn_data and (not rlcn_data):
            print('Error: No graphics or palette data available')
            if 'error' in tileset:
                print(f"  Error: {tileset['error']}")
            return False
        if self.on_tileset_selected:
            self.on_tileset_selected(index, tileset)
        self.render_current_tileset()
        return True

    def render_current_tileset(self) -> Optional[Image.Image]:
        """Render the selected tileset, fire on_tileset_rendered, return the image."""
        if self.selected_tileset_index is None:
            print('Warning: No tileset selected')
            return None
        tileset = self.tilesets[self.selected_tileset_index]
        rgcn_data = tileset.get('RGCN') or tileset.get('NCGR') or b''
        rlcn_data = tileset.get('RLCN') or tileset.get('NCLR') or b''
        print(f'Rendering tileset {self.selected_tileset_index}...')
        print(f'  RGCN size: {len(rgcn_data)} bytes')
        print(f'  RLCN size: {len(rlcn_data)} bytes')
        self.rendered_image = render_tileset(rgcn_data, rlcn_data)
        print(f'  Rendered: {self.rendered_image.size[0]}x{self.rendered_image.size[1]}')
        if self.on_tileset_rendered:
            self.on_tileset_rendered(self.rendered_image)
        return self.rendered_image

    def get_rendered_image(self) -> Optional[Image.Image]:
        """Return the last rendered PIL image, or None."""
        return self.rendered_image

    def get_tk_image(self, max_width: int=800, max_height: int=600) -> Optional[ImageTk.PhotoImage]:
        """Return a Tk PhotoImage of the render, downscaled to fit the bounds.

        Images already within the bounds are never upscaled.  The result is
        also stored on self.tk_image to keep it alive.
        """
        if self.rendered_image is None:
            return None
        img_width, img_height = self.rendered_image.size
        scale_x = max_width / img_width if img_width > max_width else 1.0
        scale_y = max_height / img_height if img_height > max_height else 1.0
        scale = min(scale_x, scale_y, 1.0)
        if scale < 1.0:
            new_width = int(img_width * scale)
            new_height = int(img_height * scale)
            # NEAREST keeps pixel art crisp when shrinking.
            scaled_img = self.rendered_image.resize((new_width, new_height), Image.NEAREST)
        else:
            scaled_img = self.rendered_image
        self.tk_image = ImageTk.PhotoImage(scaled_img)
        return self.tk_image

    def export_png(self, output_path: str) -> bool:
        """Save the last rendered image as a PNG; return True on success."""
        if self.rendered_image is None:
            print('Error: No tileset rendered')
            return False
        try:
            self.rendered_image.save(output_path, 'PNG')
            print(f'Tileset exported to: {output_path}')
            return True
        except Exception as e:
            print(f'Error exporting PNG: {e}')
            return False

    def clear(self):
        """Drop all tilesets and rendered state."""
        self.tilesets = []
        self.selected_tileset_index = None
        self.rendered_image = None
        self.tk_image = None
        print('TilesetRenderer: Cleared')
|
||||
6
load/__init__.py
Normal file
6
load/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
from load.maploader import MapLoader, MapData
|
||||
from load.datparser import parse_dat_map
|
||||
from load.texparser import parse_tex_map
|
||||
from load.lz10util import decompress_lz10, compress_lz10
|
||||
from load.narcutil import parse_narc, build_narc
|
||||
__all__ = ['MapLoader', 'MapData', 'parse_dat_map', 'parse_tex_map', 'decompress_lz10', 'compress_lz10', 'parse_narc', 'build_narc']
|
||||
44
load/datparser.py
Normal file
44
load/datparser.py
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
from typing import Dict, List
|
||||
from load.lz10util import decompress_lz10
|
||||
from load.narcutil import parse_narc
|
||||
# Maps the layer-type id (first little-endian u32 of a layer blob) to a display name.
LAYER_NAMES = {1: '0X01 LAYER', 2: '0X02 LAYER', 3: '0X03 LAYER', 4: '0X04 LAYER', 5: '0X05 LAYER', 6: '0X06 LAYER', 7: '0X07 LAYER', 8: '0X08 LAYER', 9: '0X09 LAYER', 10: '0X0A LAYER', 13: '0X0D LAYER', 14: '0X0E LAYER'}
|
||||
|
||||
def parse_dat_map(dat_path: str) -> Dict:
    """Parse a .map.dat file into its sections.

    The file is an LZ10-compressed NARC whose members are tagged MPIF, TXIF,
    LYR and CTA; LYR wraps a (possibly nested) NARC of per-layer blobs.

    Returns a dict with keys 'mpif', 'txif', 'layers', 'cta'; 'layers' is a
    list of {'type', 'name', 'data'} dicts.
    """
    with open(dat_path, 'rb') as f:
        raw = f.read()
    dec = decompress_lz10(raw)
    outer_files = parse_narc(dec)
    mpif = None
    txif = None
    lyr = None
    cta = None
    # Classify the top-level members by their 4-byte signature.
    for bf in outer_files:
        sig = bf[:4]
        if sig == b'MPIF':
            mpif = bf
        elif sig == b'TXIF':
            txif = bf
        elif sig == b'LYR\x00':
            lyr = bf
        elif sig == b'CTA\x00':
            cta = bf
    layers = []
    if lyr:
        if lyr[:4] == b'LYR\x00':
            lyr = lyr[4:]  # strip the section tag before NARC parsing
        lyr_stage1 = parse_narc(lyr)
        # Layer blobs may be wrapped in one extra NARC level; flatten it.
        final_candidates = []
        for blob in lyr_stage1:
            if blob[:4] == b'NARC':
                inner = parse_narc(blob)
                final_candidates.extend(inner)
            else:
                final_candidates.append(blob)
        for lb in final_candidates:
            if len(lb) >= 4:
                ltype = int.from_bytes(lb[0:4], 'little')
            else:
                ltype = -1  # too short to carry a type id
            name = LAYER_NAMES.get(ltype, f'UNKNOWN_{ltype:02X}')
            layers.append({'type': ltype, 'name': name, 'data': lb})
    return {'mpif': mpif, 'txif': txif, 'layers': layers, 'cta': cta}
|
||||
267
load/importtileset.py
Normal file
267
load/importtileset.py
Normal file
|
|
@ -0,0 +1,267 @@
|
|||
from pathlib import Path
|
||||
from typing import Tuple, Optional, Dict, List
|
||||
import struct
|
||||
from load.lz10util import compress_lz10, decompress_lz10
|
||||
from load.narcutil import parse_narc, build_narc
|
||||
from load.datparser import parse_dat_map
|
||||
from load.texparser import parse_tex_map
|
||||
|
||||
def detect_file_type(file_data: bytes) -> Tuple[str, str]:
    """Classify a blob as graphics ('RGCN'), palette ('RLCN') or 'UNKNOWN'.

    Tries, in order: the exact 4-byte magic, the byte-reversed magic, a
    3-byte prefix match, and finally a scan for characteristic section tags
    (RAHC/CHAR vs TTLP/PLTT) in the first 128 bytes.

    Returns (type, format_description).
    """
    if not file_data or len(file_data) < 4:
        return ('UNKNOWN', 'Too small')
    magic = file_data[:4]
    graphics_magics = [(b'RGCN', 'RGCN'), (b'NCGR', 'NCGR'), (b'NCBR', 'NCBR'), (b'NCER', 'NCER')]
    palette_magics = [(b'RLCN', 'RLCN'), (b'NCLR', 'NCLR'), (b'RTFN', 'RTFN')]
    # Three passes over the candidate tables, graphics always checked first.
    passes = [
        (magic, '{}', lambda probe, cand: probe == cand),
        (magic[::-1], '{}_REVERSED', lambda probe, cand: probe == cand),
        (magic, '{}_PARTIAL', lambda probe, cand: probe[:3] == cand[:3]),
    ]
    for probe, fmt, matches in passes:
        for cand, name in graphics_magics:
            if matches(probe, cand):
                return ('RGCN', fmt.format(name))
        for cand, name in palette_magics:
            if matches(probe, cand):
                return ('RLCN', fmt.format(name))
    # Last resort: look for known section tags inside the header area.
    for offset in range(0, min(len(file_data) - 4, 128), 4):
        section = file_data[offset:offset + 4]
        if section in [b'RAHC', b'CHAR']:
            return ('RGCN', 'RGCN_BY_SECTION')
        if section in [b'TTLP', b'PLTT']:
            return ('RLCN', 'RLCN_BY_SECTION')
    return ('UNKNOWN', f'Magic: {magic.hex().upper()}')
|
||||
|
||||
def validate_tileset_files(rgcn_data: bytes, rlcn_data: bytes) -> Tuple[bool, str]:
    """Check that the two blobs really are a graphics/palette pair.

    Returns (True, 'Valid') on success, else (False, '; '-joined problems).
    """
    problems = []
    # Same checks for both halves; RGCN problems are reported first.
    for blob, expected in ((rgcn_data, 'RGCN'), (rlcn_data, 'RLCN')):
        detected, fmt = detect_file_type(blob)
        if detected != expected:
            problems.append(f'{expected} file invalid: detected as {detected} ({fmt})')
        if len(blob) < 32:
            problems.append(f'{expected} file too small: {len(blob)} bytes')
    if problems:
        return (False, '; '.join(problems))
    return (True, 'Valid')
|
||||
|
||||
def create_txif_rule(texture_id: int, rule_type: int=1, unknown1: int=0, unknown2: int=0) -> bytes:
    """Build one 8-byte TXIF rule: four little-endian u16 fields
    (rule_type, texture_id, unknown1, unknown2)."""
    return struct.pack('<4H', rule_type, texture_id, unknown1, unknown2)
|
||||
|
||||
def add_txif_rule(existing_txif: bytes, new_texture_id: int, rule_type: int=1) -> bytes:
    """Append a rule for *new_texture_id* to a TXIF section, bumping its count.

    Raises ValueError when the section is too short or lacks the TXIF magic.
    """
    if not existing_txif or len(existing_txif) < 8:
        raise ValueError('Invalid TXIF section')
    magic = existing_txif[:4]
    if magic != b'TXIF':
        raise ValueError(f'Invalid TXIF magic: {magic.hex()}')
    rule_count, override_flag = struct.unpack_from('<HH', existing_txif, 4)
    print(f' Current TXIF: {rule_count} rules')
    print(f' Adding rule for texture ID: {new_texture_id}')
    new_rule = create_txif_rule(new_texture_id, rule_type)
    # Rebuild the header with count+1, keep existing rules, append the new one.
    rebuilt = bytearray(b'TXIF')
    rebuilt += struct.pack('<H', rule_count + 1)
    rebuilt += struct.pack('<H', override_flag)
    rebuilt += existing_txif[8:]
    rebuilt += new_rule
    print(f' New TXIF: {rule_count + 1} rules')
    return bytes(rebuilt)
|
||||
|
||||
def determine_next_texture_id(dat_result: Dict, tex_result: Dict) -> int:
    """Return the texture ID for a new tileset.

    The new tileset is appended to the TEX, so its ID is simply the current
    tileset count.
    """
    next_id = tex_result.get('tileset_count', 0)
    print(f'\n=== Texture ID Determination ===')
    print(f' TEX currently has {next_id} tilesets')
    print(f' New tileset will be at position: {next_id}')
    print(f' Creating TXIF rule for texture ID: {next_id}')
    return next_id
|
||||
|
||||
def add_tileset_to_tex(tex_result: Dict, rgcn_data: bytes, rlcn_data: bytes) -> Dict:
    """Append a tileset entry to the parsed TEX and refresh its count.

    The blobs are stored under both the scrambled (RGCN/RLCN) and canonical
    (NCGR/NCLR) keys so every consumer finds them.  Mutates and returns
    *tex_result*.
    """
    new_index = len(tex_result['tilesets'])
    entry = {
        'index': new_index,
        'RGCN': rgcn_data,
        'RLCN': rlcn_data,
        'NCGR': rgcn_data,
        'NCLR': rlcn_data,
    }
    tex_result['tilesets'].append(entry)
    tex_result['tileset_count'] = len(tex_result['tilesets'])
    print(f'\n=== TEX Modification ===')
    print(f' Added tileset at index: {new_index}')
    print(f' RGCN size: {len(rgcn_data):,} bytes')
    print(f' RLCN size: {len(rlcn_data):,} bytes')
    print(f" Total tilesets: {tex_result['tileset_count']}")
    return tex_result
|
||||
|
||||
def build_tex_file(tex_result: Dict) -> bytes:
    """Rebuild an uncompressed TEX file: b'TEX\\0' + NARC of per-tileset NARCs.

    Each tileset becomes an inner NARC of [RGCN, RLCN]; entries missing
    either blob are skipped with a warning.
    """
    print(f'\n=== Building TEX File ===')
    tilesets = tex_result.get('tilesets', [])
    print(f' Building {len(tilesets)} tilesets')
    inner_narcs = []
    for ts in tilesets:
        rgcn = ts.get('RGCN')
        rlcn = ts.get('RLCN')
        if not rgcn or not rlcn:
            print(f" WARNING: Tileset {ts['index']} missing data, skipping")
            continue
        narc = build_narc([rgcn, rlcn])
        inner_narcs.append(narc)
        print(f" Tileset {ts['index']}: NARC size = {len(narc):,} bytes")
    outer_narc = build_narc(inner_narcs)
    print(f' Outer NARC size: {len(outer_narc):,} bytes')
    tex_file = b'TEX\x00' + outer_narc
    print(f' Final TEX size: {len(tex_file):,} bytes (uncompressed)')
    return tex_file
|
||||
|
||||
def modify_dat_txif(dat_result: Dict, new_texture_id: int) -> Dict:
    """Register *new_texture_id* in the DAT's TXIF section.

    Mutates and returns *dat_result*; raises ValueError when it has no TXIF.
    """
    current_txif = dat_result.get('txif')
    if not current_txif:
        raise ValueError('DAT file has no TXIF section')
    print(f'\n=== DAT TXIF Modification ===')
    dat_result['txif'] = add_txif_rule(current_txif, new_texture_id)
    return dat_result
|
||||
|
||||
def build_dat_file(dat_result: Dict) -> bytes:
    """Reassemble the uncompressed DAT NARC from MPIF/TXIF/LYR/CTA sections.

    Sections are emitted in the fixed order MPIF, TXIF, LYR, CTA, skipping
    any that are absent; the layers are rewrapped as b'LYR\\0' + inner NARC.
    """
    print(f'\n=== Building DAT File ===')
    sections = []
    mpif = dat_result.get('mpif')
    if mpif:
        sections.append(mpif)
        print(f" MPIF: {len(mpif):,} bytes")
    txif = dat_result.get('txif')
    if txif:
        sections.append(txif)
        print(f" TXIF: {len(txif):,} bytes")
    layers = dat_result.get('layers', [])
    if layers:
        blobs = [layer['data'] for layer in layers]
        lyr_section = b'LYR\x00' + build_narc(blobs)
        sections.append(lyr_section)
        print(f' LYR: {len(lyr_section):,} bytes ({len(layers)} layers)')
    cta = dat_result.get('cta')
    if cta:
        sections.append(cta)
        print(f" CTA: {len(cta):,} bytes")
    main_narc = build_narc(sections)
    print(f' Main NARC size: {len(main_narc):,} bytes (uncompressed)')
    return main_narc
|
||||
|
||||
def import_tileset(dat_path: str, tex_path: str, rgcn_path: str, rlcn_path: str, output_dat_path: str=None, output_tex_path: str=None) -> Tuple[bool, str]:
    """Import an RGCN/RLCN tileset pair into a map's DAT + TEX file pair.

    Appends the tileset to the TEX, registers a TXIF rule for it in the DAT,
    rebuilds and LZ10-compresses both files and writes them back (in place
    unless explicit output paths are given).

    Returns (success, human-readable message); never raises.
    """
    try:
        print('\n' + '=' * 60)
        print('=== TILESET IMPORT OPERATION ===')
        print('=' * 60)
        # Default to overwriting the input files.
        if output_dat_path is None:
            output_dat_path = dat_path
        if output_tex_path is None:
            output_tex_path = tex_path
        print('\n=== Step 1: Loading Files ===')
        print(f' RGCN: {rgcn_path}')
        print(f' RLCN: {rlcn_path}')
        with open(rgcn_path, 'rb') as f:
            rgcn_data = f.read()
        with open(rlcn_path, 'rb') as f:
            rlcn_data = f.read()
        print(f' RGCN size: {len(rgcn_data):,} bytes')
        print(f' RLCN size: {len(rlcn_data):,} bytes')
        rgcn_type, rgcn_format = detect_file_type(rgcn_data)
        rlcn_type, rlcn_format = detect_file_type(rlcn_data)
        print(f' RGCN detected as: {rgcn_type} ({rgcn_format})')
        print(f' RLCN detected as: {rlcn_type} ({rlcn_format})')
        # Abort before touching the map files when the inputs are not valid.
        is_valid, error_msg = validate_tileset_files(rgcn_data, rlcn_data)
        if not is_valid:
            return (False, f'Validation failed: {error_msg}')
        print(' Validation: PASSED')
        print('\n=== Step 2: Parsing Map Files ===')
        print(f' DAT: {dat_path}')
        print(f' TEX: {tex_path}')
        dat_result = parse_dat_map(dat_path)
        tex_result = parse_tex_map(tex_path)
        if not dat_result or not tex_result:
            return (False, 'Failed to parse map files')
        print(f" DAT parsed: {len(dat_result.get('layers', []))} layers")
        print(f" TEX parsed: {tex_result.get('tileset_count', 0)} tilesets")
        print('\n=== Step 3: Determining Texture ID ===')
        new_texture_id = determine_next_texture_id(dat_result, tex_result)
        print('\n=== Step 4: Adding Tileset to TEX ===')
        tex_result = add_tileset_to_tex(tex_result, rgcn_data, rlcn_data)
        print('\n=== Step 5: Modifying DAT TXIF ===')
        dat_result = modify_dat_txif(dat_result, new_texture_id)
        print('\n=== Step 6: Building Files ===')
        new_tex = build_tex_file(tex_result)
        new_dat = build_dat_file(dat_result)
        print('\n=== Step 7: Compressing and Saving ===')
        print(' Compressing TEX...')
        tex_compressed = compress_lz10(new_tex)
        print(f' TEX compressed: {len(tex_compressed):,} bytes')
        print(' Compressing DAT...')
        dat_compressed = compress_lz10(new_dat)
        print(f' DAT compressed: {len(dat_compressed):,} bytes')
        print(f'\n Saving TEX to: {output_tex_path}')
        with open(output_tex_path, 'wb') as f:
            f.write(tex_compressed)
        print(f' Saving DAT to: {output_dat_path}')
        with open(output_dat_path, 'wb') as f:
            f.write(dat_compressed)
        print('\n' + '=' * 60)
        print('=== IMPORT COMPLETE ===')
        print('=' * 60)
        print(f'Successfully imported tileset!')
        print(f' New tileset index: {new_texture_id}')
        print(f" Total tilesets: {tex_result['tileset_count']}")
        print(f' Files saved:')
        print(f' - {output_dat_path}')
        print(f' - {output_tex_path}')
        print('=' * 60)
        return (True, f'Tileset imported successfully as texture ID {new_texture_id}')
    except Exception as e:
        # Any failure is reported to the caller instead of propagating so the
        # GUI can show it; the traceback still goes to the console.
        error_msg = f'Import failed: {str(e)}'
        print(f'\nERROR: {error_msg}')
        import traceback
        traceback.print_exc()
        return (False, error_msg)
|
||||
|
||||
def import_tileset_auto_detect(dat_path: str, tex_path: str, file1_path: str, file2_path: str, output_dat_path: str=None, output_tex_path: str=None) -> Tuple[bool, str]:
    """Import a tileset from two files whose roles (graphics vs palette) are
    detected automatically, then delegate to import_tileset().

    Returns (success, message); fails when the pair is not one RGCN plus
    one RLCN.
    """
    print('\n=== Auto-Detecting File Types ===')
    with open(file1_path, 'rb') as f:
        file1_data = f.read()
    with open(file2_path, 'rb') as f:
        file2_data = f.read()
    file1_type, file1_format = detect_file_type(file1_data)
    file2_type, file2_format = detect_file_type(file2_data)
    print(f' File 1: {file1_type} ({file1_format})')
    print(f' File 2: {file2_type} ({file2_format})')
    # Accept the pair in either order.
    if file1_type == 'RGCN' and file2_type == 'RLCN':
        rgcn_path = file1_path
        rlcn_path = file2_path
    elif file1_type == 'RLCN' and file2_type == 'RGCN':
        rgcn_path = file2_path
        rlcn_path = file1_path
    else:
        return (False, f'Could not identify files: {file1_type} and {file2_type}')
    print(f' Identified: RGCN = {Path(rgcn_path).name}, RLCN = {Path(rlcn_path).name}')
    return import_tileset(dat_path, tex_path, rgcn_path, rlcn_path, output_dat_path, output_tex_path)
|
||||
|
||||
def get_file_info(file_path: str) -> Dict:
    """Summarize a file on disk: name, size, detected tileset type and magic."""
    raw = Path(file_path).read_bytes()
    file_type, format_name = detect_file_type(raw)
    return {
        'path': file_path,
        'name': Path(file_path).name,
        'size': len(raw),
        'type': file_type,
        'format': format_name,
        'magic': raw[:4].hex().upper() if len(raw) >= 4 else 'N/A',
    }
|
||||
|
||||
def print_file_info(file_path: str):
    """Print the get_file_info() summary for *file_path* to stdout."""
    details = get_file_info(file_path)
    print(f"\nFile: {details['name']}")
    print(f" Path: {details['path']}")
    print(f" Size: {details['size']:,} bytes")
    print(f" Type: {details['type']}")
    print(f" Format: {details['format']}")
    print(f" Magic: {details['magic']}")
|
||||
117
load/lz10util.py
Normal file
117
load/lz10util.py
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
def decompress_lz10(data: bytes) -> bytes:
    """Decompress an NDS LZ10 (type 0x10) stream.

    Returns *data* unchanged when it does not start with the 0x10 type byte.
    Truncated streams are decoded as far as possible rather than raising.
    """
    if not data or data[0] != 0x10:
        return data
    # Header: type byte, then 24-bit little-endian decompressed size.
    dst_size = int.from_bytes(data[1:4], 'little')
    out = bytearray()
    pos = 4
    while len(out) < dst_size and pos < len(data):
        flags = data[pos]
        pos += 1
        for bit in range(8):
            if len(out) >= dst_size or pos >= len(data):
                break
            if not flags & (0x80 >> bit):
                # Flag bit clear: literal byte.
                out.append(data[pos])
                pos += 1
                continue
            # Flag bit set: back-reference of 4-bit length (+3) and
            # 12-bit displacement (+1).
            if pos + 1 >= len(data):
                break
            b1 = data[pos]
            b2 = data[pos + 1]
            pos += 2
            disp = ((b1 & 0x0F) << 8) | b2
            length = (b1 >> 4) + 3
            src = len(out) - (disp + 1)
            for _ in range(length):
                if src < 0 or src >= len(out):
                    break
                out.append(out[src])
                src += 1
    return bytes(out[:dst_size])
|
||||
|
||||
def compress_lz10(data: bytes) -> bytes:
    """Compress *data* into NDS LZ10 (type 0x10) format.

    Uses a hash-chain match finder over a 4096-byte sliding window with
    match lengths 3..18.  The output always starts with the 4-byte header
    (0x10 type byte + 24-bit little-endian uncompressed size) and is
    decodable by decompress_lz10().
    """
    n = len(data)
    output = bytearray()
    # Header: type byte 0x10, then the uncompressed size (little-endian 24-bit).
    output.append(16)
    output.append(n & 255)
    output.append(n >> 8 & 255)
    output.append(n >> 16 & 255)
    if n == 0:
        return bytes(output)
    # Hash chains: head[h] holds the most recent position whose 3-byte hash
    # is h; lru chains older positions.  NOTE(review): lru is indexed by
    # position & (HASH_SIZE-1), so distinct positions can collide and
    # truncate a chain — matches are byte-verified before use, so output
    # stays correct; only the compression ratio can suffer.
    HASH_SIZE = 1 << 15
    head = [-1] * HASH_SIZE
    lru = [-1] * HASH_SIZE

    def hash3(pos: int) -> int:
        # Hash of the 3 bytes at pos (0 when fewer than 3 bytes remain).
        if pos + 2 >= n:
            return 0
        return (data[pos] << 16 | data[pos + 1] << 8 | data[pos + 2]) & HASH_SIZE - 1

    def find_best_match(pos: int) -> tuple:
        # Return (length, encoded_distance) of the best match starting at
        # pos, or (0, 0) when no match of length >= 3 exists in the window.
        if pos + 2 >= n:
            return (0, 0)
        best_len = 0
        best_dist = 0
        h = hash3(pos)
        j = head[h]
        checked = 0
        max_checks = 64  # cap chain walks to bound worst-case time
        while j >= 0 and checked < max_checks:
            if pos - j > 4096:
                break  # candidate fell out of the LZ10 window
            if j >= pos:
                break
            match_len = 0
            limit = min(18, n - pos)  # LZ10 maximum match length is 18
            while match_len < limit and data[j + match_len] == data[pos + match_len]:
                match_len += 1
            if match_len >= 3 and match_len > best_len:
                best_len = match_len
                best_dist = pos - j - 1  # LZ10 stores distance - 1
                if best_len == 18:
                    break
            j = lru[j & HASH_SIZE - 1]
            checked += 1
        return (best_len, best_dist)

    # Pre-seed the chains with the first window's worth of positions.
    for i in range(min(4096, n - 2)):
        h = hash3(i)
        lru[i & HASH_SIZE - 1] = head[h]
        head[h] = i
    pos = 0
    while pos < n:
        # Reserve the flag byte for this block of up to 8 tokens.
        block_header_pos = len(output)
        output.append(0)
        flags = 0
        for bit in range(8):
            if pos >= n:
                break
            # Keep the chains topped up one window ahead of the cursor.
            look_ahead = pos + 4096
            if look_ahead < n - 2:
                h = hash3(look_ahead)
                lru[look_ahead & HASH_SIZE - 1] = head[h]
                head[h] = look_ahead
            best_match_len, best_match_dist = find_best_match(pos)
            if best_match_len >= 3:
                # Compressed token: set the flag bit, emit length|distance.
                flags |= 1 << 7 - bit
                length_part = best_match_len - 3 & 15
                dist_high = best_match_dist >> 8 & 15
                dist_low = best_match_dist & 255
                output.append(length_part << 4 | dist_high)
                output.append(dist_low)
                # Register every position the match skips over.
                for skip in range(1, best_match_len):
                    if pos + skip < n - 2:
                        h = hash3(pos + skip)
                        idx = pos + skip & HASH_SIZE - 1
                        lru[idx] = head[h]
                        head[h] = pos + skip
                pos += best_match_len
            else:
                # Literal token (flag bit stays 0).
                output.append(data[pos])
                if pos < n - 2:
                    h = hash3(pos)
                    idx = pos & HASH_SIZE - 1
                    lru[idx] = head[h]
                    head[h] = pos
                pos += 1
        # Patch the flag byte now that the block's tokens are known.
        output[block_header_pos] = flags
    return bytes(output)
|
||||
143
load/maploader.py
Normal file
143
load/maploader.py
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
from pathlib import Path
|
||||
from typing import Optional, Dict, List
|
||||
from load.datparser import parse_dat_map
|
||||
from load.texparser import parse_tex_map
|
||||
|
||||
class MapData:
    """Container for one loaded map: parsed .map.dat and .map.tex contents."""

    def __init__(self, map_name: str):
        self.map_name = map_name  # display name of the map
        self.dat_data = None      # result of parse_dat_map(), once loaded
        self.tex_data = None      # result of parse_tex_map(), once loaded
        self.loaded = False       # set True by the loader after both parses succeed

    def is_loaded(self):
        """True only when both the DAT and TEX halves were parsed."""
        return self.loaded and self.dat_data is not None and (self.tex_data is not None)

    def get_tileset_count(self) -> int:
        """Number of tilesets in the TEX data (0 when not loaded)."""
        return self.tex_data.get('tileset_count', 0) if self.tex_data else 0

    def get_layer_count(self) -> int:
        """Number of map layers in the DAT data (0 when not loaded)."""
        return len(self.dat_data.get('layers', [])) if self.dat_data else 0

    def has_mpif(self) -> bool:
        """True when the DAT carried an MPIF section."""
        return self._has_section('mpif')

    def has_txif(self) -> bool:
        """True when the DAT carried a TXIF section."""
        return self._has_section('txif')

    def has_cta(self) -> bool:
        """True when the DAT carried a CTA section."""
        return self._has_section('cta')

    def _has_section(self, key: str) -> bool:
        """True when dat_data exists and holds a non-None *key* section."""
        return self.dat_data is not None and self.dat_data.get(key) is not None
|
||||
|
||||
class MapLoader:
|
||||
|
||||
    def __init__(self):
        # Most recently loaded MapData (None until load_map() succeeds).
        self.current_map = None
        # Optional callback invoked with the MapData after a successful load.
        self.on_map_loaded = None
|
||||
|
||||
def load_map(self, dat_path: Path, tex_path: Path, map_name: str) -> Optional[MapData]:
|
||||
try:
|
||||
print(f'\n=== Loading Map: {map_name} ===')
|
||||
map_data = MapData(map_name)
|
||||
print(f'Parsing DAT file: {dat_path.name}')
|
||||
dat_result = parse_dat_map(str(dat_path))
|
||||
if dat_result:
|
||||
map_data.dat_data = dat_result
|
||||
print(f" - MPIF: {('Found' if dat_result.get('mpif') else 'Missing')}")
|
||||
print(f" - TXIF: {('Found' if dat_result.get('txif') else 'Missing')}")
|
||||
print(f" - Layers: {len(dat_result.get('layers', []))}")
|
||||
print(f" - CTA: {('Found' if dat_result.get('cta') else 'Missing')}")
|
||||
else:
|
||||
print(' - ERROR: Failed to parse DAT file')
|
||||
return None
|
||||
print(f'Parsing TEX file: {tex_path.name}')
|
||||
tex_result = parse_tex_map(str(tex_path))
|
||||
if tex_result:
|
||||
map_data.tex_data = tex_result
|
||||
tileset_count = tex_result.get('tileset_count', 0)
|
||||
print(f' - Tilesets: {tileset_count}')
|
||||
for ts in tex_result.get('tilesets', []):
|
||||
idx = ts.get('index', -1)
|
||||
has_rgcn = 'Yes' if ts.get('RGCN') else 'No'
|
||||
has_rlcn = 'Yes' if ts.get('RLCN') else 'No'
|
||||
print(f' Tileset {idx}: RGCN={has_rgcn}, RLCN={has_rlcn}')
|
||||
if 'error' in ts:
|
||||
print(f" ERROR: {ts['error']}")
|
||||
elif 'warning' in ts:
|
||||
print(f" WARNING: {ts['warning']}")
|
||||
else:
|
||||
print(' - ERROR: Failed to parse TEX file')
|
||||
return None
|
||||
map_data.loaded = True
|
||||
self.current_map = map_data
|
||||
print(f'=== Map Loaded Successfully ===\n')
|
||||
if self.on_map_loaded:
|
||||
self.on_map_loaded(map_data)
|
||||
return map_data
|
||||
except Exception as e:
|
||||
print(f'ERROR loading map: {e}')
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return None
|
||||
|
||||
def get_current_map(self) -> Optional[MapData]:
|
||||
return self.current_map
|
||||
|
||||
def get_layers(self) -> List[Dict]:
|
||||
if self.current_map and self.current_map.dat_data:
|
||||
return self.current_map.dat_data.get('layers', [])
|
||||
return []
|
||||
|
||||
def get_tilesets(self) -> List[Dict]:
|
||||
if self.current_map and self.current_map.tex_data:
|
||||
return self.current_map.tex_data.get('tilesets', [])
|
||||
return []
|
||||
|
||||
def get_tileset(self, index: int) -> Optional[Dict]:
|
||||
tilesets = self.get_tilesets()
|
||||
if 0 <= index < len(tilesets):
|
||||
return tilesets[index]
|
||||
return None
|
||||
|
||||
def get_tileset_for_rendering(self, index: int) -> Optional[Dict]:
|
||||
tileset = self.get_tileset(index)
|
||||
if not tileset:
|
||||
return None
|
||||
rgcn = tileset.get('RGCN') or tileset.get('NCGR')
|
||||
rlcn = tileset.get('RLCN') or tileset.get('NCLR')
|
||||
render_data = {'index': index, 'RGCN': rgcn, 'RLCN': rlcn, 'NCGR': rgcn, 'NCLR': rlcn, 'has_graphics': rgcn is not None and len(rgcn) > 0, 'has_palette': rlcn is not None and len(rlcn) > 0}
|
||||
if 'error' in tileset:
|
||||
render_data['error'] = tileset['error']
|
||||
if 'warning' in tileset:
|
||||
render_data['warning'] = tileset['warning']
|
||||
return render_data
|
||||
|
||||
def get_all_tilesets_for_rendering(self) -> List[Dict]:
|
||||
tilesets = self.get_tilesets()
|
||||
render_tilesets = []
|
||||
for i, ts in enumerate(tilesets):
|
||||
render_data = self.get_tileset_for_rendering(i)
|
||||
if render_data:
|
||||
render_tilesets.append(render_data)
|
||||
return render_tilesets
|
||||
|
||||
def get_layer(self, layer_type: int) -> Optional[Dict]:
|
||||
layers = self.get_layers()
|
||||
for layer in layers:
|
||||
if layer.get('type') == layer_type:
|
||||
return layer
|
||||
return None
|
||||
|
||||
def get_layer_data(self, layer_type: int) -> Optional[bytes]:
|
||||
layer = self.get_layer(layer_type)
|
||||
if layer:
|
||||
return layer.get('data')
|
||||
return None
|
||||
|
||||
def clear(self):
|
||||
self.current_map = None
|
||||
print('MapLoader: Cleared current map data')
|
||||
76
load/narcutil.py
Normal file
76
load/narcutil.py
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
import struct
|
||||
from typing import List
|
||||
|
||||
def u16(b: bytes, o: int) -> int:
    """Read a little-endian unsigned 16-bit value from *b* at offset *o*."""
    low, high = b[o], b[o + 1]
    return high << 8 | low
||||
def u32(b: bytes, o: int) -> int:
|
||||
return b[o] | b[o + 1] << 8 | b[o + 2] << 16 | b[o + 3] << 24
|
||||
|
||||
def parse_narc(blob: bytes) -> List[bytes]:
    """Split a NARC archive into its member files.

    Reads the BTAF file-allocation table for (start, end) offsets, skips
    the BTNF name table, and slices each member out of the GMIF image
    section.  Raises ValueError when any expected chunk magic is absent.
    """
    if blob[:4] != b'NARC':
        raise ValueError('Not a NARC file')
    cursor = 16  # first chunk follows the 16-byte NARC header
    if blob[cursor:cursor + 4] != b'BTAF':
        raise ValueError('NARC missing BTAF')
    fat_size = u32(blob, cursor + 4)
    file_count = u32(blob, cursor + 8)
    table_base = cursor + 12
    fat_entries = [
        (u32(blob, table_base + i * 8), u32(blob, table_base + i * 8 + 4))
        for i in range(file_count)
    ]
    name_table = cursor + fat_size
    if blob[name_table:name_table + 4] != b'BTNF':
        raise ValueError('NARC missing BTNF')
    image_off = name_table + u32(blob, name_table + 4)
    if blob[image_off:image_off + 4] != b'GMIF':
        raise ValueError('NARC missing GMIF')
    data_base = image_off + 8
    return [blob[data_base + start:data_base + end] for start, end in fat_entries]
||||
def build_narc(files: List[bytes]) -> bytes:
    """Build a NARC archive containing *files*.

    Layout: 16-byte NARC header, BTAF (file-allocation table with a
    start/end offset pair per file), a minimal empty BTNF name table, and
    GMIF holding the file images, each padded to a 4-byte boundary.

    Fix: FAT end offsets previously pointed at the next file's padded
    start, so round-tripping through parse_narc returned each file with
    its alignment padding appended.  End offsets now record the exact
    unpadded end of each file.
    """

    def align4(n: int) -> int:
        # Round n up to the next multiple of 4.
        return n + 3 & ~3

    gmif_data = bytearray()
    fat = []  # (start, end) per file, relative to the GMIF data area
    for file_data in files:
        start = len(gmif_data)
        gmif_data.extend(file_data)
        fat.append((start, start + len(file_data)))  # end excludes padding
        padding = -len(gmif_data) & 3
        if padding:
            gmif_data.extend(b'\x00' * padding)
    gmif_size = 8 + len(gmif_data)
    gmif_section = b'GMIF' + struct.pack('<I', gmif_size) + bytes(gmif_data)
    file_count = len(files)
    btaf_entries = bytearray()
    for start_offset, end_offset in fat:
        btaf_entries.extend(struct.pack('<II', start_offset, end_offset))
    btaf_size = align4(12 + len(btaf_entries))
    btaf_padding = btaf_size - (12 + len(btaf_entries))
    btaf_section = b'BTAF' + struct.pack('<II', btaf_size, file_count) + bytes(btaf_entries) + b'\x00' * btaf_padding
    # Minimal BTNF: header plus 8 zero bytes (empty root directory entry).
    btnf_section = b'BTNF' + struct.pack('<I', 16) + b'\x00' * 8
    total_size = 16 + len(btaf_section) + len(btnf_section) + len(gmif_section)
    header = b'NARC'
    header += b'\xfe\xff'            # byte-order mark 0xFFFE
    header += b'\x00\x01'            # version
    header += struct.pack('<I', total_size)
    header += struct.pack('<H', 16)  # header size
    header += struct.pack('<H', 3)   # chunk count (BTAF, BTNF, GMIF)
    return header + btaf_section + btnf_section + gmif_section
||||
462
load/pngtilesettransfer.py
Normal file
462
load/pngtilesettransfer.py
Normal file
|
|
@ -0,0 +1,462 @@
|
|||
from pathlib import Path
|
||||
from typing import Tuple, Optional, List
|
||||
import struct
|
||||
from PIL import Image
|
||||
from load.lz10util import compress_lz10, decompress_lz10
|
||||
from load.narcutil import parse_narc, build_narc
|
||||
from load.datparser import parse_dat_map
|
||||
from load.texparser import parse_tex_map
|
||||
|
||||
def quantize_image_simple(img: Image.Image) -> Tuple[Optional[Image.Image], Optional[List[Tuple[int, int, int]]]]:
    """Quantize *img* down to a single 256-colour palette.

    Two quantize passes (FASTOCTREE, no dithering): the first derives a
    palette, then the image is recombined with its original alpha channel
    and quantized again so the final palette reflects the alpha-restored
    pixels.

    Returns:
        ('P'-mode image, palette as a list of (r, g, b) tuples), or
        (None, None) when Pillow yields no palette on the first pass.
    """
    if img.mode != 'RGBA':
        img = img.convert('RGBA')
    quantized_img = img.quantize(colors=256, method=Image.Quantize.FASTOCTREE, dither=Image.Dither.NONE)
    palette_flat = quantized_img.getpalette()
    if not palette_flat:
        return (None, None)
    # Group Pillow's flat [r, g, b, r, g, b, ...] list into tuples.
    palette = []
    for i in range(0, min(len(palette_flat), 256 * 3), 3):
        palette.append((palette_flat[i], palette_flat[i + 1], palette_flat[i + 2]))
    # Restore the source alpha before the second quantize pass.
    quantized_img = quantized_img.convert('RGBA')
    alpha = img.getchannel('A')
    quantized_img.putalpha(alpha)
    final_quantized = quantized_img.quantize(colors=256, method=Image.Quantize.FASTOCTREE, dither=Image.Dither.NONE)
    final_palette_flat = final_quantized.getpalette()
    final_palette = []
    if final_palette_flat:
        for i in range(0, min(len(final_palette_flat), 256 * 3), 3):
            final_palette.append((final_palette_flat[i], final_palette_flat[i + 1], final_palette_flat[i + 2]))
    return (final_quantized.convert('P'), final_palette)
||||
def quantize_image_with_banks(img: Image.Image, max_colors: int=15) -> Tuple[Optional[Image.Image], Optional[List[Tuple[int, int, int]]]]:
    """Quantize *img* to at most *max_colors* colours plus transparency.

    Pixels with alpha < 128 map to palette index 0; opaque pixels are
    quantized (FASTOCTREE, no dithering) against a black background and
    their indices shifted up by one so slots 1..15 hold real colours.

    Returns:
        ('P'-mode image, exactly-16-entry palette whose slot 0 is black),
        or (None, None) when Pillow yields no palette.
    """
    if img.mode != 'RGBA':
        img = img.convert('RGBA')
    alpha = img.getchannel('A')
    # Composite over black so transparent areas don't pollute the palette.
    img_with_bg = Image.new('RGBA', img.size, (0, 0, 0, 255))
    img_with_bg.paste(img, mask=alpha)
    quantized_img = img_with_bg.quantize(colors=max_colors, method=Image.Quantize.FASTOCTREE, dither=Image.Dither.NONE)
    palette_flat = quantized_img.getpalette()
    if not palette_flat:
        return (None, None)
    palette = []
    palette.append((0, 0, 0))  # slot 0 reserved for transparency
    for i in range(0, min(len(palette_flat), max_colors * 3), 3):
        palette.append((palette_flat[i], palette_flat[i + 1], palette_flat[i + 2]))
    pixel_data = list(quantized_img.getdata())
    new_pixel_data = []
    # Shift opaque indices by +1 (slot 0 is transparent); clamp to 15.
    for idx, alpha_val in enumerate(alpha.getdata()):
        if alpha_val < 128:
            new_pixel_data.append(0)
        else:
            new_pixel_data.append(pixel_data[idx] + 1 if pixel_data[idx] < 15 else 15)
    final_img = Image.new('P', quantized_img.size)
    final_img.putdata(new_pixel_data)
    # Pad/trim the palette to exactly 16 entries.
    while len(palette) < 16:
        palette.append((0, 0, 0))
    palette = palette[:16]
    return (final_img, palette)
||||
def build_rlcn_256color(palette: List[Tuple[int, int, int]]) -> bytes:
    """Build an RLCN (palette) file holding a single 256-colour palette.

    Each RGB888 entry is packed to BGR555.  The input is padded with black
    / truncated to exactly 256 entries.

    Fix: the caller's list was previously padded in place; the function
    now works on a copy and leaves the argument untouched.
    """
    # Normalize on a copy so the caller's list is not mutated.
    colors = (list(palette) + [(0, 0, 0)] * 256)[:256]
    ttlp_data = bytearray()
    for r, g, b in colors:
        r5 = r >> 3 & 31
        g5 = g >> 3 & 31
        b5 = b >> 3 & 31
        ttlp_data.extend(struct.pack('<H', b5 << 10 | g5 << 5 | r5))
    ttlp_header = b'TTLP'
    ttlp_header += struct.pack('<I', 536)  # section size: 24-byte header + 512 bytes of colours
    ttlp_header += struct.pack('<I', 3)    # bit-depth code, as written elsewhere in this tool
    ttlp_header += struct.pack('<I', 0)
    ttlp_header += struct.pack('<I', 512)  # palette data size in bytes
    ttlp_header += struct.pack('<I', 16)
    ttlp_section = ttlp_header + ttlp_data
    rlcn_header = b'RLCN'
    rlcn_header += b'\xff\xfe\x01\x00'     # BOM + version
    rlcn_header += struct.pack('<I', 552)  # total file size (16 + 536)
    rlcn_header += struct.pack('<H', 16)   # header size
    rlcn_header += struct.pack('<H', 1)    # section count
    return rlcn_header + ttlp_section
||||
def build_rlcn_with_banks(palette: List[Tuple[int, int, int]], num_banks: int=16) -> bytes:
    """Build an RLCN palette file made of *num_banks* 16-colour banks.

    Bank 0 holds the supplied palette (slot 0 forced to black, the
    transparent index); every other bank repeats the palette's colours,
    substituting a grey ramp for slots the palette doesn't cover.

    Fix: the TTLP/RLCN size fields were hard-coded for 16 banks, so any
    other *num_banks* produced inconsistent headers.  Sizes are now
    derived from the data actually written; the default (16 banks) output
    is byte-identical to before.
    """
    full_palette = []
    for bank in range(num_banks):
        if bank == 0:
            for i, (r, g, b) in enumerate(palette[:16]):
                # Slot 0 is the transparent colour and is always black.
                full_palette.append((0, 0, 0) if i == 0 else (r, g, b))
            while len(full_palette) < (bank + 1) * 16:
                full_palette.append((0, 0, 0))
        else:
            for i in range(16):
                if i == 0:
                    full_palette.append((0, 0, 0))
                elif len(palette) > i:
                    full_palette.append(palette[i])
                else:
                    # Grey ramp filler for uncovered slots.
                    gray = i * 17 % 256
                    full_palette.append((gray, gray, gray))
    ttlp_data = bytearray()
    for r, g, b in full_palette:
        r5 = r >> 3 & 31
        g5 = g >> 3 & 31
        b5 = b >> 3 & 31
        ttlp_data.extend(struct.pack('<H', b5 << 10 | g5 << 5 | r5))
    # Sizes derived from the data actually written (24-byte TTLP header).
    ttlp_section_size = 24 + len(ttlp_data)
    ttlp_header = b'TTLP'
    ttlp_header += struct.pack('<I', ttlp_section_size)
    ttlp_header += struct.pack('<I', 3)               # bit-depth code
    ttlp_header += struct.pack('<I', 0)
    ttlp_header += struct.pack('<I', len(ttlp_data))  # palette data size
    ttlp_header += struct.pack('<I', 16)
    ttlp_section = ttlp_header + ttlp_data
    file_size = 16 + ttlp_section_size
    rlcn_header = b'RLCN'
    rlcn_header += b'\xff\xfe\x01\x00'                # BOM + version
    rlcn_header += struct.pack('<I', file_size)
    rlcn_header += struct.pack('<H', 16)              # header size
    rlcn_header += struct.pack('<H', 1)               # section count
    return rlcn_header + ttlp_section
||||
def build_rgcn(img: Image.Image) -> bytes:
    """Build an RGCN (4bpp tile graphics) file from a 'P'-mode image.

    The image is padded to 8-pixel multiples, its palette indices clamped
    to 4 bits and packed two per byte (low nibble first), then wrapped in
    RAHC + SOPC sections under an RGCN header.

    NOTE(review): indices are emitted in plain row-major order, not
    rearranged into 8x8 tiles -- presumably the consumer expects this
    linear layout; confirm against the renderer.
    """
    width, height = img.size
    # Pad to the next 8-pixel boundary; index 0 fills the new area.
    if width % 8 != 0 or height % 8 != 0:
        new_width = (width + 7) // 8 * 8
        new_height = (height + 7) // 8 * 8
        padded_img = Image.new('P', (new_width, new_height), 0)
        padded_img.paste(img, (0, 0))
        img = padded_img
        width, height = (new_width, new_height)
        print(f' Image padded to {width}x{height} for 8-pixel alignment')
    linear_indices = bytearray()
    pixels = list(img.getdata())
    for y in range(height):
        for x in range(width):
            pixel_index = y * width + x
            if pixel_index < len(pixels):
                linear_indices.append(pixels[pixel_index] & 15)  # clamp to a 4-bit index
            else:
                linear_indices.append(0)
    # Pack two 4-bit indices per byte, low nibble = first pixel.
    packed_data = bytearray()
    for i in range(0, len(linear_indices), 2):
        low_nibble = linear_indices[i] & 15
        high_nibble = (linear_indices[i + 1] & 15) << 4 if i + 1 < len(linear_indices) else 0
        packed_data.append(low_nibble | high_nibble)
    tile_data_size = len(packed_data)
    char_section_size = 32 + tile_data_size
    # RAHC section header (32 bytes including the 8-byte magic+size).
    char_header = b'RAHC'
    char_header += struct.pack('<I', char_section_size)
    char_header += struct.pack('<H', height // 8)  # tile rows
    char_header += struct.pack('<H', width // 8)   # tile columns
    char_header += struct.pack('<I', 3)            # pixel-format code (4bpp)
    char_header += struct.pack('<I', 0)
    char_header += struct.pack('<I', 1)
    char_header += struct.pack('<I', tile_data_size)
    char_header += struct.pack('<I', 24)           # offset to tile data
    char_section = char_header + packed_data
    # SOPC trailer carrying the tile dimensions.
    sopc_section = b'SOPC'
    sopc_section += struct.pack('<I', 16)
    sopc_section += struct.pack('<I', 0)
    sopc_section += struct.pack('<H', width // 8)
    sopc_section += struct.pack('<H', height // 8)
    file_size = 16 + len(char_section) + len(sopc_section)
    rgcn_header = b'RGCN'
    rgcn_header += b'\xff\xfe\x01\x01'             # BOM + version
    rgcn_header += struct.pack('<I', file_size)
    rgcn_header += struct.pack('<H', 16)           # header size
    rgcn_header += struct.pack('<H', 2)            # section count (RAHC, SOPC)
    return rgcn_header + char_section + sopc_section
||||
def convert_png_to_tileset(png_path: str, use_tile_banks: bool=True, output_rgcn_path: str=None, output_rlcn_path: str=None) -> Tuple[bool, str, Optional[bytes], Optional[bytes]]:
    """Convert a PNG into an RGCN/RLCN tileset pair.

    Args:
        png_path: source image file.
        use_tile_banks: True -> 15-colour tile-banking mode, False ->
            single 256-colour palette.
        output_rgcn_path / output_rlcn_path: optional paths; when given,
            the generated sections are also written to disk.

    Returns:
        (success, message, rgcn_bytes, rlcn_bytes); the byte fields are
        None on failure.  Never raises -- errors are reported in the tuple.
    """
    try:
        print(f'\n=== Converting PNG to Tileset ===')
        print(f' Input: {png_path}')
        print(f" Mode: {('Tile Banking (15 colors)' if use_tile_banks else 'Standard (256 colors)')}")
        img = Image.open(png_path)
        print(f' Image size: {img.size[0]}x{img.size[1]}')
        print(f' Image mode: {img.mode}')
        if use_tile_banks:
            quantized_img, palette = quantize_image_with_banks(img, max_colors=15)
        else:
            quantized_img, palette = quantize_image_simple(img)
        if quantized_img is None or palette is None:
            return (False, 'Image quantization failed', None, None)
        print(f' Quantized to {len(palette)} palette colors')
        print(' Building RGCN...')
        rgcn_data = build_rgcn(quantized_img)
        print(f' RGCN size: {len(rgcn_data):,} bytes')
        print(' Building RLCN...')
        if use_tile_banks:
            rlcn_data = build_rlcn_with_banks(palette, num_banks=16)
        else:
            rlcn_data = build_rlcn_256color(palette)
        print(f' RLCN size: {len(rlcn_data):,} bytes')
        # Optional side outputs for standalone use.
        if output_rgcn_path:
            with open(output_rgcn_path, 'wb') as f:
                f.write(rgcn_data)
            print(f' Saved RGCN: {output_rgcn_path}')
        if output_rlcn_path:
            with open(output_rlcn_path, 'wb') as f:
                f.write(rlcn_data)
            print(f' Saved RLCN: {output_rlcn_path}')
        print('=== Conversion Complete ===\n')
        return (True, 'PNG converted successfully', rgcn_data, rlcn_data)
    except Exception as e:
        error_msg = f'PNG conversion failed: {str(e)}'
        print(f'ERROR: {error_msg}')
        import traceback
        traceback.print_exc()
        return (False, error_msg, None, None)
||||
def create_txif_rule(texture_id: int, rule_type: int=1, unknown1: int=0, unknown2: int=0) -> bytes:
    """Pack one 8-byte TXIF rule: four little-endian u16 fields in the
    order (rule_type, texture_id, unknown1, unknown2)."""
    return struct.pack('<HHHH', rule_type, texture_id, unknown1, unknown2)
||||
def add_txif_rule(existing_txif: bytes, new_texture_id: int, rule_type: int=1) -> bytes:
    """Return a copy of *existing_txif* with one extra rule appended.

    Layout: 4-byte 'TXIF' magic, u16 rule count, u16 override flag, then
    8-byte rules.  The returned section's rule count is bumped by one; the
    input bytes are not modified.  Raises ValueError on a malformed section.
    """
    if not existing_txif or len(existing_txif) < 8:
        raise ValueError('Invalid TXIF section')
    magic = existing_txif[:4]
    if magic != b'TXIF':
        raise ValueError(f'Invalid TXIF magic: {magic.hex()}')
    rule_count, override_flag = struct.unpack('<HH', existing_txif[4:8])
    print(f' Current TXIF: {rule_count} rules')
    print(f' Adding rule for texture ID: {new_texture_id}')
    new_rule = create_txif_rule(new_texture_id, rule_type)
    rebuilt = bytearray(b'TXIF')
    rebuilt += struct.pack('<H', rule_count + 1)
    rebuilt += struct.pack('<H', override_flag)
    rebuilt += existing_txif[8:]   # existing rules, unchanged
    rebuilt += new_rule
    print(f' New TXIF: {rule_count + 1} rules')
    return bytes(rebuilt)
||||
def add_tileset_to_tex(tex_result: dict, rgcn_data: bytes, rlcn_data: bytes) -> dict:
    """Append a new tileset (graphics + palette) to a parsed TEX dict.

    Mutates *tex_result* in place: the tileset is stored under both its
    native (RGCN/RLCN) and alias (NCGR/NCLR) keys and tileset_count is
    refreshed.  Returns the same dict for chaining.
    """
    new_index = len(tex_result['tilesets'])
    entry = {
        'index': new_index,
        'RGCN': rgcn_data,
        'RLCN': rlcn_data,
        'NCGR': rgcn_data,
        'NCLR': rlcn_data,
    }
    tex_result['tilesets'].append(entry)
    tex_result['tileset_count'] = len(tex_result['tilesets'])
    print(f'\n=== TEX Modification ===')
    print(f' Added tileset at index: {new_index}')
    print(f' RGCN size: {len(rgcn_data):,} bytes')
    print(f' RLCN size: {len(rlcn_data):,} bytes')
    print(f" Total tilesets: {tex_result['tileset_count']}")
    return tex_result
||||
def build_tex_file(tex_result: dict) -> bytes:
    """Serialize a parsed TEX dict back into an (uncompressed) TEX file.

    Each tileset becomes an inner NARC of [RGCN, RLCN]; all inner NARCs
    are wrapped in one outer NARC prefixed with the 'TEX\\0' magic.
    Tilesets missing either section are skipped with a warning.
    """
    print(f'\n=== Building TEX File ===')
    tilesets = tex_result.get('tilesets', [])
    print(f' Building {len(tilesets)} tilesets')
    tileset_narcs = []
    for ts in tilesets:
        rgcn = ts.get('RGCN')
        rlcn = ts.get('RLCN')
        if not rgcn or not rlcn:
            print(f" WARNING: Tileset {ts['index']} missing data, skipping")
            continue
        # One NARC per tileset: file 0 = graphics, file 1 = palette.
        inner_narc = build_narc([rgcn, rlcn])
        tileset_narcs.append(inner_narc)
        print(f" Tileset {ts['index']}: NARC size = {len(inner_narc):,} bytes")
    outer_narc = build_narc(tileset_narcs)
    print(f' Outer NARC size: {len(outer_narc):,} bytes')
    tex_file = b'TEX\x00' + outer_narc
    print(f' Final TEX size: {len(tex_file):,} bytes (uncompressed)')
    return tex_file
||||
def modify_dat_txif(dat_result: dict, new_texture_id: int) -> dict:
    """Append a TXIF rule for *new_texture_id* to the DAT dict, in place.

    Raises ValueError when the parsed DAT has no TXIF section; otherwise
    returns the (mutated) dict.
    """
    current = dat_result.get('txif')
    if not current:
        raise ValueError('DAT file has no TXIF section')
    print(f'\n=== DAT TXIF Modification ===')
    dat_result['txif'] = add_txif_rule(current, new_texture_id)
    return dat_result
||||
def build_dat_file(dat_result: dict) -> bytes:
    """Serialize a parsed DAT dict back into an (uncompressed) DAT NARC.

    Section order: MPIF, TXIF, LYR (an inner NARC of the raw layer blobs
    prefixed with 'LYR\\0'), CTA -- each included only when present in
    *dat_result*.
    """
    print(f'\n=== Building DAT File ===')
    sections = []
    if dat_result.get('mpif'):
        sections.append(dat_result['mpif'])
        print(f" MPIF: {len(dat_result['mpif']):,} bytes")
    if dat_result.get('txif'):
        sections.append(dat_result['txif'])
        print(f" TXIF: {len(dat_result['txif']):,} bytes")
    layers = dat_result.get('layers', [])
    if layers:
        # Layers are packed into their own NARC inside the LYR section.
        layer_data_list = [layer['data'] for layer in layers]
        inner_layer_narc = build_narc(layer_data_list)
        lyr_section = b'LYR\x00' + inner_layer_narc
        sections.append(lyr_section)
        print(f' LYR: {len(lyr_section):,} bytes ({len(layers)} layers)')
    if dat_result.get('cta'):
        sections.append(dat_result['cta'])
        print(f" CTA: {len(dat_result['cta']):,} bytes")
    main_narc = build_narc(sections)
    print(f' Main NARC size: {len(main_narc):,} bytes (uncompressed)')
    return main_narc
||||
def transfer_png_to_map(png_path: str, dat_path: str, tex_path: str, use_tile_banks: bool=None, output_dat_path: str=None, output_tex_path: str=None) -> Tuple[bool, str]:
    """Convert a PNG and splice it into a map's DAT/TEX pair as a new tileset.

    Pipeline: convert the PNG to RGCN/RLCN, parse both map files, append
    the tileset to the TEX, add a TXIF rule for it in the DAT, rebuild
    both files, LZ10-compress them and write them out (in place unless
    output paths are given).

    Args:
        use_tile_banks: True/False forces the conversion mode; None picks
            automatically from the PNG's unique-colour count (<=16 colours
            -> tile banking).

    Returns:
        (success, human-readable message).  Never raises -- failures are
        reported in the tuple.
    """
    try:
        print('\n' + '=' * 60)
        print('=== PNG TILESET TRANSFER OPERATION ===')
        print('=' * 60)
        # Default to overwriting the source map files.
        if output_dat_path is None:
            output_dat_path = dat_path
        if output_tex_path is None:
            output_tex_path = tex_path
        if use_tile_banks is None:
            print('\n=== Auto-Detecting Best Conversion Mode ===')
            png_info = get_png_info(png_path)
            if 'error' in png_info:
                return (False, f"Failed to analyze PNG: {png_info['error']}")
            unique_colors = png_info.get('unique_colors', 256)
            print(f' PNG has {unique_colors} unique colors')
            if unique_colors <= 16:
                use_tile_banks = True
                mode_reason = 'PNG has <=16 colors, optimal for tile banking'
            else:
                use_tile_banks = False
                mode_reason = 'PNG has >16 colors, using standard 256-color mode'
            print(f" Auto-selected: {('Tile Banking Mode' if use_tile_banks else 'Standard 256-Color Mode')}")
            print(f' Reason: {mode_reason}')
        else:
            mode_reason = 'User-specified mode'
        mode_name = 'Tile Banking (15 colors + transparency)' if use_tile_banks else 'Standard (256 colors)'
        print(f' Conversion mode: {mode_name}')
        print('\n=== Step 1: Converting PNG ===')
        success, message, rgcn_data, rlcn_data = convert_png_to_tileset(png_path, use_tile_banks=use_tile_banks)
        if not success:
            return (False, message)
        print('\n=== Step 2: Parsing Map Files ===')
        print(f' DAT: {dat_path}')
        print(f' TEX: {tex_path}')
        dat_result = parse_dat_map(dat_path)
        tex_result = parse_tex_map(tex_path)
        if not dat_result or not tex_result:
            return (False, 'Failed to parse map files')
        print(f" DAT parsed: {len(dat_result.get('layers', []))} layers")
        print(f" TEX parsed: {tex_result.get('tileset_count', 0)} tilesets")
        print('\n=== Step 3: Determining Texture ID ===')
        # The new tileset is appended, so its ID is the current count.
        tileset_count = tex_result.get('tileset_count', 0)
        new_texture_id = tileset_count
        print(f' New tileset will be at index: {new_texture_id}')
        print('\n=== Step 4: Adding Tileset to TEX ===')
        tex_result = add_tileset_to_tex(tex_result, rgcn_data, rlcn_data)
        print('\n=== Step 5: Modifying DAT TXIF ===')
        dat_result = modify_dat_txif(dat_result, new_texture_id)
        print('\n=== Step 6: Building Files ===')
        new_tex = build_tex_file(tex_result)
        new_dat = build_dat_file(dat_result)
        print('\n=== Step 7: Compressing and Saving ===')
        print(' Compressing TEX...')
        tex_compressed = compress_lz10(new_tex)
        print(f' TEX compressed: {len(tex_compressed):,} bytes')
        print(' Compressing DAT...')
        dat_compressed = compress_lz10(new_dat)
        print(f' DAT compressed: {len(dat_compressed):,} bytes')
        print(f'\n Saving TEX to: {output_tex_path}')
        with open(output_tex_path, 'wb') as f:
            f.write(tex_compressed)
        print(f' Saving DAT to: {output_dat_path}')
        with open(output_dat_path, 'wb') as f:
            f.write(dat_compressed)
        print('\n' + '=' * 60)
        print('=== TRANSFER COMPLETE ===')
        print('=' * 60)
        print(f'PNG tileset successfully integrated!')
        print(f' New tileset index: {new_texture_id}')
        print(f" Total tilesets: {tex_result['tileset_count']}")
        print(f' Mode: {mode_name}')
        print(f' Selection: {mode_reason}')
        print(f' Files saved:')
        print(f' - {output_dat_path}')
        print(f' - {output_tex_path}')
        print('=' * 60)
        return (True, f'PNG tileset integrated as texture ID {new_texture_id} using {mode_name}')
    except Exception as e:
        error_msg = f'Transfer failed: {str(e)}'
        print(f'\nERROR: {error_msg}')
        import traceback
        traceback.print_exc()
        return (False, error_msg)
||||
def get_png_info(png_path: str) -> dict:
    """Summarize a PNG: dimensions, mode, unique colours, transparency.

    Returns a stats dict, or {'path': ..., 'error': ...} when the file
    cannot be read or decoded.
    """
    try:
        img = Image.open(png_path)
        img_rgba = img.convert('RGBA')
        unique_colors = len(set(img_rgba.getdata()))
        # "Has transparency" = RGBA source with any alpha value below 255.
        has_alpha = img.mode == 'RGBA' and img.getchannel('A').getextrema() != (255, 255)
        return {'path': png_path, 'name': Path(png_path).name, 'width': img.size[0], 'height': img.size[1], 'mode': img.mode, 'unique_colors': unique_colors, 'has_transparency': has_alpha, 'file_size': Path(png_path).stat().st_size}
    except Exception as e:
        return {'path': png_path, 'error': str(e)}
||||
def print_png_info(png_path: str):
    """Print a human-readable report of get_png_info(*png_path*) to stdout,
    including a recommended conversion mode based on the colour count."""
    info = get_png_info(png_path)
    if 'error' in info:
        print(f"\nError reading PNG: {info['error']}")
        return
    print(f"\nPNG File: {info['name']}")
    print(f" Path: {info['path']}")
    print(f" Size: {info['width']}x{info['height']} pixels")
    print(f" Mode: {info['mode']}")
    print(f" Unique colors: {info['unique_colors']}")
    print(f" Has transparency: {('Yes' if info['has_transparency'] else 'No')}")
    print(f" File size: {info['file_size']:,} bytes")
    # Mode recommendation mirrors the auto-detection in transfer_png_to_map.
    if info['unique_colors'] <= 16:
        print(' Recommendation: Use tile banking mode (15 colors)')
    elif info['unique_colors'] <= 256:
        print(' Recommendation: Use standard mode (256 colors)')
    else:
        print(' WARNING: Image has >256 colors, quantization will reduce quality')
if __name__ == '__main__':
    # Standalone CLI:
    #   pngtilesettransfer.py <png>                 -> analyze + convert to .rgcn/.rlcn
    #   pngtilesettransfer.py <png> <dat> <tex>     -> full transfer into a map
    #   --simple                                    -> force 256-colour mode
    import sys
    if len(sys.argv) < 2:
        print('PNG Tileset Transfer - Standalone Mode')
        print('\nUsage:')
        print(' Convert PNG only:')
        print(' python pngtilesettransfer.py <png_file> [--simple]')
        print('\n Transfer to map:')
        print(' python pngtilesettransfer.py <png_file> <dat_file> <tex_file> [--simple]')
        print('\nOptions:')
        print(' --simple Use simple 256-color mode instead of tile banking')
        sys.exit(1)
    png_path = sys.argv[1]
    # Tile banking is the default; --simple anywhere on the line disables it.
    use_tile_banks = '--simple' not in sys.argv
    if len(sys.argv) >= 4:
        # Map-transfer mode: DAT and TEX paths supplied.
        dat_path = sys.argv[2]
        tex_path = sys.argv[3]
        success, message = transfer_png_to_map(png_path, dat_path, tex_path, use_tile_banks=use_tile_banks)
        if success:
            print(f'\nSUCCESS: {message}')
            sys.exit(0)
        else:
            print(f'\nFAILED: {message}')
            sys.exit(1)
    else:
        # Convert-only mode: emit .rgcn/.rlcn beside the working directory.
        print_png_info(png_path)
        base_name = Path(png_path).stem
        rgcn_path = f'{base_name}.rgcn'
        rlcn_path = f'{base_name}.rlcn'
        success, message, rgcn_data, rlcn_data = convert_png_to_tileset(png_path, use_tile_banks=use_tile_banks, output_rgcn_path=rgcn_path, output_rlcn_path=rlcn_path)
        if success:
            print(f'\nSUCCESS: {message}')
            print(f'Files saved:')
            print(f' - {rgcn_path}')
            print(f' - {rlcn_path}')
            sys.exit(0)
        else:
            print(f'\nFAILED: {message}')
            sys.exit(1)
679
load/saverom.py
Normal file
679
load/saverom.py
Normal file
|
|
@ -0,0 +1,679 @@
|
|||
import struct
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
def _build_crc16_table() -> list:
    """Generate the standard reflected CRC-16 table (polynomial 0xA001)."""
    table = []
    for index in range(256):
        crc = index
        for _ in range(8):
            crc = crc >> 1 ^ 0xA001 if crc & 1 else crc >> 1
        table.append(crc)
    return table


# CRC-16 lookup table (reflected polynomial 0xA001).  Generated at import
# time instead of hand-typed as a 256-entry literal, which is equivalent
# byte-for-byte and cannot drift from the algorithm.
CRC16_TABLE = _build_crc16_table()


def calculate_crc16(data: bytes) -> int:
    """Compute the table-driven CRC-16 of *data*.

    Initial value 0xFFFF, no final XOR (the CRC-16/MODBUS variant).
    Returns a value in 0..0xFFFF; the CRC of empty input is 0xFFFF.
    """
    crc = 0xFFFF
    for byte in data:
        crc = crc >> 8 & 0xFF ^ CRC16_TABLE[(crc ^ byte) & 0xFF]
    return crc & 0xFFFF
|
||||
@dataclass
class NDSHeader:
    """Parsed 512-byte Nintendo DS cartridge header.

    Field order mirrors the on-cartridge layout; from_bytes()/to_bytes()
    convert between this structure and the raw little-endian header block.
    """

    # Identification block (0x000-0x01F).
    game_title: bytes = field(default_factory=lambda: b'\x00' * 12)
    game_code: bytes = field(default_factory=lambda: b'\x00' * 4)
    maker_code: bytes = field(default_factory=lambda: b'\x00' * 2)
    unit_code: int = 0
    device_type: int = 0
    device_size: int = 0
    reserved1: bytes = field(default_factory=lambda: b'\x00' * 9)
    rom_version: int = 0
    flags: int = 0
    # ARM9/ARM7 binary placement (0x020-0x03F).
    arm9_rom_addr: int = 0
    arm9_entry_addr: int = 0
    arm9_ram_addr: int = 0
    arm9_size: int = 0
    arm7_rom_addr: int = 0
    arm7_entry_addr: int = 0
    arm7_ram_addr: int = 0
    arm7_size: int = 0
    # Filesystem tables (0x040-0x04F): filename table (FNT) and FAT.
    filename_table_addr: int = 0
    filename_size: int = 0
    fat_addr: int = 0
    fat_size: int = 0
    # Overlay tables (0x050-0x05F).
    arm9_overlay_addr: int = 0
    arm9_overlay_size: int = 0
    arm7_overlay_addr: int = 0
    arm7_overlay_size: int = 0
    # Cartridge command settings, icon/title, secure area, sizes (0x060-0x087).
    normal_commands_settings: int = 0
    key1_commands_settings: int = 0
    icon_title_addr: int = 0
    secure_area_crc16: int = 0
    secure_area_loading_timeout: int = 0
    arm9_autoload_list_ram_addr: int = 0
    arm7_autoload_list_ram_addr: int = 0
    secure_area_disable: int = 0
    rom_size: int = 0
    header_size: int = 0
    reserved2: bytes = field(default_factory=lambda: b'\x00' * 56)
    # Nintendo logo plus checksums (0x0C0-0x15F).
    nintendo_logo: bytes = field(default_factory=lambda: b'\x00' * 156)
    nintendo_logo_crc: int = 0
    header_crc16: int = 0
    # Debug region (0x160-0x16B) and padding up to 0x200.
    debug_rom_addr: int = 0
    debug_size: int = 0
    debug_ram_addr: int = 0
    reserved3: bytes = field(default_factory=lambda: b'\x00' * 4)
    reserved4: bytes = field(default_factory=lambda: b'\x00' * 144)

    @classmethod
    def from_bytes(cls, data: bytes) -> 'NDSHeader':
        """Decode the first 512 bytes of a ROM into an NDSHeader.

        Raises:
            ValueError: if fewer than 512 bytes are supplied.
        """
        if len(data) < 512:
            raise ValueError(f'Header data too short: {len(data)} < 512 bytes')
        h = cls()
        # Identification block.
        h.game_title = data[0:12]
        h.game_code = data[12:16]
        h.maker_code = data[16:18]
        h.unit_code = data[18]
        h.device_type = data[19]
        h.device_size = data[20]
        h.reserved1 = data[21:30]
        h.rom_version = data[30]
        h.flags = data[31]
        # ARM9/ARM7 binary placement.
        h.arm9_rom_addr = struct.unpack_from('<I', data, 32)[0]
        h.arm9_entry_addr = struct.unpack_from('<I', data, 36)[0]
        h.arm9_ram_addr = struct.unpack_from('<I', data, 40)[0]
        h.arm9_size = struct.unpack_from('<I', data, 44)[0]
        h.arm7_rom_addr = struct.unpack_from('<I', data, 48)[0]
        h.arm7_entry_addr = struct.unpack_from('<I', data, 52)[0]
        h.arm7_ram_addr = struct.unpack_from('<I', data, 56)[0]
        h.arm7_size = struct.unpack_from('<I', data, 60)[0]
        # FNT / FAT locations.
        h.filename_table_addr = struct.unpack_from('<I', data, 64)[0]
        h.filename_size = struct.unpack_from('<I', data, 68)[0]
        h.fat_addr = struct.unpack_from('<I', data, 72)[0]
        h.fat_size = struct.unpack_from('<I', data, 76)[0]
        # Overlay tables.
        h.arm9_overlay_addr = struct.unpack_from('<I', data, 80)[0]
        h.arm9_overlay_size = struct.unpack_from('<I', data, 84)[0]
        h.arm7_overlay_addr = struct.unpack_from('<I', data, 88)[0]
        h.arm7_overlay_size = struct.unpack_from('<I', data, 92)[0]
        # Command settings, secure area, sizes.
        h.normal_commands_settings = struct.unpack_from('<I', data, 96)[0]
        h.key1_commands_settings = struct.unpack_from('<I', data, 100)[0]
        h.icon_title_addr = struct.unpack_from('<I', data, 104)[0]
        h.secure_area_crc16 = struct.unpack_from('<H', data, 108)[0]
        h.secure_area_loading_timeout = struct.unpack_from('<H', data, 110)[0]
        h.arm9_autoload_list_ram_addr = struct.unpack_from('<I', data, 112)[0]
        h.arm7_autoload_list_ram_addr = struct.unpack_from('<I', data, 116)[0]
        h.secure_area_disable = struct.unpack_from('<Q', data, 120)[0]
        h.rom_size = struct.unpack_from('<I', data, 128)[0]
        h.header_size = struct.unpack_from('<I', data, 132)[0]
        h.reserved2 = data[136:192]
        # Logo and checksums.
        h.nintendo_logo = data[192:348]
        h.nintendo_logo_crc = struct.unpack_from('<H', data, 348)[0]
        h.header_crc16 = struct.unpack_from('<H', data, 350)[0]
        # Debug region and trailing reserved bytes.
        h.debug_rom_addr = struct.unpack_from('<I', data, 352)[0]
        h.debug_size = struct.unpack_from('<I', data, 356)[0]
        h.debug_ram_addr = struct.unpack_from('<I', data, 360)[0]
        h.reserved3 = data[364:368]
        h.reserved4 = data[368:512]
        return h

    def to_bytes(self) -> bytes:
        """Serialize this header back to its 512-byte on-disk form.

        The offsets mirror from_bytes() exactly, so a parse/serialize
        round-trip reproduces the original bytes.
        """
        data = bytearray(512)
        data[0:12] = self.game_title
        data[12:16] = self.game_code
        data[16:18] = self.maker_code
        data[18] = self.unit_code
        data[19] = self.device_type
        data[20] = self.device_size
        data[21:30] = self.reserved1
        data[30] = self.rom_version
        data[31] = self.flags
        struct.pack_into('<I', data, 32, self.arm9_rom_addr)
        struct.pack_into('<I', data, 36, self.arm9_entry_addr)
        struct.pack_into('<I', data, 40, self.arm9_ram_addr)
        struct.pack_into('<I', data, 44, self.arm9_size)
        struct.pack_into('<I', data, 48, self.arm7_rom_addr)
        struct.pack_into('<I', data, 52, self.arm7_entry_addr)
        struct.pack_into('<I', data, 56, self.arm7_ram_addr)
        struct.pack_into('<I', data, 60, self.arm7_size)
        struct.pack_into('<I', data, 64, self.filename_table_addr)
        struct.pack_into('<I', data, 68, self.filename_size)
        struct.pack_into('<I', data, 72, self.fat_addr)
        struct.pack_into('<I', data, 76, self.fat_size)
        struct.pack_into('<I', data, 80, self.arm9_overlay_addr)
        struct.pack_into('<I', data, 84, self.arm9_overlay_size)
        struct.pack_into('<I', data, 88, self.arm7_overlay_addr)
        struct.pack_into('<I', data, 92, self.arm7_overlay_size)
        struct.pack_into('<I', data, 96, self.normal_commands_settings)
        struct.pack_into('<I', data, 100, self.key1_commands_settings)
        struct.pack_into('<I', data, 104, self.icon_title_addr)
        struct.pack_into('<H', data, 108, self.secure_area_crc16)
        struct.pack_into('<H', data, 110, self.secure_area_loading_timeout)
        struct.pack_into('<I', data, 112, self.arm9_autoload_list_ram_addr)
        struct.pack_into('<I', data, 116, self.arm7_autoload_list_ram_addr)
        struct.pack_into('<Q', data, 120, self.secure_area_disable)
        struct.pack_into('<I', data, 128, self.rom_size)
        struct.pack_into('<I', data, 132, self.header_size)
        data[136:192] = self.reserved2
        data[192:348] = self.nintendo_logo
        struct.pack_into('<H', data, 348, self.nintendo_logo_crc)
        struct.pack_into('<H', data, 350, self.header_crc16)
        struct.pack_into('<I', data, 352, self.debug_rom_addr)
        struct.pack_into('<I', data, 356, self.debug_size)
        struct.pack_into('<I', data, 360, self.debug_ram_addr)
        data[364:368] = self.reserved3
        data[368:512] = self.reserved4
        return bytes(data)

    def update_crc(self):
        """Recompute header_crc16 over bytes 0x000-0x15D (the region the
        header checksum covers — the CRC field itself is excluded)."""
        self.header_crc16 = calculate_crc16(self.to_bytes()[:350])

    @property
    def game_title_str(self) -> str:
        """Game title as ASCII text with NUL padding stripped."""
        return self.game_title.decode('ascii', errors='ignore').strip('\x00')

    @property
    def game_code_str(self) -> str:
        """Four-character game code as ASCII text."""
        return self.game_code.decode('ascii', errors='ignore')

    @property
    def fat_entry_count(self) -> int:
        """Number of FAT records (each record is 8 bytes: start + end)."""
        return self.fat_size // 8
@dataclass
class FATEntry:
    """One File Allocation Table record: the [start, end) byte span of a file."""

    start_addr: int
    end_addr: int

    @property
    def size(self) -> int:
        """File length in bytes (end offset is exclusive)."""
        return self.end_addr - self.start_addr

    @classmethod
    def from_bytes(cls, data: bytes) -> 'FATEntry':
        """Decode an 8-byte little-endian FAT record into an entry."""
        addresses = struct.unpack('<II', data[0:8])
        return cls(*addresses)

    def to_bytes(self) -> bytes:
        """Encode this entry back into its 8-byte on-disk form."""
        return struct.pack('<II', self.start_addr, self.end_addr)

    def __repr__(self):
        return f'FATEntry(start=0x{self.start_addr:08X} end=0x{self.end_addr:08X} size={self.size:,})'
# Human-readable display labels for each ModificationRecord.mod_type key;
# lookups elsewhere fall back to the raw key for unknown types.
MOD_TYPE_LABELS: Dict[str, str] = {'layer_swap': 'Layer Swap', 'import_tileset': 'Import Tileset', 'png_transfer': 'PNG Transfer', 'map_modification': 'Map Modification', 'direct': 'Direct Replace'}
@dataclass
class ModificationRecord:
    """A single pending file replacement queued for the next ROM build."""

    file_path: Path  # absolute path of the extracted file being replaced
    new_data: bytes  # replacement payload to write into the ROM
    mod_type: str  # key into MOD_TYPE_LABELS
    timestamp: datetime = field(default_factory=datetime.now)  # when registered
    fat_index: int = -1  # assigned FAT slot; -1 until resolved

    @property
    def label(self) -> str:
        """Display name for this record's modification type."""
        return MOD_TYPE_LABELS.get(self.mod_type, self.mod_type)

    @property
    def size(self) -> int:
        """Byte length of the replacement payload."""
        return len(self.new_data)

    @property
    def resolved(self) -> bool:
        """True once a FAT slot has been assigned to this record."""
        return self.fat_index > -1

    def __str__(self):
        status = f'FAT#{self.fat_index}' if self.resolved else 'unresolved'
        return f"[{self.label}] {self.file_path.name} {self.size:,} bytes {status} @ {self.timestamp.strftime('%H:%M:%S')}"
class ModificationTracker:
    """Registry of pending file replacements, keyed by absolute file path."""

    def __init__(self):
        # str(resolved path) -> ModificationRecord; re-registering a path
        # replaces its earlier record.
        self._mods: Dict[str, ModificationRecord] = {}
        print('[ModificationTracker] Initialized')

    def register(self, file_path: Path, new_data: bytes, mod_type: str='direct') -> bool:
        """Queue *new_data* as the replacement for *file_path*.

        Empty payloads are rejected with a warning; returns True on success.
        """
        if not new_data:
            print(f'[ModificationTracker] WARNING: empty data for {file_path.name} — skipping')
            return False
        resolved_path = file_path.resolve()
        key = str(resolved_path)
        action = 'Added' if key not in self._mods else 'Updated'
        record = ModificationRecord(file_path=resolved_path, new_data=new_data, mod_type=mod_type)
        self._mods[key] = record
        print(f'[ModificationTracker] {action}: {record}')
        return True

    def register_from_disk(self, file_path: Path, mod_type: str='direct') -> bool:
        """Read *file_path* from disk and register its current contents."""
        try:
            payload = Path(file_path).read_bytes()
            return self.register(Path(file_path), payload, mod_type)
        except Exception as e:
            print(f'[ModificationTracker] ERROR reading {file_path}: {e}')
            return False

    def register_map_files(self, dat_path: Path, tex_path: Path, mod_type: str='map_modification') -> bool:
        """Register a map's .dat/.tex pair together; True only if both succeed."""
        dat_ok = self.register_from_disk(Path(dat_path), mod_type)
        tex_ok = self.register_from_disk(Path(tex_path), mod_type)
        if dat_ok and tex_ok:
            print(f'[ModificationTracker] Map pair registered: {Path(dat_path).name} + {Path(tex_path).name}')
        return dat_ok and tex_ok

    def has_modifications(self) -> bool:
        """True when at least one record is queued."""
        return bool(self._mods)

    def count(self) -> int:
        """Total number of queued records."""
        return len(self._mods)

    def count_by_type(self, mod_type: str) -> int:
        """Number of queued records whose type key equals *mod_type*."""
        return len(self.get_by_type(mod_type))

    def get_all(self) -> List[ModificationRecord]:
        """All queued records, in insertion order."""
        return list(self._mods.values())

    def get_by_type(self, mod_type: str) -> List[ModificationRecord]:
        """Queued records whose type key equals *mod_type*."""
        return [m for m in self._mods.values() if m.mod_type == mod_type]

    def get_summary(self) -> Dict:
        """Aggregate view: total count/size, per-type counts, per-file details."""
        records = self.get_all()
        by_type: Dict[str, int] = {}
        files: List[Dict] = []
        total_size = 0
        for rec in records:
            by_type[rec.mod_type] = by_type.get(rec.mod_type, 0) + 1
            total_size += rec.size
            files.append({
                'name': rec.file_path.name,
                'path': str(rec.file_path),
                'type': rec.mod_type,
                'label': rec.label,
                'size': rec.size,
                'timestamp': rec.timestamp.strftime('%H:%M:%S'),
                'resolved': rec.resolved,
                'fat_index': rec.fat_index,
            })
        return {'total_count': len(records), 'by_type': by_type, 'total_size': total_size, 'files': files}

    def get_display_lines(self) -> List[str]:
        """Human-readable listing of pending modifications, grouped by type."""
        records = self.get_all()
        if not records:
            return ['No modifications registered.']
        lines: List[str] = [f'Pending modifications: {len(records)}', '─' * 50]
        grouped: Dict[str, List[ModificationRecord]] = {}
        for rec in records:
            grouped.setdefault(rec.mod_type, []).append(rec)
        for mod_type, group in grouped.items():
            label = MOD_TYPE_LABELS.get(mod_type, mod_type)
            lines.append(f'\n  {label} ({len(group)}):')
            for rec in group:
                status = f'FAT#{rec.fat_index}' if rec.resolved else 'pending'
                lines.append(f'    • {rec.file_path.name} ({rec.size:,} bytes) [{status}]')
        return lines

    def clear(self):
        """Forget every queued record."""
        self._mods.clear()
        print('[ModificationTracker] Cleared all modifications')

    def remove(self, file_path: Path) -> bool:
        """Drop the record for *file_path*; returns False when not present."""
        key = str(file_path.resolve())
        if key not in self._mods:
            return False
        del self._mods[key]
        print(f'[ModificationTracker] Removed: {file_path.name}')
        return True
class FNTParser:
    """Builds a lowercase relative-path → file-index map from an NDS filename table."""

    def parse(self, rom_data: bytes, fnt_offset: int, fnt_size: int) -> Dict[str, int]:
        """Walk the FNT starting at the root directory (id 0xF000).

        Any parsing error is printed and swallowed; whatever was indexed up
        to that point is still returned.
        """
        index: Dict[str, int] = {}
        try:
            self._walk_dir(rom_data, fnt_offset, fnt_size, dir_id=61440, parent_path='', index=index)
        except Exception as e:
            import traceback
            traceback.print_exc()
            print(f'[FNTParser] Error during walk: {e}')
        print(f'[FNTParser] Built index: {len(index)} files')
        return index

    def _walk_dir(self, rom: bytes, fnt_base: int, fnt_size: int, dir_id: int, parent_path: str, index: Dict[str, int]):
        """Recursively record every file under directory *dir_id* into *index*.

        Each directory-table slot is 8 bytes: a u32 offset to its entry list
        (relative to the FNT base) and the u16 file id of its first file.
        """
        slot = fnt_base + (dir_id & 4095) * 8
        if slot + 8 > len(rom):
            return
        entries_rel = struct.unpack_from('<I', rom, slot)[0]
        file_id = struct.unpack_from('<H', rom, slot + 4)[0]
        cursor = fnt_base + entries_rel
        limit = min(fnt_base + fnt_size, len(rom))
        while cursor < limit:
            entry_byte = rom[cursor]
            cursor += 1
            if entry_byte == 0:
                break  # 0x00 terminates this directory's entry list
            name_len = entry_byte & 127
            if cursor + name_len > len(rom):
                break
            name = rom[cursor:cursor + name_len].decode('ascii', errors='replace')
            cursor += name_len
            full_path = (f'{parent_path}/{name}' if parent_path else name).lower()
            if entry_byte & 128:
                # Subdirectory entry: its u16 directory id follows the name.
                if cursor + 2 > len(rom):
                    break
                child_id = struct.unpack_from('<H', rom, cursor)[0]
                cursor += 2
                self._walk_dir(rom, fnt_base, fnt_size, child_id, full_path, index)
            else:
                # File entry: ids are sequential from the directory's first id.
                index[full_path] = file_id
                file_id += 1
class ROMModificationCache:
    """Central state for pending ROM edits: the modification tracker, the
    parsed header, and the FNT-derived path → FAT-index lookup table."""

    def __init__(self):
        self.tracker: ModificationTracker = ModificationTracker()
        self.original_rom_path: Optional[Path] = None
        self.header: Optional[NDSHeader] = None
        # Lowercased ROM-relative path -> FAT slot; filled by build_file_index().
        self._fat_index_map: Dict[str, int] = {}
        self._fat_index_map_built: bool = False
        print('[ROMModificationCache] Initialized')

    def has_modifications(self) -> bool:
        """True when the tracker holds at least one pending modification."""
        return self.tracker.has_modifications()

    def get_modification_count(self) -> int:
        """Number of pending modifications."""
        return self.tracker.count()

    def get_modified_files(self) -> List[ModificationRecord]:
        """All pending modification records."""
        return self.tracker.get_all()

    def add_modification(self, file_path: Path, new_data: bytes, modification_type: str='direct') -> bool:
        """Queue *new_data* as the replacement for *file_path*; returns success."""
        return self.tracker.register(file_path, new_data, modification_type)

    def clear(self):
        """Drop all pending modifications and the cached FNT index."""
        self.tracker.clear()
        self._fat_index_map = {}
        self._fat_index_map_built = False

    def set_rom_path(self, rom_path: Path):
        """Remember the source ROM; invalidates any previously built FNT index."""
        self.original_rom_path = rom_path
        self._fat_index_map_built = False
        print(f'[ROMModificationCache] ROM path set: {rom_path}')

    def load_header(self, rom_path: Path) -> bool:
        """Read and parse the 512-byte header from *rom_path*; returns success."""
        try:
            with open(rom_path, 'rb') as f:
                raw = f.read(512)
            self.header = NDSHeader.from_bytes(raw)
            print(f"[ROMModificationCache] Header loaded: '{self.header.game_title_str}' ({self.header.game_code_str})")
            print(f' ROM size : {self.header.rom_size:,} bytes')
            print(f' FAT entries: {self.header.fat_entry_count} @ 0x{self.header.fat_addr:08X}')
            print(f' FNT size : {self.header.filename_size:,} bytes @ 0x{self.header.filename_table_addr:08X}')
            return True
        except Exception as e:
            print(f'[ROMModificationCache] ERROR loading header: {e}')
            return False

    def build_file_index(self, rom_path: Path) -> bool:
        """Parse the ROM's filename table into the path → FAT-index map.

        Requires load_header() to have succeeded first; returns success.
        """
        if not self.header:
            print('[ROMModificationCache] Cannot build index: header not loaded')
            return False
        try:
            with open(rom_path, 'rb') as f:
                rom_data = f.read()
            parser = FNTParser()
            self._fat_index_map = parser.parse(rom_data, self.header.filename_table_addr, self.header.filename_size)
            self._fat_index_map_built = True
            print(f'[ROMModificationCache] File index built: {len(self._fat_index_map)} entries')
            return True
        except Exception as e:
            print(f'[ROMModificationCache] ERROR building file index: {e}')
            return False

    def resolve_fat_index(self, file_path: Path) -> int:
        """Map an on-disk (extracted) file path to its FAT slot, or -1.

        First tries the path relative to '<rom stem>_extracted/' beside the
        ROM; failing that, tries every trailing suffix of the path against
        the FNT index. Matching is case-insensitive with '/' separators.
        """
        if not self._fat_index_map_built:
            print('[ROMModificationCache] WARNING: FNT index not built yet; call build_file_index() first')
        # Normalize Windows backslashes so comparisons use '/' throughout.
        abs_str = str(file_path.resolve()).replace('\\', '/')
        if self.original_rom_path:
            extracted_root = self.original_rom_path.parent / (self.original_rom_path.stem + '_extracted')
            root_str = str(extracted_root).replace('\\', '/').lower()
            lower_str = abs_str.lower()
            if lower_str.startswith(root_str):
                rel = lower_str[len(root_str):].lstrip('/')
                if rel in self._fat_index_map:
                    return self._fat_index_map[rel]
        # Fallback: match progressively shorter suffixes of the path.
        parts = abs_str.lower().split('/')
        for start in range(len(parts)):
            candidate = '/'.join(parts[start:])
            if candidate in self._fat_index_map:
                idx = self._fat_index_map[candidate]
                print(f"[ROMModificationCache] Suffix match: '{candidate}' → FAT#{idx}")
                return idx
        print(f'[ROMModificationCache] WARN: no FAT index for {file_path.name}')
        return -1
class ROMBuilder:
    """Assembles the output ROM: copies the original, patches modified files,
    then rewrites the FAT and header to match."""

    def __init__(self, cache: 'ROMModificationCache'):
        self.cache = cache

    def build_rom(self, output_path: Path, progress_callback=None) -> Tuple[bool, str]:
        """Write the patched ROM to *output_path*; returns (success, message).

        *progress_callback*, when given, receives short status strings.
        Mods that cannot be matched to a FAT slot are skipped (and listed
        in the final message); the build fails only if none can be matched.
        """
        try:
            # Preconditions: a readable source ROM, a parsed header, and at
            # least one queued modification.
            if not self.cache.original_rom_path:
                return (False, 'No original ROM path set. Load a ROM first.')
            if not self.cache.original_rom_path.exists():
                return (False, f'Original ROM not found:\n{self.cache.original_rom_path}')
            if not self.cache.header:
                return (False, 'ROM header not loaded. Call initialize() first.')
            if not self.cache.has_modifications():
                return (False, 'No modifications are registered to save.')
            divider = '=' * 60
            print(f'\n{divider}')
            print('ROM BUILD START')
            print(f' Source : {self.cache.original_rom_path.name}')
            print(f' Output : {output_path.name}')
            print(f' Mods : {self.cache.get_modification_count()}')
            print(divider)
            # Lazily build the FNT index if initialize() could not.
            if not self.cache._fat_index_map_built:
                _progress(progress_callback, 'Building ROM file index...')
                self.cache.build_file_index(self.cache.original_rom_path)
            # Resolve each pending mod to its FAT slot.
            mods = self.cache.get_modified_files()
            for mod in mods:
                if not mod.resolved:
                    mod.fat_index = self.cache.resolve_fat_index(mod.file_path)
            resolvable = [m for m in mods if m.resolved]
            unresolvable = [m for m in mods if not m.resolved]
            if unresolvable:
                names = ', '.join((m.file_path.name for m in unresolvable))
                print(f'WARNING: {len(unresolvable)} mod(s) unresolved: {names}')
            if not resolvable:
                return (False, "None of the registered modifications could be matched\nto entries in the ROM's File Allocation Table.\n\nEnsure the ROM was extracted properly and the FNT index\nwas built before saving.")
            # Step 1: start from a byte-for-byte copy of the original ROM.
            _progress(progress_callback, 'Copying original ROM…')
            shutil.copy2(self.cache.original_rom_path, output_path)
            print('Step 1: ROM copied')
            # Step 2: read the original FAT.
            _progress(progress_callback, 'Loading File Allocation Table…')
            fat_entries = self._load_fat()
            if fat_entries is None:
                return (False, 'Failed to read the File Allocation Table.')
            print(f'Step 2: FAT loaded — {len(fat_entries)} entries')
            # Step 3: write each modified file into the copy.
            _progress(progress_callback, f'Applying {len(resolvable)} modification(s)…')
            modified_fat = self._apply_modifications(output_path, fat_entries, resolvable, progress_callback)
            if modified_fat is None:
                return (False, 'Failed to apply modifications to ROM.')
            print('Step 3: Modifications applied')
            # Step 4: write the updated FAT back over the old one.
            _progress(progress_callback, 'Writing updated File Allocation Table…')
            if not self._write_fat(output_path, modified_fat):
                return (False, 'Failed to write updated FAT to ROM.')
            print('Step 4: FAT written')
            # Step 5: refresh rom_size and header CRC.
            _progress(progress_callback, 'Updating ROM header…')
            if not self._update_header(output_path):
                return (False, 'Failed to update ROM header.')
            print('Step 5: Header updated')
            final_size = output_path.stat().st_size
            summary_lines = [f'ROM saved successfully!', f'', f'Modifications applied : {len(resolvable)}', f'Final ROM size : {final_size:,} bytes']
            if unresolvable:
                summary_lines += [f'', f'NOTE: {len(unresolvable)} modification(s) could not be matched to FAT entries and were skipped:']
                for m in unresolvable:
                    summary_lines.append(f' • {m.file_path.name}')
            msg = '\n'.join(summary_lines)
            print(f'\n{divider}')
            print('ROM BUILD COMPLETE')
            print(msg)
            print(f'{divider}\n')
            return (True, msg)
        except Exception as e:
            import traceback
            traceback.print_exc()
            return (False, f'ROM build failed with an unexpected error:\n{e}')

    def _load_fat(self) -> Optional[List[FATEntry]]:
        """Read the FAT from the ORIGINAL ROM; returns entries or None on error."""
        try:
            with open(self.cache.original_rom_path, 'rb') as f:
                f.seek(self.cache.header.fat_addr)
                fat_raw = f.read(self.cache.header.fat_size)
            entries: List[FATEntry] = []
            for i in range(0, len(fat_raw), 8):
                if i + 8 <= len(fat_raw):  # ignore a trailing partial record
                    entries.append(FATEntry.from_bytes(fat_raw[i:i + 8]))
            return entries
        except Exception as e:
            print(f'[ROMBuilder] ERROR loading FAT: {e}')
            return None

    def _apply_modifications(self, rom_path: Path, fat_entries: List[FATEntry], mods: List[ModificationRecord], progress_callback=None) -> Optional[List[FATEntry]]:
        """Write each mod's payload into the ROM and return the updated FAT.

        Payloads that fit their old span are overwritten in place; larger
        payloads are appended at the (4-byte aligned) end of the ROM. A
        relocated file's original bytes are left behind but unreferenced.
        Returns None on any I/O error.
        """
        try:
            with open(rom_path, 'r+b') as rom_file:
                modified_fat = list(fat_entries)
                current_rom_end = rom_file.seek(0, 2)  # seek-to-EOF yields file size
                for i, mod in enumerate(mods):
                    if progress_callback and i % 5 == 0:
                        progress_callback(f'Writing mod {i + 1}/{len(mods)}: {mod.file_path.name}…')
                    idx = mod.fat_index
                    if idx < 0 or idx >= len(modified_fat):
                        print(f' SKIP: FAT#{idx} out of range for {mod.file_path.name}')
                        continue
                    old_entry = modified_fat[idx]
                    old_size = old_entry.size
                    new_size = mod.size
                    print(f' [{i + 1}/{len(mods)}] FAT#{idx} {mod.file_path.name} {old_size:,} → {new_size:,} bytes')
                    if new_size <= old_size:
                        # Fits: overwrite in place and shrink the FAT span.
                        rom_file.seek(old_entry.start_addr)
                        rom_file.write(mod.new_data)
                        modified_fat[idx] = FATEntry(old_entry.start_addr, old_entry.start_addr + new_size)
                        print(f' In-place @ 0x{old_entry.start_addr:08X}')
                    else:
                        # Too big: pad to 4-byte alignment, then append at EOF.
                        aligned_end = _align4(current_rom_end)
                        if aligned_end > current_rom_end:
                            rom_file.seek(current_rom_end)
                            rom_file.write(b'\xff' * (aligned_end - current_rom_end))
                        rom_file.seek(aligned_end)
                        rom_file.write(mod.new_data)
                        modified_fat[idx] = FATEntry(aligned_end, aligned_end + new_size)
                        current_rom_end = aligned_end + new_size
                        print(f' Appended @ 0x{aligned_end:08X}')
            return modified_fat
        except Exception as e:
            import traceback
            traceback.print_exc()
            print(f'[ROMBuilder] ERROR applying modifications: {e}')
            return None

    def _write_fat(self, rom_path: Path, fat_entries: List[FATEntry]) -> bool:
        """Serialize *fat_entries* back over the FAT region of *rom_path*."""
        try:
            fat_raw = bytearray()
            for entry in fat_entries:
                fat_raw.extend(entry.to_bytes())
            with open(rom_path, 'r+b') as f:
                f.seek(self.cache.header.fat_addr)
                f.write(fat_raw)
            print(f'[ROMBuilder] FAT written: {len(fat_entries)} entries ({len(fat_raw):,} bytes)')
            return True
        except Exception as e:
            print(f'[ROMBuilder] ERROR writing FAT: {e}')
            return False

    def _update_header(self, rom_path: Path) -> bool:
        """Refresh rom_size and the header CRC, then rewrite bytes 0-511."""
        try:
            rom_size = rom_path.stat().st_size
            self.cache.header.rom_size = rom_size
            self.cache.header.update_crc()
            header_bytes = self.cache.header.to_bytes()
            with open(rom_path, 'r+b') as f:
                f.seek(0)
                f.write(header_bytes)
            print(f'[ROMBuilder] Header updated: size={rom_size:,} CRC=0x{self.cache.header.header_crc16:04X}')
            return True
        except Exception as e:
            print(f'[ROMBuilder] ERROR updating header: {e}')
            return False
def _align4(value: int) -> int:
|
||||
return value + 3 & ~3
|
||||
|
||||
def _progress(callback, message: str):
|
||||
if callback:
|
||||
try:
|
||||
callback(message)
|
||||
except Exception:
|
||||
pass
|
||||
print(f' [progress] {message}')
|
||||
|
||||
class ROMSaver:
    """High-level facade: owns the modification cache and the ROM builder."""

    def __init__(self):
        self.cache = ROMModificationCache()
        self.builder = ROMBuilder(self.cache)
        print('[ROMSaver] Ready')

    def initialize(self, rom_path: Path) -> bool:
        """Point the saver at *rom_path* and load its header and FNT index.

        A failed header load aborts; a failed FNT build only warns (the
        builder retries lazily at save time).
        """
        print(f'\n[ROMSaver] Initializing: {rom_path.name}')
        self.cache.set_rom_path(rom_path)
        if not self.cache.load_header(rom_path):
            print('[ROMSaver] FAILED: could not load ROM header')
            return False
        if not self.cache.build_file_index(rom_path):
            print('[ROMSaver] WARNING: FNT index could not be built; FAT resolution will attempt lazy build at save time')
        print('[ROMSaver] Initialization complete\n')
        return True

    def is_initialized(self) -> bool:
        """True once both a ROM path and a parsed header are present."""
        return not (self.cache.original_rom_path is None or self.cache.header is None)

    def get_rom_info(self) -> Optional[Dict]:
        """Summary dict of the loaded ROM, or None when not initialized."""
        if not self.is_initialized():
            return None
        header = self.cache.header
        return {
            'title': header.game_title_str,
            'code': header.game_code_str,
            'rom_size': header.rom_size,
            'fat_entries': header.fat_entry_count,
            'path': str(self.cache.original_rom_path),
        }

    def register_modification(self, file_path: Path, new_data: bytes, mod_type: str='direct') -> bool:
        """Queue *new_data* as the replacement for *file_path*."""
        return self.cache.tracker.register(file_path, new_data, mod_type)

    def register_file_on_disk(self, file_path: Path, mod_type: str='direct') -> bool:
        """Register the current on-disk contents of *file_path*."""
        return self.cache.tracker.register_from_disk(file_path, mod_type)

    def add_modified_map_files(self, dat_path: Path, tex_path: Path) -> bool:
        """Register a map's .dat/.tex pair as map modifications."""
        return self.cache.tracker.register_map_files(dat_path, tex_path, mod_type='map_modification')

    def has_modifications(self) -> bool:
        """True when at least one modification is queued."""
        return self.cache.has_modifications()

    def get_modification_count(self) -> int:
        """Number of queued modifications."""
        return self.cache.get_modification_count()

    def get_layer_swap_count(self) -> int:
        """Number of queued layer-swap modifications."""
        return self.cache.tracker.count_by_type('layer_swap')

    def get_modification_summary(self) -> Dict:
        """Aggregate summary of all queued modifications."""
        return self.cache.tracker.get_summary()

    def get_status_lines(self) -> List[str]:
        """Human-readable listing of queued modifications."""
        return self.cache.tracker.get_display_lines()

    def get_layer_swap_files(self) -> List[Dict]:
        """Detail dicts for every queued layer-swap modification."""
        return [
            {
                'name': m.file_path.name,
                'path': str(m.file_path),
                'size': m.size,
                'timestamp': m.timestamp.strftime('%H:%M:%S'),
                'resolved': m.resolved,
                'fat_index': m.fat_index,
            }
            for m in self.cache.tracker.get_by_type('layer_swap')
        ]

    def save_rom(self, output_path: Path, progress_callback=None) -> Tuple[bool, str]:
        """Build the patched ROM at *output_path*; returns (success, message)."""
        if not self.is_initialized():
            return (False, 'ROM is not initialized.\nLoad a ROM before saving.')
        if not self.has_modifications():
            return (False, 'No modifications are queued to save.')
        return self.builder.build_rom(output_path, progress_callback)

    def clear_modifications(self):
        """Drop every queued modification (ROM stays loaded)."""
        self.cache.clear()
        print('[ROMSaver] Modifications cleared')

    def remove_modification(self, file_path: Path) -> bool:
        """Drop the queued modification for *file_path*, if any."""
        return self.cache.tracker.remove(file_path)

    def reset(self):
        """Forget everything: modifications, ROM path, header and FNT index."""
        self.cache.clear()
        self.cache.original_rom_path = None
        self.cache.header = None
        self.cache._fat_index_map_built = False
        print('[ROMSaver] Full reset')
149
load/texparser.py
Normal file
149
load/texparser.py
Normal file
|
|
@ -0,0 +1,149 @@
|
|||
from typing import Dict, List, Tuple, Optional
|
||||
from load.lz10util import decompress_lz10
|
||||
from load.narcutil import parse_narc
|
||||
|
||||
def detect_graphics_magic(data: bytes) -> Tuple[bool, str]:
    """Identify an NDS graphics container by its 4-byte magic.

    Checks, in order: exact match, byte-reversed match (reported with a
    '_REVERSED' suffix), then a tolerant match on either 3-byte half
    (reported with '_PARTIAL'). Returns (matched, format_name).
    """
    if not data or len(data) < 4:
        return (False, '')
    magic = data[:4]
    known = [(b'RGCN', 'RGCN'), (b'NCGR', 'NCGR'), (b'NCBR', 'NCBR'), (b'NCER', 'NCER'), (b'RNAN', 'RNAN')]
    exact = next((name for sig, name in known if magic == sig), None)
    if exact:
        return (True, exact)
    flipped = magic[::-1]
    reverse_hit = next((name for sig, name in known if flipped == sig), None)
    if reverse_hit:
        return (True, f'{reverse_hit}_REVERSED')
    partial = next((name for sig, name in known if magic[:3] == sig[:3] or magic[1:4] == sig[1:4]), None)
    if partial:
        return (True, f'{partial}_PARTIAL')
    return (False, '')
def detect_palette_magic(data: bytes) -> Tuple[bool, str]:
    """Identify an NDS palette container by its 4-byte magic.

    Same cascade as detect_graphics_magic: exact, byte-reversed
    ('_REVERSED'), then 3-of-4 half match ('_PARTIAL').
    Returns (matched, format_name).
    """
    if not data or len(data) < 4:
        return (False, '')
    magic = data[:4]
    known = [(b'RLCN', 'RLCN'), (b'NCLR', 'NCLR'), (b'RTFN', 'RTFN')]
    exact = next((name for sig, name in known if magic == sig), None)
    if exact:
        return (True, exact)
    flipped = magic[::-1]
    reverse_hit = next((name for sig, name in known if flipped == sig), None)
    if reverse_hit:
        return (True, f'{reverse_hit}_REVERSED')
    partial = next((name for sig, name in known if magic[:3] == sig[:3] or magic[1:4] == sig[1:4]), None)
    if partial:
        return (True, f'{partial}_PARTIAL')
    return (False, '')
def try_parse_as_graphics(data: bytes) -> Optional[bytes]:
    """Return *data* unchanged when it plausibly holds NDS graphics, else None.

    Accepts blobs of at least 32 bytes whose leading magic matches a known
    graphics container, or whose first 64 bytes contain a CHAR section
    marker in any byte order.
    """
    if not data or len(data) < 32:
        return None
    detected, _fmt = detect_graphics_magic(data)
    if detected:
        return data
    # Fallback: scan for a CHAR section marker (b'RAHC'[::-1] == b'CHAR',
    # so three distinct spellings cover the original four).
    markers = (b'RAHC', b'CHAR', b'CRAH')
    for offset in range(min(len(data) - 4, 64)):
        if data[offset:offset + 4] in markers:
            return data
    return None
def try_parse_as_palette(data: bytes) -> Optional[bytes]:
    """Return *data* unchanged when it plausibly holds NDS palette data, else None.

    Accepts blobs of at least 32 bytes whose leading magic matches a known
    palette container, or whose first 64 bytes contain a PLTT section
    marker in any byte order.
    """
    if not data or len(data) < 32:
        return None
    detected, _fmt = detect_palette_magic(data)
    if detected:
        return data
    # Fallback: scan for a PLTT section marker (b'TTLP'[::-1] == b'PLTT',
    # so three distinct spellings cover the original four).
    markers = (b'TTLP', b'PLTT', b'PLTL')
    for offset in range(min(len(data) - 4, 64)):
        if data[offset:offset + 4] in markers:
            return data
    return None
def classify_tileset_data(inner_files: List[bytes]) -> Tuple[Optional[bytes], Optional[bytes]]:
    """Split raw sub-file blobs into (graphics, palette) data.

    The first pass assigns blobs via magic detection. If either slot is
    still empty afterwards, a size heuristic assigns blobs of >= 1024 bytes
    to graphics and smaller ones to palette. Either slot may end up None.
    """
    graphics = None
    palette = None
    for blob in inner_files:
        if not blob or len(blob) < 4:
            continue
        if graphics is None:
            candidate = try_parse_as_graphics(blob)
            if candidate:
                graphics = candidate
                continue
        if palette is None:
            candidate = try_parse_as_palette(blob)
            if candidate:
                palette = candidate
                continue
        if graphics is not None and palette is not None:
            break
    if graphics is None or palette is None:
        # Heuristic fallback: big blobs are tiles, small ones are palettes.
        for blob in inner_files:
            if not blob:
                continue
            if graphics is None and len(blob) >= 1024:
                graphics = blob
            elif palette is None and len(blob) < 1024:
                palette = blob
    return (graphics, palette)
def parse_tex_map(tex_path: str) -> Dict:
    """Parse a map .tex file into per-tileset graphics/palette blobs.

    Returns {'tilesets': [...], 'tileset_count': n}; each tileset dict holds
    'index', 'RGCN'/'RLCN' (with 'NCGR'/'NCLR' as aliases of the same blobs)
    and an optional 'error'/'warning' key. On any exception an empty result
    with an 'error' string is returned instead of raising.
    """
    try:
        with open(tex_path, 'rb') as f:
            raw = f.read()
        # The container is LZ10-compressed; decompress before any parsing.
        dec = decompress_lz10(raw)
        # Strip an optional 4-byte 'TEX' signature (several observed variants,
        # including a byte-reversed one).
        if dec[:4] in [b'TEX\x00', b'TEX.', b'TEX\xff', b'TEX ', b'\x00XET']:
            dec = dec[4:]
        elif dec[:3] == b'TEX':
            dec = dec[4:]
        try:
            outer_files = parse_narc(dec)
        except ValueError as e:
            # Not a NARC archive: treat the whole payload as one tileset.
            print(f'Warning: TEX not a valid NARC ({e}), treating as single tileset')
            outer_files = [dec]
        tilesets = []
        for i, ts_blob in enumerate(outer_files):
            if not ts_blob:
                tilesets.append({'index': i, 'RGCN': None, 'RLCN': None, 'NCGR': None, 'NCLR': None, 'error': 'Empty tileset data'})
                continue
            inner_files = []
            if ts_blob[:4] == b'NARC':
                # Nested NARC: split into its member files.
                try:
                    inner_files = parse_narc(ts_blob)
                except ValueError:
                    inner_files = [ts_blob]
            elif len(ts_blob) > 8:
                # No NARC header: scan for graphics/palette magics and split
                # the blob at every detected section boundary.
                # NOTE(review): the 3-of-4 partial matching in detect_* makes
                # this splitter permissive — boundaries may be false positives.
                parts = []
                current_start = 0
                for offset in range(4, len(ts_blob) - 4):
                    magic = ts_blob[offset:offset + 4]
                    is_gfx, _ = detect_graphics_magic(magic)
                    is_pal, _ = detect_palette_magic(magic)
                    if is_gfx or is_pal:
                        if current_start < offset:
                            parts.append(ts_blob[current_start:offset])
                        current_start = offset
                if current_start < len(ts_blob):
                    parts.append(ts_blob[current_start:])
                # Only use the split when it actually produced multiple parts.
                if len(parts) > 1:
                    inner_files = parts
                else:
                    inner_files = [ts_blob]
            else:
                inner_files = [ts_blob]
            rgcn, rlcn = classify_tileset_data(inner_files)
            # 'NCGR'/'NCLR' alias the same blobs for caller convenience.
            tileset_entry = {'index': i, 'RGCN': rgcn, 'RLCN': rlcn, 'NCGR': rgcn, 'NCLR': rlcn}
            if not rgcn and (not rlcn):
                tileset_entry['error'] = 'No valid graphics or palette data found'
            elif not rgcn:
                tileset_entry['warning'] = 'Graphics data (RGCN) missing'
            elif not rlcn:
                tileset_entry['warning'] = 'Palette data (RLCN) missing'
            tilesets.append(tileset_entry)
        return {'tilesets': tilesets, 'tileset_count': len(tilesets)}
    except Exception as e:
        print(f'ERROR parsing TEX file: {e}')
        import traceback
        traceback.print_exc()
        return {'tilesets': [], 'tileset_count': 0, 'error': str(e)}
Loading…
Reference in New Issue
Block a user