Astral Chain PKZ repacker made in Python. Requires the Zstandard Python library (obviously). For more modding stuff, visit https://cabalex.github.io/astral-extractor/
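Typical usage, assuming the script is saved as zstd-repacker.py as its own help text suggests: install the zstandard package (pip install zstandard), put the files you want to swap into a replacement/ folder next to the script, then run it as 'zstd-repacker.py <filename>.pkz'. The repacked archive is written next to the original, with 'new' inserted before the .pkz extension.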
import zstandard
import os
import sys
import math
from struct import unpack, pack

print(
    "--- ASTRAL EXTRACTOR PKZ REPACKER ---",
    "\nMade by Cabalex - Companion program to the Astral Extractor",
    "\nhttps://cabalex.github.io/astral-extractor",
    "\nUsing the Python ZStandard library because the JS one only works up to 16 MB.\n",
)
# Make sure the replacement/ folder exists and actually contains files to pack in.
if not os.path.isdir("replacement/"):
    os.mkdir('replacement')
replacementFiles = [x for x in os.listdir('replacement/') if len(x.split(".")) > 1]
if len(replacementFiles) == 0:
    print("No use repacking a file without any files to repack with! Insert any files you wanna replace in the replacement/ folder.")
    print("Curious about what files are in the PKZ you're repacking? Visit the Astral Extractor.")
    exit()
if len(sys.argv) == 1:
    print(
        "--- HELP ---",
        "\nInsert your files that are to be replaced in the .PKZ in the replacement/ folder, e.g. .DAT/.DTT, .BIN, etc.",
        "\nThen, run the program with 'zstd-repacker.py <filename>.pkz' to repack 'em.",
        "\nNOTE: In a lot of cases, you don't need to repack PKZ files - you can just place them in the folder and everything works.",
        "\nHowever, if you are using Ryujinx (Yuzu is fine), or repacking files in core/, event/, or Text/, then you'll need this. :)"
    )
    exit()
filename = sys.argv[1]
f = open(filename, 'rb')
print("[!] Reading PKZ...")
tmpFiles = []
# 32-byte PKZ header, followed by one 32-byte descriptor per packed file.
magic, unk, size, numFiles, offset_file_descriptors, fileNameTableLength = unpack('2IQ2IQ', f.read(32))
for i in range(numFiles):
    unpacked = unpack('4Q', f.read(32))
    tmpFiles.append({
        'nameOffset': unpacked[0],
        'size': unpacked[1],
        'offset': unpacked[2],
        'compressedSize': unpacked[3],
        'kind': 'extracted'
    })
# The name table is a block of null-terminated strings; the first entry is the
# 'ZStandard' marker, the rest are the packed file names.
filestrings = f.read(fileNameTableLength)
fileNames = [x for x in filestrings.decode('utf-8').split("\x00") if x][1:]  # Remove all empty space + 'ZStandard'
files = {}
offset = tmpFiles[0]['offset']
compressor = zstandard.ZstdCompressor()
for i, fname in enumerate(fileNames):
    files[fname] = tmpFiles[i]
    files[fname]['newOffset'] = offset
    if fname in replacementFiles:
        print(f"[+] Found {fname} - Repacking with ZSTD...")
        files[fname]['kind'] = "custom"
        files[fname]['size'] = os.stat(f"replacement/{fname}").st_size
        # Probably not the most optimized but eh
        with open(f"replacement/{fname}", 'rb') as rawf:
            files[fname]['fp'] = compressor.compress(rawf.read())
        files[fname]['compressedSize'] = len(files[fname]['fp'])
    offset += math.ceil(files[fname]['compressedSize']/64)*64  # Padded to 64 byte increments
print("[!] Repacking everything...") | |
# Repack nameTable | |
nameTableOffsets = [] | |
nameTableStr = b"ZStandard\x00\x00\x00\x00\x00\x00\x00" | |
for fname in fileNames: | |
nameTableOffsets.append(len(nameTableStr)) | |
nameTableStr += fname.encode('utf-8') | |
nameTableStr += b''.join([b'\x00'] * (8 - (len(nameTableStr) % 8))) | |
# Repack | |
newf = open(filename.replace(".pkz", "new.pkz"), 'wb') | |
newf.write(pack('2IQ2IQ', magic, unk, 32 + len(nameTableStr) + sum([math.ceil(x['compressedSize']/64)*64 for x in files.values()]), numFiles, 32, len(nameTableStr))) | |
for i, fname in enumerate(fileNames):
    newf.write(pack('4Q', nameTableOffsets[i], files[fname]['size'], files[fname]['newOffset'], files[fname]['compressedSize']))
newf.write(nameTableStr)
if newf.tell() < math.ceil(newf.tell()/64)*64:
    newf.write(b'\x00' * (math.ceil(newf.tell()/64)*64 - newf.tell()))
# Write files and padding
for fname in fileNames:
    if files[fname]['kind'] == 'custom':
        newf.write(files[fname]['fp'])
    else:
        f.seek(files[fname]['offset'])
        newf.write(f.read(files[fname]['compressedSize']))
    if newf.tell() < math.ceil(newf.tell()/64)*64:
        newf.write(b'\x00' * (math.ceil(newf.tell()/64)*64 - newf.tell()))
f.close()
newf.close()
print(f"--- Finished, {len([x for x in files.values() if x['kind'] == 'custom'])} files replaced ({len(fileNames)} files in PKZ) ---")