unstable for now

This commit is contained in:
iAmAsval 2020-02-21 00:12:20 +01:00
parent eba21844aa
commit ae74c6e16f
11 changed files with 604 additions and 224 deletions

View File

@ -326,7 +326,6 @@
<Compile Include="Methods\PakReader\LocReader.cs" />
<Compile Include="Methods\PakReader\MeshExporter.cs" />
<Compile Include="Methods\PakReader\Objects.cs" />
<Compile Include="Methods\PakReader\PakIndex.cs" />
<Compile Include="Methods\PakReader\PakReader.cs" />
<Compile Include="Methods\PakReader\SigReader.cs" />
<Compile Include="Methods\PakReader\ExportObject\UScript\UScriptStruct\FTexturePlatformData\FTexture2DMipMap.cs" />

View File

@ -37,7 +37,7 @@ namespace FModel.Methods.Assets
sb.Append(
"\n- PAK File:\t" + Path.GetFileName(reader.Name) +
"\n- Path:\t\t" + entry.Name +
"\n- Position:\t" + entry.Pos +
"\n- Position:\t" + entry.Offset +
"\n- Size:\t\t" + AssetsUtility.GetReadableSize(entry.UncompressedSize) +
"\n- Encrypted:\t" + entry.Encrypted +
"\n"

View File

@ -17,50 +17,50 @@ namespace FModel.Methods.Assets
public static void SetAssetTranslation(string language)
{
string partialPath = "/FortniteGame/Content/Localization/Game_BR/{0}/Game_BR.locres";
string partialPath = "/FortniteGame/Content/Localization/Game_BR{0}/{1}/Game_BR.locres";
switch (language)
{
case "French":
PopulateDict(string.Format(partialPath, "fr"));
PopulateDict(string.Format(partialPath, string.Empty, "fr"));
break;
case "German":
PopulateDict(string.Format(partialPath, "de"));
PopulateDict(string.Format(partialPath, string.Empty, "de"));
break;
case "Italian":
PopulateDict(string.Format(partialPath, "it"));
PopulateDict(string.Format(partialPath, string.Empty, "it"));
break;
case "Spanish":
PopulateDict(string.Format(partialPath, "es"));
PopulateDict(string.Format(partialPath, string.Empty, "es"));
break;
case "Spanish (LA)":
PopulateDict(string.Format(partialPath, "es-419"));
PopulateDict(string.Format(partialPath, string.Empty, "es-419"));
break;
case "Arabic":
PopulateDict(string.Format(partialPath, "ar"));
PopulateDict(string.Format(partialPath, string.Empty, "ar"));
break;
case "Japanese":
PopulateDict(string.Format(partialPath, "ja"));
PopulateDict(string.Format(partialPath, string.Empty, "ja"));
break;
case "Korean":
PopulateDict(string.Format(partialPath, "ko"));
PopulateDict(string.Format(partialPath, string.Empty, "ko"));
break;
case "Polish":
PopulateDict(string.Format(partialPath, "pl"));
PopulateDict(string.Format(partialPath, string.Empty, "pl"));
break;
case "Portuguese (Brazil)":
PopulateDict(string.Format(partialPath, "pt-BR"));
PopulateDict(string.Format(partialPath, string.Empty, "pt-BR"));
break;
case "Russian":
PopulateDict(string.Format(partialPath, "ru"));
PopulateDict(string.Format(partialPath, string.Empty, "ru"));
break;
case "Turkish":
PopulateDict(string.Format(partialPath, "tr"));
PopulateDict(string.Format(partialPath, string.Empty, "tr"));
break;
case "Chinese (S)":
PopulateDict(string.Format(partialPath, "zh-CN"));
PopulateDict(string.Format(partialPath, string.Empty, "zh-CN"));
break;
case "Traditional Chinese":
PopulateDict(string.Format(partialPath, "zh-Hant"));
PopulateDict(string.Format(partialPath, string.Empty, "zh-Hant"));
break;
default:
if (HotfixLocResDict == null) { SetHotfixedLocResDict(); } //once, no need to do more

View File

@ -64,13 +64,13 @@ namespace FModel.Methods.PAKs
foreach (FPakEntry entry in reader.FileInfos)
{
writer.Write(entry.Pos);
writer.Write(entry.Offset);
writer.Write(entry.Size);
writer.Write(entry.UncompressedSize);
writer.Write(entry.Encrypted);
writer.Write(entry.StructSize);
writer.Write(entry.Name);
writer.Write(entry.CompressionMethod);
writer.Write(entry.CompressionMethodIndex);
}
}
}

View File

@ -274,14 +274,12 @@ namespace FModel.Methods.PAKs
List<FPakEntry> entries = new List<FPakEntry>();
while (reader.BaseStream.Position < reader.BaseStream.Length)
{
FPakEntry entry = new FPakEntry();
entry.Pos = reader.ReadInt64();
entry.Size = reader.ReadInt64();
entry.UncompressedSize = reader.ReadInt64();
entry.Encrypted = reader.ReadBoolean();
entry.StructSize = reader.ReadInt32();
entry.Name = reader.ReadString();
entry.CompressionMethod = reader.ReadInt32();
FPakEntry entry = new FPakEntry(
reader.ReadString(),
reader.ReadInt64(),
reader.ReadInt64(),
reader.ReadInt64(),
reader.ReadBytes(20), null, 0, 0, 0);
entries.Add(entry);
}
BackupEntries = entries.ToArray();

View File

@ -1,49 +1,58 @@
using System;
using System.IO;
using System.Collections.Generic;
using System.Security.Cryptography;
namespace PakReader
{
class AESDecryptor
{
const int AES_KEYBITS = 256;
const int KEY_LENGTH = AES_KEYBITS / 8;
public const int BLOCK_SIZE = 16 * 8; // 128
static readonly Rijndael Cipher;
static readonly Dictionary<byte[], ICryptoTransform> CachedTransforms = new Dictionary<byte[], ICryptoTransform>();
public static byte[] DecryptAES(byte[] data, int size, byte[] key, int keyLen)
static AESDecryptor()
{
if (keyLen <= 0)
{
keyLen = key.Length;
}
Cipher = Rijndael.Create();
Cipher.Mode = CipherMode.ECB;
Cipher.Padding = PaddingMode.Zeros;
Cipher.BlockSize = BLOCK_SIZE;
}
if (keyLen == 0)
static ICryptoTransform GetDecryptor(byte[] key)
{
if (!CachedTransforms.TryGetValue(key, out var ret))
{
throw new ArgumentOutOfRangeException("Trying to decrypt AES block without providing an AES key");
}
if (keyLen < KEY_LENGTH)
{
throw new ArgumentOutOfRangeException("AES key is too short");
}
if ((size & 15) != 0)
{
throw new ArgumentOutOfRangeException("Size is invalid");
}
byte[] ret = new byte[data.Length];
using (Rijndael cipher = Rijndael.Create())
{
cipher.Mode = CipherMode.ECB;
cipher.Padding = PaddingMode.Zeros;
cipher.Key = key;
cipher.BlockSize = 16 * 8;
using (var crypto = cipher.CreateDecryptor())
using (MemoryStream msDecrypt = new MemoryStream(data))
using (CryptoStream csDecrypt = new CryptoStream(msDecrypt, crypto, CryptoStreamMode.Read))
using (StreamReader srDecrypt = new StreamReader(csDecrypt))
csDecrypt.Read(ret, 0, ret.Length);
CachedTransforms[key] = ret = Cipher.CreateDecryptor(key, null);
}
return ret;
}
public static int FindKey(byte[] data, IList<byte[]> keys)
{
byte[] block = new byte[BLOCK_SIZE];
for (int i = 0; i < keys.Count; i++)
{
using (var crypto = GetDecryptor(keys[i]))
crypto.TransformBlock(data, 0, BLOCK_SIZE, block, 0);
int stringLen = BitConverter.ToInt32(block, 0);
if (stringLen > 512 || stringLen < -512)
continue;
if (stringLen < 0)
{
if (BitConverter.ToUInt16(block, (stringLen - 1) * 2 + 4) != 0)
continue;
}
else
{
if (block[stringLen - 1 + 4] != 0)
continue;
}
return i;
}
return -1;
}
public static byte[] DecryptAES(byte[] data, byte[] key) =>
GetDecryptor(key).TransformFinalBlock(data, 0, data.Length);
}
}

View File

@ -288,7 +288,7 @@ namespace PakReader
int l = reader.Reader.ReadInt32();
for(int i = 0; i < l; i++)
{
Map.Add(reader.ReadFName(), reader.Reader.ReadString(-1));
Map.Add(reader.ReadFName(), reader.Reader.ReadFString(-1));
}
}
}

View File

@ -6,48 +6,55 @@ namespace PakReader
{
static class Extensions
{
public static string ReadString(this BinaryReader reader, int maxLength = -1)
public static string ReadFString(this BinaryReader reader, int maxLength = -1)
{
int length = reader.ReadInt32();
if (length > 65536 || length < -65536)
// > 0 for ANSICHAR, < 0 for UCS2CHAR serialization
var SaveNum = reader.ReadInt32();
bool LoadUCS2Char = SaveNum < 0;
if (LoadUCS2Char)
{
throw new IOException($"String length too large ({length}), likely a read error.");
// If SaveNum cannot be negated due to integer overflow, Ar is corrupted.
if (SaveNum == int.MinValue)
{
throw new FileLoadException("Archive is corrupted");
}
SaveNum = -SaveNum;
}
if (maxLength != -1 && Math.Abs(length) > maxLength)
if (SaveNum == 0) return string.Empty;
// 1 byte is removed because of null terminator (\0)
if (LoadUCS2Char)
{
throw new ArgumentOutOfRangeException("String exceeded max length");
}
if (length < 0)
{
length *= -1;
ushort[] data = new ushort[length];
for (int i = 0; i < length; i++)
ushort[] data = new ushort[SaveNum];
for (int i = 0; i < SaveNum; i++)
{
data[i] = reader.ReadUInt16();
}
unsafe
{
fixed (ushort* dataPtr = &data[0])
return new string((char*)dataPtr, 0, data.Length);
return new string((char*)dataPtr, 0, data.Length - 1);
}
}
else
{
byte[] bytes = reader.ReadBytes(length);
byte[] bytes = reader.ReadBytes(SaveNum);
if (bytes.Length == 0) return string.Empty;
return Encoding.UTF8.GetString(bytes).Substring(0, length - 1);
return Encoding.UTF8.GetString(bytes).Substring(0, SaveNum - 1);
}
}
public static T[] ReadTArray<T>(this BinaryReader reader, Func<T> getter)
public static T[] ReadTArray<T>(this BinaryReader reader, Func<T> Getter)
{
int length = reader.ReadInt32();
T[] container = new T[length];
for (int i = 0; i < length; i++)
int SerializeNum = reader.ReadInt32();
T[] A = new T[SerializeNum];
for (int i = 0; i < SerializeNum; i++)
{
container[i] = getter();
A[i] = Getter();
}
return container;
return A;
}
public static byte[] SubArray(this byte[] inp, int offset, int length)

View File

@ -51,14 +51,14 @@ namespace PakReader
const int EncryptionAlign = 16; // AES-specific constant
const int EncryptedBufferSize = 256; //?? TODO: check - may be value 16 will be better for performance
public FPakFile(BinaryReader Reader, BasePakEntry Info, byte[] key = null)
public FPakFile(BinaryReader Reader, FPakEntry Info, byte[] key = null)
{
Reader.BaseStream.Seek(Info.Pos + Info.StructSize, SeekOrigin.Begin);
Reader.BaseStream.Seek(Info.Offset + Info.StructSize, SeekOrigin.Begin);
if (Info.Encrypted)
{
long encSize = (Info.Size & 15) == 0 ? Info.Size : ((Info.Size / 16) + 1) * 16;
byte[] encBuffer = Reader.ReadBytes((int)encSize);
data = AESDecryptor.DecryptAES(encBuffer, (int)encSize, key, key.Length).SubArray(0, (int)Info.UncompressedSize);
data = AESDecryptor.DecryptAES(encBuffer, key).SubArray(0, (int)Info.UncompressedSize);
if (encSize != Info.Size)
{
data = data.SubArray(0, (int)Info.UncompressedSize);
@ -191,7 +191,7 @@ namespace PakReader
public Stream GetStream() => new MemoryStream(data);
}
internal enum PAK_VERSION
public enum PAK_VERSION
{
PAK_INITIAL = 1,
PAK_NO_TIMESTAMPS = 2,
@ -201,11 +201,50 @@ namespace PakReader
PAK_DELETE_RECORDS = 6, // UE4.21+ - this constant is not used in UE4 code
PAK_ENCRYPTION_KEY_GUID = 7, // ... allows to use multiple encryption keys over the single project
PAK_FNAME_BASED_COMPRESSION_METHOD = 8, // UE4.22+ - use string instead of enum for compression method
PAK_FROZEN_INDEX = 9,
PAK_PATH_HASH_INDEX = 10,
PAK_LATEST_PLUS_ONE,
PAK_LATEST = PAK_LATEST_PLUS_ONE - 1
}
public enum ECompressionFlags : uint
{
/** No compression */
COMPRESS_None = 0x00,
/** Compress with ZLIB - DEPRECATED, USE FNAME */
COMPRESS_ZLIB = 0x01,
/** Compress with GZIP - DEPRECATED, USE FNAME */
COMPRESS_GZIP = 0x02,
/** Compress with user defined callbacks - DEPRECATED, USE FNAME */
COMPRESS_Custom = 0x04,
/** Joint of the previous ones to determine if old flags are being used */
COMPRESS_DeprecatedFormatFlagsMask = 0xF,
/** No flags specified / */
COMPRESS_NoFlags = 0x00,
/** Prefer compression that compresses smaller (ONLY VALID FOR COMPRESSION) */
COMPRESS_BiasMemory = 0x10,
/** Prefer compression that compresses faster (ONLY VALID FOR COMPRESSION) */
COMPRESS_BiasSpeed = 0x20,
/** Is the source buffer padded out (ONLY VALID FOR UNCOMPRESS) */
COMPRESS_SourceIsPadded = 0x80,
/** Set of flags that are options are still allowed */
COMPRESS_OptionsFlagsMask = 0xF0,
}
public struct FSHAHash
{
    // Raw 20-byte SHA-1 digest exactly as stored in the pak index.
    public readonly byte[] Hash;

    // Reads the next 20 bytes of the archive as the hash value.
    internal FSHAHash(BinaryReader reader)
    {
        Hash = reader.ReadBytes(20);
    }
}
public abstract class BasePakEntry
{
public long Pos;
@ -216,75 +255,171 @@ namespace PakReader
public int StructSize;
}
public class FPakEntry : BasePakEntry, IEquatable<FPakEntry>
public struct FPakEntry : IEquatable<FPakEntry>
{
public string Name;
public int CompressionMethod;
// public byte[] Hash; // 20 bytes
// public FPakCompressedBlock[] CompressionBlocks;
// public int CompressionBlockSize;
const byte Flag_None = 0x00;
const byte Flag_Encrypted = 0x01;
const byte Flag_Deleted = 0x02;
public FPakEntry(BinaryReader reader, string mountPoint, int pakVersion)
public bool Encrypted => (Flags & Flag_Encrypted) != 0;
public bool Deleted => (Flags & Flag_Deleted) != 0;
public readonly string Name;
public readonly long Offset;
public readonly long Size;
public readonly long UncompressedSize;
public readonly byte[] Hash; // why isn't this an FShaHash?
public readonly FPakCompressedBlock[] CompressionBlocks;
public readonly uint CompressionBlockSize;
public readonly uint CompressionMethodIndex;
public readonly byte Flags;
public readonly int StructSize;
internal FPakEntry(BinaryReader reader, string mountPoint, PAK_VERSION Version)
{
//replace .umap to .uasset to serialize umap files
//this will be refactored with the updated pak reader
Name = mountPoint + reader.ReadString(FPakInfo.MAX_PACKAGE_PATH).Replace(".umap", ".uasset");
CompressionBlocks = null;
CompressionBlockSize = 0;
Flags = 0;
// FPakEntry is duplicated before each stored file, without a filename. So,
// remember the serialized size of this structure to avoid recomputation later.
long StartOffset = reader.BaseStream.Position;
Pos = reader.ReadInt64();
Name = mountPoint + reader.ReadFString(FPakInfo.MAX_PACKAGE_PATH).Replace(".umap", ".uasset");
var StartOffset = reader.BaseStream.Position;
Offset = reader.ReadInt64();
Size = reader.ReadInt64();
UncompressedSize = reader.ReadInt64();
CompressionMethod = reader.ReadInt32();
if (pakVersion < (int)PAK_VERSION.PAK_NO_TIMESTAMPS)
if (Version < PAK_VERSION.PAK_FNAME_BASED_COMPRESSION_METHOD)
{
long timestamp = reader.ReadInt64();
}
/*Hash = */reader.ReadBytes(20);
if (pakVersion >= (int)PAK_VERSION.PAK_COMPRESSION_ENCRYPTION)
{
if (CompressionMethod != 0)
var LegacyCompressionMethod = reader.ReadInt32();
if (LegacyCompressionMethod == (int)ECompressionFlags.COMPRESS_None)
{
/*CompressionBlocks = */reader.ReadTArray(() => new FPakCompressedBlock(reader));
CompressionMethodIndex = 0;
}
Encrypted = reader.ReadBoolean();
/* CompressionBlockSize = */reader.ReadInt32();
}
if (pakVersion >= (int)PAK_VERSION.PAK_RELATIVE_CHUNK_OFFSETS)
{
// Convert relative compressed offsets to absolute
/*
for (int i = 0; i < CompressionBlocks?.Length; i++)
else if ((LegacyCompressionMethod & (int)ECompressionFlags.COMPRESS_ZLIB) != 0)
{
CompressionBlocks[i].CompressedStart += Pos;
CompressionBlocks[i].CompressedEnd += Pos;
CompressionMethodIndex = 1;
}
*/
else if ((LegacyCompressionMethod & (int)ECompressionFlags.COMPRESS_GZIP) != 0)
{
CompressionMethodIndex = 2;
}
else if ((LegacyCompressionMethod & (int)ECompressionFlags.COMPRESS_Custom) != 0)
{
CompressionMethodIndex = 3;
}
else
{
// https://github.com/EpicGames/UnrealEngine/blob/8b6414ae4bca5f93b878afadcc41ab518b09984f/Engine/Source/Runtime/PakFile/Public/IPlatformFilePak.h#L441
throw new FileLoadException(@"Found an unknown compression type in pak file, will need to be supported for legacy files");
}
}
else
{
CompressionMethodIndex = reader.ReadUInt32();
}
if (Version <= PAK_VERSION.PAK_INITIAL)
{
// Timestamp of type FDateTime, but the serializer only reads to the Ticks property (int64)
reader.ReadInt64();
}
Hash = reader.ReadBytes(20);
if (Version >= PAK_VERSION.PAK_COMPRESSION_ENCRYPTION)
{
if (CompressionMethodIndex != 0)
{
CompressionBlocks = reader.ReadTArray(() => new FPakCompressedBlock(reader));
}
Flags = reader.ReadByte();
CompressionBlockSize = reader.ReadUInt32();
}
// Used to seek ahead to the file data instead of parsing the entry again
StructSize = (int)(reader.BaseStream.Position - StartOffset);
}
// difference mode
internal FPakEntry(BinaryReader reader, string mountPoint)
{
CompressionBlocks = null;
CompressionBlockSize = 0;
Flags = 0;
Name = mountPoint + reader.ReadFString(FPakInfo.MAX_PACKAGE_PATH).Replace(".umap", ".uasset");
var StartOffset = reader.BaseStream.Position;
Offset = reader.ReadInt64();
Size = reader.ReadInt64();
UncompressedSize = reader.ReadInt64();
CompressionMethodIndex = reader.ReadUInt32();
Hash = reader.ReadBytes(20);
if (CompressionMethodIndex != 0)
{
CompressionBlocks = reader.ReadTArray(() => new FPakCompressedBlock(reader));
}
Flags = reader.ReadByte();
CompressionBlockSize = reader.ReadUInt32();
// Used to seek ahead to the file data instead of parsing the entry again
StructSize = (int)(reader.BaseStream.Position - StartOffset);
}
internal FPakEntry(string name, long offset, long size, long uncompressedSize, byte[] hash, FPakCompressedBlock[] compressionBlocks, uint compressionBlockSize, uint compressionMethodIndex, byte flags)
{
Name = name;
Offset = offset;
Size = size;
UncompressedSize = uncompressedSize;
Hash = hash;
CompressionBlocks = compressionBlocks;
CompressionBlockSize = compressionBlockSize;
CompressionMethodIndex = compressionMethodIndex;
Flags = flags;
StructSize = (int)GetSize(PAK_VERSION.PAK_LATEST, compressionMethodIndex, (uint)compressionBlocks.Length);
}
public static long GetSize(PAK_VERSION version, uint CompressionMethodIndex = 0, uint CompressionBlocksCount = 0)
{
long SerializedSize = sizeof(long) + sizeof(long) + sizeof(long) + 20;
if (version >= PAK_VERSION.PAK_FNAME_BASED_COMPRESSION_METHOD)
{
SerializedSize += sizeof(uint);
}
else
{
SerializedSize += sizeof(int); // Old CompressedMethod var from pre-fname based compression methods
}
if (version >= PAK_VERSION.PAK_COMPRESSION_ENCRYPTION)
{
SerializedSize += sizeof(byte) + sizeof(uint);
if (CompressionMethodIndex != 0)
{
SerializedSize += sizeof(long) * 2 * CompressionBlocksCount + sizeof(int);
}
}
if (version < PAK_VERSION.PAK_NO_TIMESTAMPS)
{
// Timestamp
SerializedSize += sizeof(long);
}
return SerializedSize;
}
public bool Equals(FPakEntry other)
{
if (other is null)
if (other.GetType() != typeof(FPakEntry))
return false;
return FProp.Default.FDiffFileSize ? this.Name == other.Name && this.UncompressedSize == other.UncompressedSize : this.Name == other.Name;
}
public override bool Equals(object obj) => Equals(obj as FPakEntry);
public override int GetHashCode() => FProp.Default.FDiffFileSize ? (Name, UncompressedSize).GetHashCode() : (Name).GetHashCode();
public override bool Equals(object obj) => Equals((FPakEntry)obj);
public FPakEntry() { } // xml file
public override int GetHashCode() => FProp.Default.FDiffFileSize ? (Name, UncompressedSize).GetHashCode() : (Name).GetHashCode();
}
internal struct FPakCompressedBlock
public struct FPakCompressedBlock
{
public long CompressedStart;
public long CompressedEnd;
@ -294,6 +429,11 @@ namespace PakReader
CompressedStart = reader.ReadInt64();
CompressedEnd = reader.ReadInt64();
}
public FPakCompressedBlock(long compressedStart, long compressedEnd)
{
CompressedStart = compressedStart;
CompressedEnd = compressedEnd;
}
}
internal struct FString

View File

@ -1,81 +0,0 @@
using SkiaSharp;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
namespace PakReader
{
public class PakIndex : IEnumerable<(string Path, PakPackage Package)>
{
    // Lower-cased, extension-less package path -> package grouping every extension of that asset.
    readonly ConcurrentDictionary<string, PakPackage> index = new ConcurrentDictionary<string, PakPackage>();

    // Splits "Some/Path/Asset.uasset" into ("some/path/asset", "uasset").
    // Fix: a name without a '.' used to crash (LastIndexOf returns -1 -> Substring(0, -1) throws);
    // such entries now map to an empty extension instead.
    static (string Path, string Extension) GetPath(string inp)
    {
        int extInd = inp.LastIndexOf('.');
        if (extInd < 0)
        {
            return (inp.ToLowerInvariant(), string.Empty);
        }
        return (inp.Substring(0, extInd).ToLowerInvariant(), inp.Substring(extInd + 1).ToLowerInvariant());
    }

    // Registers (entry, reader) for one extension of a package and returns the package for chaining.
    static PakPackage InsertEntry(BasePakEntry entry, PakPackage package, string extension, PakReader reader)
    {
        package.Extensions[extension] = (entry, reader);
        return package;
    }

    public void AddPak(string file, byte[] aes = null) => AddPak(new PakReader(file, aes));
    public void AddPak(Stream stream, string name, byte[] aes = null) => AddPak(new PakReader(stream, name, aes));

    // Indexes every file of an already-parsed pak.
    // Fix: the previous ContainsKey-then-insert was a non-atomic check-then-act on the
    // ConcurrentDictionary; GetOrAdd makes the lookup-or-create atomic and removes the
    // duplicated InsertEntry call. (PakPackage.Extensions itself is still not thread-safe.)
    public void AddPak(PakReader reader)
    {
        foreach (var info in reader.FileInfos)
        {
            var path = GetPath(info.Name);
            InsertEntry(info, index.GetOrAdd(path.Path, _ => new PakPackage()), path.Extension, reader);
        }
    }

    // Lookup is case-insensitive because keys are stored lower-cased; returns null when absent.
    public PakPackage GetPackage(string name) => index.TryGetValue(name.ToLowerInvariant(), out PakPackage ret) ? ret : null;

    public IEnumerator<(string Path, PakPackage Package)> GetEnumerator()
    {
        foreach (var kv in index)
        {
            yield return (kv.Key, kv.Value);
        }
    }

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}
public sealed class PakPackage
{
    // Maps a file extension (e.g. "uasset") to the pak entry holding it and the reader able to open it.
    public SortedList<string, (BasePakEntry Entry, PakReader Reader)> Extensions = new SortedList<string, (BasePakEntry Entry, PakReader Reader)>();

    // Parsed exports of the package; null when the package cannot be exported.
    public ExportObject[] Exports => GetAssetReader(true)?.Exports;

    // Builds an AssetReader over the uasset/uexp/ubulk streams, or null when not exportable.
    public AssetReader GetAssetReader(bool ignoreErrors = false)
    {
        if (!Exportable)
        {
            return null;
        }
        return new AssetReader(GetPackageStream("uasset"), GetPackageStream("uexp"), GetPackageStream("ubulk"), ignoreErrors);
    }

    // A package needs both its header (uasset) and its export data (uexp) to be readable.
    public bool Exportable => HasExtension("uasset") && HasExtension("uexp");

    public bool HasExtension(string extension) => Extensions.ContainsKey(extension);

    // Opens the stream for one extension of this package; null when that extension is absent.
    public Stream GetPackageStream(string extension)
    {
        if (Extensions.TryGetValue(extension, out var found))
        {
            return found.Reader.GetPackageStream(found.Entry);
        }
        return null;
    }

    public UObject GetUObject() => Exports[0] as UObject;

    // Decodes the first export as a texture; null when it is not a Texture2D.
    public SKImage GetTexture()
    {
        if (Exports[0] is Texture2D tex)
        {
            var texture = tex.textures[0];
            return ImageExporter.GetImage(texture.mips[0], texture.pixel_format);
        }
        return null;
    }
}
}

View File

@ -1,5 +1,6 @@
using FModel.Methods.Utilities;
using System;
using System.Collections.Generic;
using System.IO;
namespace PakReader
@ -10,7 +11,7 @@ namespace PakReader
readonly BinaryReader Reader;
readonly byte[] Aes;
public readonly string MountPoint;
public readonly FPakEntry[] FileInfos;
public FPakEntry[] FileInfos;
public readonly string Name;
public PakReader(string file, byte[] aes = null, bool ParseFiles = true) : this(File.OpenRead(file), file, aes, ParseFiles) { }
@ -55,7 +56,7 @@ namespace PakReader
if (info.bEncryptedIndex != 0)
{
var InfoBlock = Reader.ReadBytes((int)info.IndexSize);
InfoBlock = AESDecryptor.DecryptAES(InfoBlock, (int)info.IndexSize, Aes, Aes.Length);
InfoBlock = AESDecryptor.DecryptAES(InfoBlock, Aes);
infoReader = new BinaryReader(new MemoryStream(InfoBlock));
int stringLen = infoReader.ReadInt32();
@ -90,8 +91,7 @@ namespace PakReader
// Pak index reading time :)
infoReader.BaseStream.Seek(0, SeekOrigin.Begin);
MountPoint = infoReader.ReadString(FPakInfo.MAX_PACKAGE_PATH);
MountPoint = infoReader.ReadFString(FPakInfo.MAX_PACKAGE_PATH);
bool badMountPoint = false;
if (!MountPoint.StartsWith("../../.."))
{
@ -111,17 +111,325 @@ namespace PakReader
DebugHelper.WriteLine($".PAKs: WARNING: Pak \"{Name}\" has strange mount point \"{MountPoint}\", mounting to root");
MountPoint = "/";
}
FileInfos = new FPakEntry[infoReader.ReadInt32()];
for (int i = 0; i < FileInfos.Length; i++)
if (info.Version >= (int)PAK_VERSION.PAK_PATH_HASH_INDEX)
{
FileInfos[i] = new FPakEntry(infoReader, MountPoint, info.Version);
ReadIndexUpdated(infoReader, MountPoint, info, Aes, Stream.Length);
}
else
{
FileInfos = new FPakEntry[infoReader.ReadInt32()];
for (int i = 0; i < FileInfos.Length; i++)
{
FileInfos[i] = new FPakEntry(infoReader, MountPoint, (PAK_VERSION)info.Version);
}
}
}
public string GetFile(int i) => FileInfos[i].Name;
void ReadIndexUpdated(BinaryReader reader, string mountPoint, FPakInfo info, byte[] key, long totalSize)
{
int NumEntries = reader.ReadInt32();
ulong PathHashSeed = reader.ReadUInt64();
public Stream GetPackageStream(BasePakEntry entry)
bool bReaderHasPathHashIndex = false;
long PathHashIndexOffset = -1; // INDEX_NONE
long PathHashIndexSize = 0;
FSHAHash PathHashIndexHash = default;
bReaderHasPathHashIndex = reader.ReadInt32() != 0;
if (bReaderHasPathHashIndex)
{
PathHashIndexOffset = reader.ReadInt64();
PathHashIndexSize = reader.ReadInt64();
PathHashIndexHash = new FSHAHash(reader);
bReaderHasPathHashIndex = bReaderHasPathHashIndex && PathHashIndexOffset != -1;
}
bool bReaderHasFullDirectoryIndex = false;
long FullDirectoryIndexOffset = -1; // INDEX_NONE
long FullDirectoryIndexSize = 0;
FSHAHash FullDirectoryIndexHash = default;
bReaderHasFullDirectoryIndex = reader.ReadInt32() != 0;
if (bReaderHasFullDirectoryIndex)
{
FullDirectoryIndexOffset = reader.ReadInt64();
FullDirectoryIndexSize = reader.ReadInt64();
FullDirectoryIndexHash = new FSHAHash(reader);
bReaderHasFullDirectoryIndex = bReaderHasFullDirectoryIndex && FullDirectoryIndexOffset != -1;
}
byte[] EncodedPakEntries = reader.ReadTArray(() => reader.ReadByte());
int FilesNum = reader.ReadInt32();
if (FilesNum < 0)
// Should not be possible for any values in the PrimaryIndex to be invalid, since we verified the index hash
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
FPakEntry[] Files = new FPakEntry[FilesNum]; // from what i can see, there aren't any???
if (FilesNum > 0)
for (int FileIndex = 0; FileIndex < FilesNum; ++FileIndex)
Files[FileIndex] = new FPakEntry(reader, mountPoint, (PAK_VERSION)info.Version);
// Decide which SecondaryIndex(es) to load
bool bWillUseFullDirectoryIndex;
bool bWillUsePathHashIndex;
bool bReadFullDirectoryIndex;
if (bReaderHasPathHashIndex && bReaderHasFullDirectoryIndex)
{
bWillUseFullDirectoryIndex = false;
bWillUsePathHashIndex = !bWillUseFullDirectoryIndex;
bool bWantToReadFullDirectoryIndex = false;
bReadFullDirectoryIndex = bReaderHasFullDirectoryIndex && bWantToReadFullDirectoryIndex;
}
else if (bReaderHasPathHashIndex)
{
bWillUsePathHashIndex = true;
bWillUseFullDirectoryIndex = false;
bReadFullDirectoryIndex = false;
}
else if (bReaderHasFullDirectoryIndex)
{
// We don't support creating the PathHash Index at runtime; we want to move to having only the PathHashIndex, so supporting not having it at all is not useful enough to write
bWillUsePathHashIndex = false;
bWillUseFullDirectoryIndex = true;
bReadFullDirectoryIndex = true;
}
else
// It should not be possible for PrimaryIndexes to be built without a PathHashIndex AND without a FullDirectoryIndex; CreatePakFile in UnrealPak.exe has a check statement for it.
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
// Load the Secondary Index(es)
byte[] PathHashIndexData;
Dictionary<ulong, int> PathHashIndex;
BinaryReader PathHashIndexReader = default;
if (bWillUsePathHashIndex)
{
if (PathHashIndexOffset < 0 || totalSize < (PathHashIndexOffset + PathHashIndexSize))
// Should not be possible for these values (which came from the PrimaryIndex) to be invalid, since we verified the index hash of the PrimaryIndex
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
Reader.BaseStream.Position = PathHashIndexOffset;
PathHashIndexData = Reader.ReadBytes((int)PathHashIndexSize);
{
if (!DecryptAndValidateIndex(info.bEncryptedIndex != 0, ref PathHashIndexData, key, PathHashIndexHash, out var ComputedHash))
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
}
PathHashIndexReader = new BinaryReader(new MemoryStream(PathHashIndexData));
PathHashIndex = ReadPathHashIndex(PathHashIndexReader);
}
var DirectoryIndex = new Dictionary<string, Dictionary<string, int>>();
if (!bReadFullDirectoryIndex)
{
DirectoryIndex = ReadDirectoryIndex(PathHashIndexReader);
}
if (DirectoryIndex.Count == 0)
{
if (totalSize < (FullDirectoryIndexOffset + FullDirectoryIndexSize) || FullDirectoryIndexOffset < 0)
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
Reader.BaseStream.Position = FullDirectoryIndexOffset;
byte[] FullDirectoryIndexData = Reader.ReadBytes((int)FullDirectoryIndexSize);
{
if (!DecryptAndValidateIndex(info.bEncryptedIndex != 0, ref FullDirectoryIndexData, key, FullDirectoryIndexHash, out var ComputedHash))
throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
}
var SecondaryIndexReader = new BinaryReader(new MemoryStream(FullDirectoryIndexData));
DirectoryIndex = ReadDirectoryIndex(SecondaryIndexReader);
}
var entries = new List<FPakEntry>(NumEntries);
foreach (var stringDict in DirectoryIndex)
{
foreach (var stringInt in stringDict.Value)
{
string path = stringDict.Key + stringInt.Key;
FPakEntry entry = GetEntry(mountPoint + path, stringInt.Value, EncodedPakEntries);
entries.Add(entry);
}
}
this.FileInfos = entries.ToArray();
}
// Decodes one bit-packed pak entry from the encoded-entries blob produced for
// PAK_PATH_HASH_INDEX (v10) paks. pakLocation is a byte offset into
// encodedPakEntries; a negative value denotes an index into a non-encoded
// entry list, which this reader does not support.
FPakEntry GetEntry(string name, int pakLocation, byte[] encodedPakEntries)
{
    if (pakLocation >= 0)
    {
        // Grab the big bitfield value:
        // Bit 31 = Offset 32-bit safe?
        // Bit 30 = Uncompressed size 32-bit safe?
        // Bit 29 = Size 32-bit safe?
        // Bits 28-23 = Compression method
        // Bit 22 = Encrypted
        // Bits 21-6 = Compression blocks count
        // Bits 5-0 = Compression block size
        // Filter out the CompressionMethod.
        long Offset, UncompressedSize, Size;
        uint CompressionMethodIndex, CompressionBlockSize;
        bool Encrypted, Deleted;
        uint Value = BitConverter.ToUInt32(encodedPakEntries, pakLocation);
        pakLocation += sizeof(uint);
        CompressionMethodIndex = ((Value >> 23) & 0x3f);
        // Test for 32-bit safe values. Grab it, or memcpy the 64-bit value
        // to avoid alignment exceptions on platforms requiring 64-bit alignment
        // for 64-bit variables.
        //
        // Read the Offset.
        bool bIsOffset32BitSafe = (Value & (1 << 31)) != 0;
        if (bIsOffset32BitSafe)
        {
            Offset = BitConverter.ToUInt32(encodedPakEntries, pakLocation);
            pakLocation += sizeof(uint);
        }
        else
        {
            Offset = BitConverter.ToInt64(encodedPakEntries, pakLocation);
            pakLocation += sizeof(long);
        }
        // Read the UncompressedSize.
        bool bIsUncompressedSize32BitSafe = (Value & (1 << 30)) != 0;
        if (bIsUncompressedSize32BitSafe)
        {
            UncompressedSize = BitConverter.ToUInt32(encodedPakEntries, pakLocation);
            pakLocation += sizeof(uint);
        }
        else
        {
            UncompressedSize = BitConverter.ToInt64(encodedPakEntries, pakLocation);
            pakLocation += sizeof(long);
        }
        // Fill in the Size.
        if (CompressionMethodIndex != 0)
        {
            // Size is only present if compression is applied.
            bool bIsSize32BitSafe = (Value & (1 << 29)) != 0;
            if (bIsSize32BitSafe)
            {
                Size = BitConverter.ToUInt32(encodedPakEntries, pakLocation);
                pakLocation += sizeof(uint);
            }
            else
            {
                Size = BitConverter.ToInt64(encodedPakEntries, pakLocation);
                pakLocation += sizeof(long);
            }
        }
        else
        {
            // The Size is the same thing as the UncompressedSize when
            // CompressionMethod == COMPRESS_None.
            Size = UncompressedSize;
        }
        // Filter the encrypted flag.
        Encrypted = (Value & (1 << 22)) != 0;
        // This should clear out any excess CompressionBlocks that may be valid in the user's
        // passed in entry.
        var CompressionBlocksCount = (Value >> 6) & 0xffff;
        FPakCompressedBlock[] CompressionBlocks = new FPakCompressedBlock[CompressionBlocksCount];
        // Filter the compression block size or use the UncompressedSize if less that 64k.
        CompressionBlockSize = 0;
        if (CompressionBlocksCount > 0)
        {
            CompressionBlockSize = UncompressedSize < 65536 ? (uint)UncompressedSize : ((Value & 0x3f) << 11);
        }
        // Set bDeleteRecord to false, because it obviously isn't deleted if we are here.
        Deleted = false;
        // Base offset to the compressed data
        // NOTE(review): the 'true ?' branch is intentionally dead code mirroring the UE source,
        // where relative chunk offsets (Version >= PakFile_Version_RelativeChunkOffsets) force 0.
        long BaseOffset = true ? 0 : Offset; // HasRelativeCompressedChunkOffsets -> Version >= PakFile_Version_RelativeChunkOffsets
        // Handle building of the CompressionBlocks array.
        if (CompressionBlocks.Length == 1 && !Encrypted)
        {
            // If the number of CompressionBlocks is 1, we didn't store any extra information.
            // Derive what we can from the entry's file offset and size.
            var start = BaseOffset + FPakEntry.GetSize(PAK_VERSION.PAK_LATEST, CompressionMethodIndex, CompressionBlocksCount);
            CompressionBlocks[0] = new FPakCompressedBlock(start, start + Size);
        }
        else if (CompressionBlocks.Length > 0)
        {
            // Get the right pointer to start copying the CompressionBlocks information from.
            // Alignment of the compressed blocks
            // NOTE(review): AESDecryptor.BLOCK_SIZE is 128 (declared as 16 * 8, i.e. bits);
            // UE aligns encrypted blocks to 16 bytes — confirm this constant is intended here.
            var CompressedBlockAlignment = Encrypted ? AESDecryptor.BLOCK_SIZE : 1;
            // CompressedBlockOffset is the starting offset. Everything else can be derived from there.
            long CompressedBlockOffset = BaseOffset + FPakEntry.GetSize(PAK_VERSION.PAK_LATEST, CompressionMethodIndex, CompressionBlocksCount);
            for (int CompressionBlockIndex = 0; CompressionBlockIndex < CompressionBlocks.Length; ++CompressionBlockIndex)
            {
                // Only the compressed size of each block is stored; start offsets are accumulated.
                CompressionBlocks[CompressionBlockIndex] = new FPakCompressedBlock(CompressedBlockOffset, CompressedBlockOffset + BitConverter.ToUInt32(encodedPakEntries, pakLocation));
                pakLocation += sizeof(uint);
                {
                    var toAlign = CompressionBlocks[CompressionBlockIndex].CompressedEnd - CompressionBlocks[CompressionBlockIndex].CompressedStart;
                    CompressedBlockOffset += toAlign + CompressedBlockAlignment - (toAlign % CompressedBlockAlignment);
                }
            }
        }
        // Hash is not stored in the encoded form, so a zeroed 20-byte placeholder is used.
        return new FPakEntry(name, Offset, Size, UncompressedSize, new byte[20], CompressionBlocks, CompressionBlockSize, CompressionMethodIndex, (byte)((Encrypted ? 0x01 : 0x00) | (Deleted ? 0x02 : 0x00)));
    }
    else
    {
        pakLocation = -(pakLocation + 1);
        throw new FileLoadException("list indexes aren't supported");
    }
}
// Deserializes the path-hash index: a serialized array of (uint64 path hash,
// int32 entry location) pairs, returned as a dictionary.
Dictionary<ulong, int> ReadPathHashIndex(BinaryReader reader)
{
    var pairs = reader.ReadTArray(() => (reader.ReadUInt64(), reader.ReadInt32()));
    var result = new Dictionary<ulong, int>(pairs.Length);
    foreach (var (hash, entryLocation) in pairs)
    {
        result[hash] = entryLocation;
    }
    return result;
}
// Deserializes a directory index: each serialized pair maps a directory name
// (FString) to its file dictionary (see ReadFPakDirectory).
Dictionary<string, Dictionary<string, int>> ReadDirectoryIndex(BinaryReader reader)
{
    var pairs = reader.ReadTArray(() => (reader.ReadFString(), ReadFPakDirectory(reader)));
    var result = new Dictionary<string, Dictionary<string, int>>(pairs.Length);
    foreach (var (directory, files) in pairs)
    {
        result[directory] = files;
    }
    return result;
}
// Deserializes one directory's contents: serialized (file name, int32 entry
// location) pairs, returned as a dictionary.
Dictionary<string, int> ReadFPakDirectory(BinaryReader reader)
{
    var pairs = reader.ReadTArray(() => (reader.ReadFString(), reader.ReadInt32()));
    var result = new Dictionary<string, int>(pairs.Length);
    foreach (var (fileName, entryLocation) in pairs)
    {
        result[fileName] = entryLocation;
    }
    return result;
}
// Decrypts IndexData in place when the index is encrypted.
// NOTE(review): despite the name, no hash validation is performed — OutHash
// merely echoes ExpectedHash and the method always returns true. TODO: compute
// the SHA-1 of IndexData and compare it against ExpectedHash before trusting
// the index.
bool DecryptAndValidateIndex(bool bEncryptedIndex, ref byte[] IndexData, byte[] aesKey, FSHAHash ExpectedHash, out FSHAHash OutHash)
{
    if (bEncryptedIndex)
    {
        IndexData = AESDecryptor.DecryptAES(IndexData, aesKey);
    }
    OutHash = ExpectedHash;
    return true;
}
public Stream GetPackageStream(FPakEntry entry)
{
lock (Reader)
{
@ -129,12 +437,12 @@ namespace PakReader
}
}
public void Export(BasePakEntry uasset, BasePakEntry uexp, BasePakEntry ubulk)
public void Export(FPakEntry uasset, FPakEntry uexp, FPakEntry ubulk)
{
if (uasset == null || uexp == null) return;
if (uasset.GetType() != typeof(FPakEntry) || uexp.GetType() != typeof(FPakEntry)) return;
var assetStream = new FPakFile(Reader, uasset, Aes).GetStream();
var expStream = new FPakFile(Reader, uexp, Aes).GetStream();
var bulkStream = ubulk == null ? null : new FPakFile(Reader, ubulk, Aes).GetStream();
var bulkStream = ubulk.GetType() != typeof(FPakEntry) ? null : new FPakFile(Reader, ubulk, Aes).GetStream();
try
{