
Commit 43aecfe
Merge pull request atenfyr#93 from atenfyr/LoadUEXP
Adds the option to forego loading the uexp when constructing a UAsset.
atenfyr authored Oct 26, 2024
2 parents 4888246 + 73d00d2 commit 43aecfe
Showing 3 changed files with 128 additions and 82 deletions.
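The new behavior is easiest to see with a small usage sketch (illustrative only, not part of the diff; the asset path and engine version are made up, and the types come from UAssetAPI's existing public API):

using System;
using UAssetAPI;
using UAssetAPI.UnrealTypes;

// Pass false for loadUexp: only the .uasset summary is parsed and the .uexp file is never opened.
UAsset headerOnly = new UAsset("Content/Maps/MyLevel.uasset", false, EngineVersion.VER_UE4_27);

// Data stored in the .uasset itself (name map, imports, export table entries) is still populated.
Console.WriteLine($"{headerOnly.Imports.Count} imports, {headerOnly.Exports.Count} exports");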
UAssetAPI/AssetBinaryReader.cs (7 changes: 7 additions & 0 deletions)
@@ -253,11 +253,18 @@ public List<CustomVersion> ReadCustomVersionContainer(ECustomVersionSerializatio
    public class AssetBinaryReader : UnrealBinaryReader
    {
        public UnrealPackage Asset;
+        public bool LoadUexp = true;

        public AssetBinaryReader(Stream stream, UnrealPackage asset = null) : base(stream)
        {
            Asset = asset;
        }
+
+        public AssetBinaryReader(Stream stream, bool inLoadUexp, UnrealPackage asset = null) : base(stream)
+        {
+            Asset = asset;
+            LoadUexp = inLoadUexp;
+        }

        public virtual Guid? ReadPropertyGuid()
        {
UAssetAPI/UAsset.cs (124 changes: 81 additions & 43 deletions)
@@ -981,7 +981,7 @@ public override void Read(AssetBinaryReader reader, int[] manualSkips = null, in
            }

            BulkData = [];
-            if (BulkDataStartOffset > 0)
+            if (BulkDataStartOffset > 0 && reader.LoadUexp)
            {
                reader.BaseStream.Seek(BulkDataStartOffset, SeekOrigin.Begin);
                BulkData = reader.ReadBytes((int)(reader.BaseStream.Length - BulkDataStartOffset));
@@ -1045,70 +1045,88 @@ public override void Read(AssetBinaryReader reader, int[] manualSkips = null, in
                }
            }

-            // load dependencies, if needed and available
-            Dictionary<int, IList<int>> depsMap = new Dictionary<int, IList<int>>();
-            for (int i = 0; i < Exports.Count; i++)
+            if (reader.LoadUexp)
            {
-                Export newExport = Exports[i];
-                List<FPackageIndex> deps = new List<FPackageIndex>();
-                deps.AddRange(newExport.SerializationBeforeSerializationDependencies);
-                deps.AddRange(newExport.SerializationBeforeCreateDependencies);
-                //deps.Add(newExport.ClassIndex);
-                //deps.Add(newExport.SuperIndex);

-                depsMap[i + 1] = new List<int>();
-                foreach (FPackageIndex dep in deps)
+                // load dependencies, if needed and available
+                Dictionary<int, IList<int>> depsMap = new Dictionary<int, IList<int>>();
+                for (int i = 0; i < Exports.Count; i++)
                {
-                    if (dep.IsImport())
+                    Export newExport = Exports[i];
+                    List<FPackageIndex> deps = new List<FPackageIndex>();
+                    deps.AddRange(newExport.SerializationBeforeSerializationDependencies);
+                    deps.AddRange(newExport.SerializationBeforeCreateDependencies);
+                    //deps.Add(newExport.ClassIndex);
+                    //deps.Add(newExport.SuperIndex);

+                    depsMap[i + 1] = new List<int>();
+                    foreach (FPackageIndex dep in deps)
                    {
-                        Import imp = dep.ToImport(this);
-                        if (imp.OuterIndex.IsImport())
+                        if (dep.IsImport())
                        {
-                            var sourcePath = imp.OuterIndex.ToImport(this).ObjectName;
-                            this.PullSchemasFromAnotherAsset(sourcePath, imp.ObjectName);
+                            Import imp = dep.ToImport(this);
+                            if (imp.OuterIndex.IsImport())
+                            {
+                                var sourcePath = imp.OuterIndex.ToImport(this).ObjectName;
+                                this.PullSchemasFromAnotherAsset(sourcePath, imp.ObjectName);
+                            }
                        }
-                    }

-                    if (dep.IsExport())
-                    {
-                        depsMap[i + 1].Add(dep.Index);
+                        if (dep.IsExport())
+                        {
+                            depsMap[i + 1].Add(dep.Index);
+                        }
                    }
                }
-            }
-            exportLoadOrder.AddRange(Enumerable.Range(1, Exports.Count).SortByDependencies(depsMap));
+                exportLoadOrder.AddRange(Enumerable.Range(1, Exports.Count).SortByDependencies(depsMap));

-            // Export data
-            if (SectionSixOffset > 0 && Exports.Count > 0)
-            {
-                foreach (int exportIdx in exportLoadOrder)
+                // Export data
+                if (SectionSixOffset > 0 && Exports.Count > 0)
                {
-                    int i = exportIdx - 1;
-
-                    reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
-                    if (manualSkips != null && manualSkips.Contains(i) && (forceReads == null || !forceReads.Contains(i)))
+                    foreach (int exportIdx in exportLoadOrder)
                    {
-                        Exports[i] = Exports[i].ConvertToChildExport<RawExport>();
-                        ((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
-                        continue;
+                        int i = exportIdx - 1;
+
+                        reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
+                        if (manualSkips != null && manualSkips.Contains(i) && (forceReads == null || !forceReads.Contains(i)))
+                        {
+                            Exports[i] = Exports[i].ConvertToChildExport<RawExport>();
+                            ((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
+                            continue;
+                        }
+
+                        ConvertExportToChildExportAndRead(reader, i);
                    }

-                    ConvertExportToChildExportAndRead(reader, i);
-                }
+                    // catch any stragglers
+                    for (int i = 0; i < Exports.Count; i++)
+                    {
+                        if (Exports[i].alreadySerialized) continue;

+                        reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
+                        if (manualSkips != null && manualSkips.Contains(i) && (forceReads == null || !forceReads.Contains(i)))
+                        {
+                            Exports[i] = Exports[i].ConvertToChildExport<RawExport>();
+                            ((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
+                            continue;
+                        }
+
-                // catch any stragglers
+                        ConvertExportToChildExportAndRead(reader, i);
+                    }
+                }
+            }
+            else
+            {
+                // skip loading dependencies & parsing export data if we don't load uexp
+                // convert all exports as appropriate, but do no further reading
                for (int i = 0; i < Exports.Count; i++)
                {
                    if (Exports[i].alreadySerialized) continue;

                    reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
                    if (manualSkips != null && manualSkips.Contains(i) && (forceReads == null || !forceReads.Contains(i)))
                    {
                        Exports[i] = Exports[i].ConvertToChildExport<RawExport>();
                        ((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
                        continue;
                    }

-                    ConvertExportToChildExportAndRead(reader, i);
+                    ConvertExportToChildExportAndRead(reader, i, false);
                }
            }

@@ -1985,6 +2003,26 @@ public UAsset(string path, EngineVersion engineVersion = EngineVersion.UNKNOWN,

            Read(PathToReader(path));
        }
+
+        /// <summary>
+        /// Reads an asset from disk and initializes a new instance of the <see cref="UAsset"/> class to store its data in memory.
+        /// </summary>
+        /// <param name="path">The path of the asset file on disk that this instance will read from.</param>
+        /// <param name="loadUexp">Whether to load the .uexp file. False only reads the .uasset file.</param>
+        /// <param name="engineVersion">The version of the Unreal Engine that will be used to parse this asset. If the asset is versioned, this can be left unspecified.</param>
+        /// <param name="mappings">A valid set of mappings for the game that this asset is from. Not required unless unversioned properties are used.</param>
+        /// <param name="customSerializationFlags">A set of custom serialization flags, which can be used to override certain optional behavior in how UAssetAPI serializes assets.</param>
+        /// <exception cref="UnknownEngineVersionException">Thrown when this is an unversioned asset and <see cref="ObjectVersion"/> is unspecified.</exception>
+        /// <exception cref="FormatException">Throw when the asset cannot be parsed correctly.</exception>
+        public UAsset(string path, bool loadUexp, EngineVersion engineVersion = EngineVersion.UNKNOWN, Usmap mappings = null, CustomSerializationFlags customSerializationFlags = CustomSerializationFlags.None)
+        {
+            this.FilePath = path;
+            this.Mappings = mappings;
+            this.CustomSerializationFlags = customSerializationFlags;
+            SetEngineVersion(engineVersion);
+
+            Read(PathToReader(path, loadUexp));
+        }

        /// <summary>
        /// Reads an asset from a BinaryReader and initializes a new instance of the <see cref="UAsset"/> class to store its data in memory.
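As a rough sketch of how the constructor added above might be used (the path, engine version, and printed fields are illustrative assumptions, not part of the commit):

// Header-only load: export table entries are converted to their proper export types,
// but their serialized bodies in the .uexp are never read.
UAsset asset = new UAsset("Content/UI/HUD.uasset", false, EngineVersion.VER_UE5_1);
foreach (var export in asset.Exports)
{
    // ObjectName and SerialSize come from the .uasset summary, so they are available without the .uexp.
    Console.WriteLine($"{export.ObjectName}: {export.SerialSize} bytes of serialized data");
}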
UAssetAPI/UnrealPackage.cs (79 changes: 40 additions & 39 deletions)
@@ -315,48 +315,54 @@ public virtual bool CanCreateDummies()
            if (isSerializationTime) return true;
            return (CustomSerializationFlags & CustomSerializationFlags.NoDummies) == 0;
        }

        /// <summary>
        /// Creates a MemoryStream from an asset path.
        /// </summary>
        /// <param name="p">The path to the input file.</param>
+        /// <param name="loadUEXP">Whether to load the UEXP file. False only reads the UASSET.</param>
        /// <returns>A new MemoryStream that stores the binary data of the input file.</returns>
-        public MemoryStream PathToStream(string p)
+        public MemoryStream PathToStream(string p, bool loadUEXP = true)
        {
-            using (FileStream origStream = File.Open(p, FileMode.Open, new FileInfo(p).IsReadOnly ? FileAccess.Read : FileAccess.ReadWrite))
+            using (FileStream origStream = File.Open(p, FileMode.Open, FileAccess.Read))
            {
                MemoryStream completeStream = new MemoryStream();
                origStream.CopyTo(completeStream);

-                UseSeparateBulkDataFiles = false;
-                try
+                if (loadUEXP)
                {
-                    var targetFile = Path.ChangeExtension(p, "uexp");
-                    if (File.Exists(targetFile))
+                    UseSeparateBulkDataFiles = false;
+                    try
                    {
-                        using (FileStream newStream = File.Open(targetFile, FileMode.Open))
+                        var targetFile = Path.ChangeExtension(p, "uexp");
+                        if (File.Exists(targetFile))
                        {
-                            completeStream.Seek(0, SeekOrigin.End);
-                            newStream.CopyTo(completeStream);
-                            UseSeparateBulkDataFiles = true;
+                            using (FileStream newStream = File.Open(targetFile, FileMode.Open))
+                            {
+                                completeStream.Seek(0, SeekOrigin.End);
+                                newStream.CopyTo(completeStream);
+                                UseSeparateBulkDataFiles = true;
+                            }
                        }
                    }
+                    catch (FileNotFoundException) { }
                }
-                catch (FileNotFoundException) { }


                completeStream.Seek(0, SeekOrigin.Begin);
                return completeStream;
            }
        }

        /// <summary>
        /// Creates a BinaryReader from an asset path.
        /// </summary>
        /// <param name="p">The path to the input file.</param>
+        /// <param name="loadUEXP">Whether to load the .uexp file. False only reads the .uasset file.</param>
        /// <returns>A new BinaryReader that stores the binary data of the input file.</returns>
-        public AssetBinaryReader PathToReader(string p)
+        public AssetBinaryReader PathToReader(string p, bool loadUEXP = true)
        {
-            return new AssetBinaryReader(PathToStream(p), this);
+            return new AssetBinaryReader(PathToStream(p, loadUEXP), loadUEXP, this);
        }

        /// <summary>
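A minimal sketch of the two helpers above (the file name is hypothetical, and a bare UAsset instance is assumed): loadUEXP only controls whether the .uexp bytes are appended to the in-memory stream and whether the resulting reader is allowed to read them.

UAsset package = new UAsset();  // bare instance, nothing parsed yet
using MemoryStream assetOnly = package.PathToStream("Content/Foo.uasset", false);
using MemoryStream combined = package.PathToStream("Content/Foo.uasset", true);
Console.WriteLine(combined.Length - assetOnly.Length);  // size of Foo.uexp, if one exists

AssetBinaryReader reader = package.PathToReader("Content/Foo.uasset", false);
// reader.LoadUexp is false, so a subsequent Read() skips bulk data and export bodies.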
@@ -716,7 +722,7 @@ internal string InternalAssetPath
                _internalAssetPath = value;
            }
        }
-        protected void ConvertExportToChildExportAndRead(AssetBinaryReader reader, int i)
+        protected void ConvertExportToChildExportAndRead(AssetBinaryReader reader, int i, bool read = true)
        {
#pragma warning disable CS0168 // Variable is declared but never used
            try
@@ -739,57 +745,49 @@ protected void ConvertExportToChildExportAndRead(AssetBinaryReader reader, int i
                {
                    case "Level":
                        Exports[i] = Exports[i].ConvertToChildExport<LevelExport>();
-                        Exports[i].Read(reader, (int)nextStarting);
                        break;
                    case "Enum":
                    case "UserDefinedEnum":
                        Exports[i] = Exports[i].ConvertToChildExport<EnumExport>();
-                        Exports[i].Read(reader, (int)nextStarting);
                        break;
                    case "Function":
                        Exports[i] = Exports[i].ConvertToChildExport<FunctionExport>();
-                        Exports[i].Read(reader, (int)nextStarting);
                        break;
                    case "UserDefinedStruct":
                        Exports[i] = Exports[i].ConvertToChildExport<UserDefinedStructExport>();
-                        Exports[i].Read(reader, (int)nextStarting);
                        break;
                    default:
                        if (exportClassType.EndsWith("DataTable"))
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<DataTableExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        else if (exportClassType.EndsWith("StringTable"))
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<StringTableExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        else if (exportClassType.EndsWith("BlueprintGeneratedClass"))
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<ClassExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        else if (exportClassType == "ScriptStruct")
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<StructExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        else if (MainSerializer.PropertyTypeRegistry.ContainsKey(exportClassType) || MainSerializer.AdditionalPropertyRegistry.Contains(exportClassType))
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<PropertyExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        else
                        {
                            Exports[i] = Exports[i].ConvertToChildExport<NormalExport>();
-                            Exports[i].Read(reader, (int)nextStarting);
                        }
                        break;
                }

+                if (read) Exports[i].Read(reader, (int)nextStarting);
+
                // if we got a StructExport, let's modify mappings/MapStructTypeOverride if we can
-                if (Exports[i] is StructExport fetchedStructExp && Exports[i] is not FunctionExport)
+                if (read && Exports[i] is StructExport fetchedStructExp && Exports[i] is not FunctionExport)
                {
                    // check to see if we can add some new map type overrides
                    if (fetchedStructExp.LoadedProperties != null)
@@ -826,7 +824,7 @@ protected void ConvertExportToChildExportAndRead(AssetBinaryReader reader, int i
                }

                // if we got an enum, let's add to mappings enum map if we can
-                if (Exports[i] is EnumExport fetchedEnumExp)
+                if (read && Exports[i] is EnumExport fetchedEnumExp)
                {
                    string enumName = fetchedEnumExp.ObjectName?.ToString();
                    if (Mappings?.EnumMap != null && enumName != null)
Expand All @@ -840,26 +838,29 @@ protected void ConvertExportToChildExportAndRead(AssetBinaryReader reader, int i
}
}

long extrasLen = nextStarting - reader.BaseStream.Position;
if (extrasLen < 0)
if (read)
{
throw new FormatException("Invalid padding at end of export " + (i + 1) + ": " + extrasLen + " bytes");
}
else
{
Exports[i].Extras = reader.ReadBytes((int)extrasLen);
}
long extrasLen = nextStarting - reader.BaseStream.Position;
if (extrasLen < 0)
{
throw new FormatException("Invalid padding at end of export " + (i + 1) + ": " + extrasLen + " bytes");
}
else
{
Exports[i].Extras = reader.ReadBytes((int)extrasLen);
}

Exports[i].alreadySerialized = true;
Exports[i].alreadySerialized = true;
}
}
catch (Exception ex)
{
#if DEBUGVERBOSE
Console.WriteLine("\nFailed to parse export " + (i + 1) + ": " + ex.ToString());
#endif
reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
if (read) reader.BaseStream.Seek(Exports[i].SerialOffset, SeekOrigin.Begin);
Exports[i] = Exports[i].ConvertToChildExport<RawExport>();
((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
if (read) ((RawExport)Exports[i]).Data = reader.ReadBytes((int)Exports[i].SerialSize);
}
#pragma warning restore CS0168 // Variable is declared but never used
}
