Commit

raw zip reads
diogotr7 committed Jun 21, 2024
1 parent b1eb3fe commit 0be383e
Showing 15 changed files with 513 additions and 77 deletions.
4 changes: 2 additions & 2 deletions StarBreaker.Cli/ExtractP4kCommand.cs
@@ -21,13 +21,13 @@ public class ExtractP4kCommand : ICommand

public ValueTask ExecuteAsync(IConsole console)
{
using var p4k = new P4kFile(P4kFile);
var p4k = new DirectP4kReader(P4kFile);

console.Output.WriteLine("DataForge loaded.");
console.Output.WriteLine("Exporting...");

var sw = Stopwatch.StartNew();
p4k.Extract(OutputDirectory, RegexPattern, new ProgressBar(console));
p4k.Extract(OutputDirectory, new ProgressBar(console));
sw.Stop();

console.Output.WriteLine();
6 changes: 1 addition & 5 deletions StarBreaker.Cli/Properties/launchSettings.json
@@ -12,11 +12,7 @@
"commandName": "Project",
"commandLineArgs": "df-extract-single --dcb \"D:\\extract\\Game.dcb\" --output \"D:\\extract\\stbr\""
},
"extract-p4k": {
"commandName": "Project",
"commandLineArgs": "extract-p4k --p4k \"C:\\Scratch\\extract\\Data.p4k\" --output \"D:\\extract\\stbr\\p4k\" --filter \\.(cgf|cga|chr|skin|anim|soc|caf|dba)$"
},
"extract-live": {
"extract-p4k-live": {
"commandName": "Project",
"commandLineArgs": "extract-p4k --p4k \"C:\\Program Files\\Roberts Space Industries\\StarCitizen\\LIVE\\Data.p4k\" --output \"D:\\out\""
}
19 changes: 19 additions & 0 deletions StarBreaker.Forge/ArrayReader.cs
@@ -48,4 +48,23 @@ protected override void Dispose(bool disposing) { }
public override MemoryHandle Pin(int elementIndex = 0) => throw new NotSupportedException();
public override void Unpin() => throw new NotSupportedException();
}
}

public ref struct SpanReader
{
private readonly ReadOnlySpan<byte> _span;
private int _position;

public SpanReader(ReadOnlySpan<byte> span, int offset)
{
_span = span;
_position = offset;
}

public T Read<T>() where T : unmanaged
{
var value = MemoryMarshal.Read<T>(_span[_position..]);
_position += Unsafe.SizeOf<T>();
return value;
}
}
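
A minimal usage sketch for the new SpanReader (hypothetical code, not part of the commit): Read<T> deserializes an unmanaged value at the current position and then advances by sizeof(T), so successive calls walk the buffer sequentially.

// Hypothetical example: read two ints back to back from a small buffer.
ReadOnlySpan<byte> bytes = stackalloc byte[] { 1, 0, 0, 0, 2, 0, 0, 0 };
var reader = new SpanReader(bytes, 0);
int first = reader.Read<int>();   // 1 on little-endian hardware; _position is now 4
int second = reader.Read<int>();  // 2; _position is now 8
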
65 changes: 24 additions & 41 deletions StarBreaker.Forge/DataForge.cs
@@ -17,24 +17,6 @@ public DataForge(byte[] allBytes, string outputFolder)
_outputFolder = outputFolder;
_database = new Database(allBytes, out var bytesRead);
_offsets = ReadOffsets(bytesRead);

// Dictionary<CigGuid, int> dd = new();
// for (var index = 0; index < _database.RecordDefinitions.Span.Length; index++)
// {
// var record = _database.RecordDefinitions.Span[index];
// dd.Add(record.Hash, index);
// }
//
// var records = _database.RecordDefinitions.Span;
//
// foreach (var reference in _database.ReferenceValues.Span)
// {
// if (reference.Item1 == 0xFFFFFFFF || reference.Value == CigGuid.Empty) continue;
//
// var record = records[dd[reference.Value]];
// var offset = _offsets[record.StructIndex][reference.Item1];
// Console.WriteLine($"{offset} | {record} ");
// }
}

public void Extract(Regex? fileNameFilter = null, IProgress<double>? progress = null)
@@ -59,9 +41,7 @@ public void Extract(Regex? fileNameFilter = null, IProgress<double>? progress =

var total = structsPerFileName.Count;

Parallel.ForEach(structsPerFileName,
//new ParallelOptions { MaxDegreeOfParallelism = 12 },
data =>
Parallel.ForEach(structsPerFileName, data =>
{
var structs = _database.StructDefinitions.Span;
var properties = _database.PropertyDefinitions.Span;
@@ -76,18 +56,18 @@ public void Extract(Regex? fileNameFilter = null, IProgress<double>? progress =
var structDef = structs[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = new ArrayReader(_database.Bytes, offset);
var reader = new SpanReader(_database.Bytes, offset);
var node = new XmlNode(_database.GetString(structDef.NameOffset));
FillNode(node, structDef, reader, structs, properties);
FillNode(node, structDef, ref reader, structs, properties);
node.WriteTo(writer, 0);
}
else
{
using var writer = new StreamWriter(filePath);
writer.Write('<');
writer.Write("__root");
writer.Write('>');
@@ -98,11 +78,11 @@ public void Extract(Regex? fileNameFilter = null, IProgress<double>? progress =
var structDef = structs[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = new ArrayReader(_database.Bytes, offset);
var reader = new SpanReader(_database.Bytes, offset);
var node = new XmlNode(_database.GetString(structDef.NameOffset));
FillNode(node, structDef, reader, structs, properties);
FillNode(node, structDef, ref reader, structs, properties);
node.WriteTo(writer, 1);
}
@@ -113,10 +93,13 @@ public void Extract(Regex? fileNameFilter = null, IProgress<double>? progress =
writer.Write('>');
}
var currentProgress = Interlocked.Increment(ref _progress);
//only report progress every 250 records and when we are done
if (currentProgress == total || currentProgress % 250 == 0)
progress?.Report(currentProgress / (double)total);
lock (structsPerFileName)
{
var currentProgress = Interlocked.Increment(ref _progress);
//only report progress every 250 records and when we are done
if (currentProgress == total || currentProgress % 250 == 0)
progress?.Report(currentProgress / (double)total);
}
});
}
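
The progress block above combines three ideas: Interlocked.Increment keeps the counter exact across the parallel loop, the modulo check throttles reporting to every 250 files plus the final one, and the new lock presumably serializes the Report call so the IProgress implementation is never entered from two threads at once. A standalone sketch of the same pattern (hypothetical names, not code from the commit):

using System;
using System.Threading;
using System.Threading.Tasks;

static class ProgressSketch
{
    private static int _done;

    // Hypothetical helper mirroring the reporting pattern in Extract above.
    public static void Run(int total, IProgress<double>? progress)
    {
        var gate = new object();
        Parallel.For(0, total, _ =>
        {
            // ... per-item work would happen here ...
            lock (gate)
            {
                var current = Interlocked.Increment(ref _done);
                // report every 250 items, and always once at the end
                if (current == total || current % 250 == 0)
                    progress?.Report(current / (double)total);
            }
        });
    }
}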

@@ -142,10 +125,10 @@ public void ExtractSingle(Regex? fileNameFilter = null, IProgress<double>? progr

var structDef = structs[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = new ArrayReader(_database.Bytes, offset);
var reader = new SpanReader(_database.Bytes, offset);
var child = new XmlNode(_database.GetString(structDef.NameOffset));

FillNode(child, structDef, reader, structs, properties);
FillNode(child, structDef, ref reader, structs, properties);

child.WriteTo(writer, 1);

@@ -157,9 +140,9 @@ public void ExtractSingle(Regex? fileNameFilter = null, IProgress<double>? progr
writer.WriteLine("</__root>");
}

private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ArrayReader reader, ReadOnlySpan<DataForgeStructDefinition> structs, ReadOnlySpan<DataForgePropertyDefinition> properties)
private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ref SpanReader reader, ReadOnlySpan<DataForgeStructDefinition> structs, ReadOnlySpan<DataForgePropertyDefinition> properties)
{
foreach (var prop in structDef.EnumerateProperties(structs, properties))
foreach (ref readonly var prop in structDef.EnumerateProperties(structs, properties).AsSpan())
{
if (prop.ConversionType == ConversionType.Attribute)
{
@@ -172,7 +155,7 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ArrayRe

node.AppendChild(childClass);

FillNode(childClass, structDef3, reader, structs, properties);
FillNode(childClass, structDef3, ref reader, structs, properties);
}
else if (prop.DataType is DataType.StrongPointer /* or DataType.WeakPointer*/)
{
@@ -183,13 +166,13 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ArrayRe
var structDef2 = structs[(int)ptr.StructIndex];
var offset2 = _offsets[(int)ptr.StructIndex][(int)ptr.InstanceIndex];

var reader2 = new ArrayReader(_database.Bytes, offset2);
var reader2 = new SpanReader(_database.Bytes, offset2);

var child = new XmlNode(_database.GetString(prop.NameOffset));

node.AppendChild(child);

FillNode(child, structDef2, reader2, structs, properties);
FillNode(child, structDef2, ref reader2, structs, properties);
}
else
{
@@ -268,13 +251,13 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ArrayRe
{
var structDef1 = structs[prop.StructIndex];
var offset1 = _offsets[prop.StructIndex][index];
var reader1 = new ArrayReader(_database.Bytes, offset1);
var reader1 = new SpanReader(_database.Bytes, offset1);

var child = new XmlNode(_database.GetString(structDef1.NameOffset));

arrayNode.AppendChild(child);

FillNode(child, structDef1, reader1, structs, properties);
FillNode(child, structDef1, ref reader1, structs, properties);
}
else if (prop.DataType is DataType.StrongPointer /*or DataType.WeakPointer*/)
{
@@ -289,13 +272,13 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ArrayRe

var structDef2 = structs[(int)reference.StructIndex];
var offset2 = _offsets[(int)reference.StructIndex][(int)reference.InstanceIndex];
var reader2 = new ArrayReader(_database.Bytes, offset2);
var reader2 = new SpanReader(_database.Bytes, offset2);

var child = new XmlNode(_database.GetString(prop.NameOffset));

arrayNode.AppendChild(child);

FillNode(child, structDef2, reader2, structs, properties);
FillNode(child, structDef2, ref reader2, structs, properties);
}
else
{
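
Two patterns are visible in the FillNode changes: inline class members keep consuming the same reader, which must now travel by ref because SpanReader is a mutable struct, while strong pointers and array elements get a fresh SpanReader positioned at their own offset. A hypothetical illustration of why the by-ref part matters (made-up helper names, not code from the commit):

// Hypothetical illustration only.
ReadOnlySpan<byte> bytes = stackalloc byte[] { 1, 0, 0, 0, 2, 0, 0, 0 };
var reader = new SpanReader(bytes, 0);

ReadByValue(reader);                // advances a copy; the caller's position stays at 0
int first = ReadByRef(ref reader);  // reads 1; the caller's position moves to 4

static int ReadByValue(SpanReader r) => r.Read<int>();
static int ReadByRef(ref SpanReader r) => r.Read<int>();
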
32 changes: 12 additions & 20 deletions StarBreaker.Forge/DataForgeStructDefinition.cs
@@ -1,3 +1,4 @@
using System.Collections.Concurrent;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

@@ -6,6 +7,7 @@ namespace StarBreaker.Forge;
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public readonly record struct DataForgeStructDefinition
{
private static readonly ConcurrentDictionary<DataForgeStructDefinition, DataForgePropertyDefinition[]> _propertiesCache = new();
public readonly DataForgeStringId NameOffset;
public readonly uint ParentTypeIndex;
public readonly ushort AttributeCount;
@@ -55,11 +57,16 @@ public int CalculateSize(ReadOnlySpan<DataForgeStructDefinition> structs, ReadOn
return size;
}

public List<DataForgePropertyDefinition> EnumerateProperties(
public DataForgePropertyDefinition[] EnumerateProperties(
ReadOnlySpan<DataForgeStructDefinition> structs,
ReadOnlySpan<DataForgePropertyDefinition> properties
)
{
if (_propertiesCache.TryGetValue(this, out var cachedProperties))
{
return cachedProperties;
}

var _properties = new List<DataForgePropertyDefinition>();
_properties.AddRange(properties.Slice(FirstAttributeIndex, AttributeCount));

@@ -70,24 +77,9 @@ ReadOnlySpan<DataForgePropertyDefinition> properties
_properties.InsertRange(0, properties.Slice(baseStruct.FirstAttributeIndex, baseStruct.AttributeCount));
}

return _properties;
}

#if DEBUG
public string PropsAsString => string.Join("\n", Properties);
public List<DataForgePropertyDefinition> Properties
{
get
{
var _properties = new List<DataForgePropertyDefinition>();

foreach (var prop in EnumerateProperties2(DebugGlobal.Database))
{
_properties.Add(prop);
}

return _properties;
}
var arr = _properties.ToArray();
_propertiesCache.TryAdd(this, arr);

return arr;
}
#endif
}
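
The net effect of this file's changes is that EnumerateProperties now returns a cached array per struct definition instead of rebuilding a List on every call, and the DEBUG-only Properties helper is gone. Caching on the struct itself works because DataForgeStructDefinition is a readonly record struct, so value equality and hashing come for free. A standalone sketch of the same cache-aside pattern (hypothetical types and names, not code from the commit):

using System;
using System.Collections.Concurrent;

// Hypothetical stand-in for the real key type.
readonly record struct StructKeySketch(int NameOffset, ushort AttributeCount);

static class PropertyCacheSketch
{
    private static readonly ConcurrentDictionary<StructKeySketch, int[]> _cache = new();

    public static int[] GetOrCompute(StructKeySketch key, Func<StructKeySketch, int[]> compute)
    {
        if (_cache.TryGetValue(key, out var cached))
            return cached;

        var computed = compute(key);
        _cache.TryAdd(key, computed); // losing this race is harmless: both results are identical
        return computed;
    }
}

ConcurrentDictionary.GetOrAdd would express the same lookup in one call; like the TryGetValue/TryAdd pair, it can still run the factory more than once under contention, so the difference is mostly stylistic.
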
13 changes: 4 additions & 9 deletions StarBreaker.Forge/XmlNode.cs
@@ -9,25 +9,20 @@ public sealed class XmlNode
public readonly string _name;
public readonly List<XmlNode> _children;
public readonly List<XmlAttribute> _attributes;
public XmlNode? _parent;

public XmlNode(string name)
{
_name = name;
_children = [];
_attributes = [];
_children = new List<XmlNode>();
_attributes = new List<XmlAttribute>();
}

[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AppendChild(XmlNode child)
{
child._parent = this;
_children.Add(child);
}
public void AppendChild(XmlNode child) => _children.Add(child);

[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AppendAttribute(XmlAttribute xmlAttribute) => _attributes.Add(xmlAttribute);

[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void WriteTo(TextWriter writer, int depth)
{
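
The XmlNode changes drop the _parent back-pointer, so AppendChild reduces to a plain list add. A hypothetical usage sketch with made-up element names (XmlAttribute's constructor is not shown in this diff, so attributes are omitted):

// Hypothetical example, not part of the commit.
var root = new XmlNode("Vehicle");
var child = new XmlNode("Components");
root.AppendChild(child);       // now just _children.Add(child); no parent bookkeeping
root.WriteTo(Console.Out, 0);  // Console.Out is a TextWriter; depth 0, as used for a single root record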