Merscom DPK format now reads and writes DPK archives almost perfectly, although the output is not byte-identical due to sorting differences

Michael Becker 2022-04-13 00:40:27 -04:00
parent e602bdb69b
commit 9269f37861
GPG Key ID: DA394832305DA332
2 changed files with 106 additions and 6 deletions


@@ -20,6 +20,8 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
using System;
using MBS.Framework;
using UniversalEditor.IO;
using UniversalEditor.ObjectModels.FileSystem;
@@ -48,6 +50,8 @@ namespace UniversalEditor.Plugins.Merscom.DataFormats.FileSystem.DPK
if (fsom == null)
throw new ObjectModelNotSupportedException();
fsom.AdditionalDetails.Add("compressedSize", "Compressed size");
Reader br = base.Accessor.Reader;
string DPK4 = br.ReadFixedLengthString(4);
@@ -59,7 +63,7 @@ namespace UniversalEditor.Plugins.Merscom.DataFormats.FileSystem.DPK
uint u3 = br.ReadUInt32();
for (int i = 0; i < u3; i++)
{
uint u4 = br.ReadUInt32();
uint recordSize = br.ReadUInt32(); // size of file TOC record
uint decompressedLength = br.ReadUInt32();
uint compressedLength = br.ReadUInt32(); // 371 39325
uint offset = br.ReadUInt32(); // 292196 292567
@@ -71,6 +75,8 @@ namespace UniversalEditor.Plugins.Merscom.DataFormats.FileSystem.DPK
file.Properties["compressedLength"] = compressedLength;
file.Properties["offset"] = offset;
file.Properties["reader"] = Accessor.Reader;
file.SetAdditionalDetail("compressedSize", compressedLength);
file.Size = decompressedLength;
@@ -78,22 +84,112 @@ namespace UniversalEditor.Plugins.Merscom.DataFormats.FileSystem.DPK
}
}
private Guid KnownSettingsGuids_EnableLogging = new Guid("{39e459e8-e8be-4836-8274-fd1c26d498cf}");
private Compression.CompressionModule zlib { get; } = Compression.CompressionModule.FromKnownCompressionMethod(Compression.CompressionMethod.Zlib);
void File_DataRequest(object sender, DataRequestEventArgs e)
{
File file = (File)sender;
uint compressedLength = (uint)file.Properties["compressedLength"];
uint offset = (uint)file.Properties["offset"];
Reader reader = (Reader)file.Properties["reader"];
Accessor.Reader.Seek(offset, SeekOrigin.Begin);
byte[] compressedData = Accessor.Reader.ReadBytes(compressedLength);
byte[] decompressedData = Compression.CompressionModule.FromKnownCompressionMethod(Compression.CompressionMethod.Zlib).Decompress(compressedData);
e.Data = decompressedData;
if (Application.Instance.GetSetting<bool>(KnownSettingsGuids_EnableLogging))
{
Console.WriteLine(String.Format("ue: merscom: dpk: unpacking '{0}' (at {1}, compressed size {2}, decompressed size {3})", file.Name, offset, compressedLength, file.Size));
}
reader.Seek(offset, SeekOrigin.Begin);
byte[] compressedData = reader.ReadBytes(compressedLength);
if (compressedLength == file.Size)
{
// file is not compressed!
e.Data = compressedData;
}
else
{
byte[] decompressedData = zlib.Decompress(compressedData);
e.Data = decompressedData;
}
}
protected override void SaveInternal(ObjectModel objectModel)
{
throw new NotImplementedException();
FileSystemObjectModel fsom = (objectModel as FileSystemObjectModel);
if (fsom == null)
throw new ObjectModelNotSupportedException();
Writer bw = base.Accessor.Writer;
bw.WriteFixedLengthString("DPK4");
File[] files = fsom.GetAllFiles();
uint archiveLength = (uint)(16 + (16 * files.Length));
uint tocLength = (uint)(16 * files.Length);
for (int i = 0; i < files.Length; i++)
{
tocLength += (uint) (files[i].Name.Length + 1).Align(4);
}
long archiveLengthOffset = bw.Accessor.Position;
bw.WriteUInt32(archiveLength); // placeholder; the real archive length is patched in at the end of SaveInternal
bw.WriteUInt32(tocLength);
bw.WriteUInt32((uint)files.Length);
uint[] offsets = new uint[files.Length];
uint[] compressedLengths = new uint[files.Length];
long tocOffset = bw.Accessor.Position;
bw.Accessor.Seek(tocLength, SeekOrigin.Current); // skip over TOC for now, will come back and write it later
uint offset = (uint)(tocOffset + tocLength);
if (files.Length > 0) offsets[0] = offset; // guard against an empty archive
for (int i = 0; i < files.Length; i++)
{
byte[] decompressedData = files[i].GetData();
byte[] compressedData = zlib.Compress(decompressedData);
if (compressedData.Length >= decompressedData.Length)
{
// no point in using the zlib
bw.WriteBytes(decompressedData);
offset += (uint)decompressedData.Length;
compressedLengths[i] = (uint)decompressedData.Length;
}
else
{
bw.WriteBytes(compressedData);
offset += (uint)compressedData.Length;
compressedLengths[i] = (uint)compressedData.Length;
}
if (i < files.Length - 1)
{
offsets[i + 1] = offset;
}
}
// now we can write the TOC
bw.Accessor.Seek(tocOffset, SeekOrigin.Begin);
for (int i = 0; i < files.Length; i++)
{
uint recordSize = (uint)(12 + (files[i].Name.Length + 1).Align(4));
bw.WriteUInt32(recordSize);
uint decompressedLength = (uint)files[i].Size;
bw.WriteUInt32(decompressedLength);
bw.WriteUInt32(compressedLengths[i]);
bw.WriteUInt32(offsets[i]);
bw.WriteNullTerminatedString(files[i].Name);
bw.Align(4);
}
bw.Accessor.Seek(archiveLengthOffset, SeekOrigin.Begin);
bw.WriteUInt32((uint)bw.Accessor.Length);
}
}
}
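
For reference, the LoadInternal/SaveInternal code above implies the following DPK4 on-disk layout: a 4-byte "DPK4" magic, the total archive length, the TOC length, and the file count, followed by one TOC record per file and then the file data. The standalone reader below is a sketch of that layout, not part of this commit: the class and field names are descriptive guesses, the recordSize interpretation (record size excluding the 4-byte size field itself) is inferred from the writer, names are assumed to be ASCII, and ZLibStream assumes .NET 6 or later rather than the compression module the plugin uses.

using System;
using System.IO;
using System.IO.Compression;
using System.Text;

static class Dpk4LayoutSketch
{
    public static void Dump(string path)
    {
        using FileStream fs = File.OpenRead(path);
        using BinaryReader br = new BinaryReader(fs, Encoding.ASCII);

        string magic = new string(br.ReadChars(4));      // "DPK4"
        uint archiveLength = br.ReadUInt32();            // total size of the archive in bytes
        uint tocLength = br.ReadUInt32();                // size of the table of contents that follows
        uint fileCount = br.ReadUInt32();                // number of TOC records
        Console.WriteLine($"{magic}: {fileCount} files, TOC {tocLength} bytes, archive {archiveLength} bytes");

        for (uint i = 0; i < fileCount; i++)
        {
            uint recordSize = br.ReadUInt32();           // 12 + name padded to a 4-byte boundary (excludes this field)
            uint decompressedLength = br.ReadUInt32();
            uint compressedLength = br.ReadUInt32();
            uint offset = br.ReadUInt32();               // absolute offset of this entry's data

            // null-terminated name, padded so the record ends on a 4-byte boundary
            byte[] nameBytes = br.ReadBytes((int)recordSize - 12);
            string name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');

            long nextRecord = fs.Position;               // the next TOC record starts here

            fs.Position = offset;
            byte[] stored = br.ReadBytes((int)compressedLength);
            byte[] data;
            if (compressedLength == decompressedLength)
            {
                // stored raw, mirroring the compressedLength == file.Size branch above
                data = stored;
            }
            else
            {
                // zlib-compressed; ZLibStream is a .NET 6+ assumption, not the plugin's compression module
                using var zlib = new ZLibStream(new MemoryStream(stored), CompressionMode.Decompress);
                using var buffer = new MemoryStream();
                zlib.CopyTo(buffer);
                data = buffer.ToArray();
            }

            Console.WriteLine($"  {name}: offset={offset}, compressed={compressedLength}, decompressed={data.Length}");
            fs.Position = nextRecord;
        }
    }
}

Calling Dpk4LayoutSketch.Dump("example.dpk") lists every entry and exercises the same raw-versus-zlib decision that File_DataRequest and SaveInternal above make.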


@@ -61,6 +61,10 @@
<Project>{2D4737E6-6D95-408A-90DB-8DFF38147E85}</Project>
<Name>UniversalEditor.Core</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\MBS.Framework\MBS.Framework\MBS.Framework.csproj">
<Project>{00266B21-35C9-4A7F-A6BA-D54D7FDCC25C}</Project>
<Name>MBS.Framework</Name>
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
</Project>