diff --git a/.gitignore b/.gitignore index a9e05e3..a0eb733 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # ---> VisualStudioCode +.vscode .vscode/* !.vscode/settings.json !.vscode/tasks.json diff --git a/editor-dotnet.sln b/editor-dotnet.sln new file mode 100644 index 0000000..499bead --- /dev/null +++ b/editor-dotnet.sln @@ -0,0 +1,121 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.5.002.0 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "editor-dotnet", "editor-dotnet", "{75210F45-D690-4A61-9CD8-96B09E5DAAC5}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{F05001E1-6FFB-48AE-BF7F-7F39A24D3B70}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "lib", "lib", "{C86F60F9-BBC1-4554-A3B0-D553F9C157A8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MBS.Editor.Core", "editor-dotnet\src\lib\MBS.Editor.Core\MBS.Editor.Core.csproj", "{8FFB417A-2CDC-429F-ABE0-19B3015530D3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MBS.Editor.UserInterface", "editor-dotnet\src\lib\MBS.Editor.UserInterface\MBS.Editor.UserInterface.csproj", "{C4316562-555A-4A79-9D71-15737976DF8B}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "app", "app", "{4ED8C38B-47EF-4368-9965-CF627465B45A}" + ProjectSection(SolutionItems) = preProject + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MBS.Editor", "editor-dotnet\src\app\MBS.Editor\MBS.Editor.csproj", "{A936C411-0184-43F8-A343-0DE8C3B7B42E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "framework-dotnet", "framework-dotnet", "{CC86007D-8193-4EAA-932D-A96B5F09847E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "framework-dotnet", "framework-dotnet", "{B9747AFE-160D-4807-B989-B3F0ACCA3634}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{BDC147D8-4D97-4663-9408-BC822E1E0B3C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "lib", "lib", "{80A728D5-7C00-4B59-A37E-321C54CC554F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MBS.Desktop", "framework-dotnet\framework-dotnet\src\lib\MBS.Desktop\MBS.Desktop.csproj", "{4F2B8AF8-E1A4-4114-B4DA-4789A3A21143}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MBS.Core", "framework-dotnet\framework-dotnet\src\lib\MBS.Core\MBS.Core.csproj", "{7565CFB4-9761-4064-B18F-5E2644730BC0}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "plugins", "plugins", "{451AD529-16B4-4049-9D0C-0C79B3DDFA52}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MBS.Editor.Plugins.Multimedia", "editor-dotnet\src\plugins\MBS.Editor.Plugins.Multimedia\MBS.Editor.Plugins.Multimedia.csproj", "{5978938E-19F6-42AE-B588-7719A65ABCA7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MBS.Editor.TestProject", "editor-dotnet\src\app\MBS.Editor.TestProject\MBS.Editor.TestProject.csproj", "{CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MBS.Editor.Plugins.CRI", "editor-dotnet\src\plugins\MBS.Editor.Plugins.CRI\MBS.Editor.Plugins.CRI.csproj", "{78B11A3E-1371-48D8-9B8E-AE6ED2380A50}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{74AD8C3F-B0B8-472F-A847-1FFFB1667B34}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"MBS.Editor.Core.Tests", "editor-dotnet\src\tests\MBS.Editor.Core.Tests\MBS.Editor.Core.Tests.csproj", "{7A349FC6-BCE7-465D-ADBC-7A21242E2C78}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MBS.Editor.Plugins.CRI.Tests", "editor-dotnet\src\tests\MBS.Editor.Plugins.CRI.Tests\MBS.Editor.Plugins.CRI.Tests.csproj", "{2747FFC9-55AA-4A76-B0E9-D8A839E94E47}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {8FFB417A-2CDC-429F-ABE0-19B3015530D3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8FFB417A-2CDC-429F-ABE0-19B3015530D3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8FFB417A-2CDC-429F-ABE0-19B3015530D3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8FFB417A-2CDC-429F-ABE0-19B3015530D3}.Release|Any CPU.Build.0 = Release|Any CPU + {C4316562-555A-4A79-9D71-15737976DF8B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C4316562-555A-4A79-9D71-15737976DF8B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C4316562-555A-4A79-9D71-15737976DF8B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C4316562-555A-4A79-9D71-15737976DF8B}.Release|Any CPU.Build.0 = Release|Any CPU + {A936C411-0184-43F8-A343-0DE8C3B7B42E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A936C411-0184-43F8-A343-0DE8C3B7B42E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A936C411-0184-43F8-A343-0DE8C3B7B42E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A936C411-0184-43F8-A343-0DE8C3B7B42E}.Release|Any CPU.Build.0 = Release|Any CPU + {4F2B8AF8-E1A4-4114-B4DA-4789A3A21143}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4F2B8AF8-E1A4-4114-B4DA-4789A3A21143}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4F2B8AF8-E1A4-4114-B4DA-4789A3A21143}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4F2B8AF8-E1A4-4114-B4DA-4789A3A21143}.Release|Any CPU.Build.0 = Release|Any CPU + {7565CFB4-9761-4064-B18F-5E2644730BC0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7565CFB4-9761-4064-B18F-5E2644730BC0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7565CFB4-9761-4064-B18F-5E2644730BC0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7565CFB4-9761-4064-B18F-5E2644730BC0}.Release|Any CPU.Build.0 = Release|Any CPU + {5978938E-19F6-42AE-B588-7719A65ABCA7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5978938E-19F6-42AE-B588-7719A65ABCA7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5978938E-19F6-42AE-B588-7719A65ABCA7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5978938E-19F6-42AE-B588-7719A65ABCA7}.Release|Any CPU.Build.0 = Release|Any CPU + {CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF}.Release|Any CPU.Build.0 = Release|Any CPU + {78B11A3E-1371-48D8-9B8E-AE6ED2380A50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {78B11A3E-1371-48D8-9B8E-AE6ED2380A50}.Debug|Any CPU.Build.0 = Debug|Any CPU + {78B11A3E-1371-48D8-9B8E-AE6ED2380A50}.Release|Any CPU.ActiveCfg = Release|Any CPU + {78B11A3E-1371-48D8-9B8E-AE6ED2380A50}.Release|Any CPU.Build.0 = Release|Any CPU + {7A349FC6-BCE7-465D-ADBC-7A21242E2C78}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7A349FC6-BCE7-465D-ADBC-7A21242E2C78}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7A349FC6-BCE7-465D-ADBC-7A21242E2C78}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7A349FC6-BCE7-465D-ADBC-7A21242E2C78}.Release|Any CPU.Build.0 = Release|Any 
CPU + {2747FFC9-55AA-4A76-B0E9-D8A839E94E47}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2747FFC9-55AA-4A76-B0E9-D8A839E94E47}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2747FFC9-55AA-4A76-B0E9-D8A839E94E47}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2747FFC9-55AA-4A76-B0E9-D8A839E94E47}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {74AD8C3F-B0B8-472F-A847-1FFFB1667B34} = {F05001E1-6FFB-48AE-BF7F-7F39A24D3B70} + {451AD529-16B4-4049-9D0C-0C79B3DDFA52} = {F05001E1-6FFB-48AE-BF7F-7F39A24D3B70} + {F05001E1-6FFB-48AE-BF7F-7F39A24D3B70} = {75210F45-D690-4A61-9CD8-96B09E5DAAC5} + {C86F60F9-BBC1-4554-A3B0-D553F9C157A8} = {F05001E1-6FFB-48AE-BF7F-7F39A24D3B70} + {8FFB417A-2CDC-429F-ABE0-19B3015530D3} = {C86F60F9-BBC1-4554-A3B0-D553F9C157A8} + {C4316562-555A-4A79-9D71-15737976DF8B} = {C86F60F9-BBC1-4554-A3B0-D553F9C157A8} + {4ED8C38B-47EF-4368-9965-CF627465B45A} = {F05001E1-6FFB-48AE-BF7F-7F39A24D3B70} + {A936C411-0184-43F8-A343-0DE8C3B7B42E} = {4ED8C38B-47EF-4368-9965-CF627465B45A} + {B9747AFE-160D-4807-B989-B3F0ACCA3634} = {CC86007D-8193-4EAA-932D-A96B5F09847E} + {BDC147D8-4D97-4663-9408-BC822E1E0B3C} = {B9747AFE-160D-4807-B989-B3F0ACCA3634} + {80A728D5-7C00-4B59-A37E-321C54CC554F} = {BDC147D8-4D97-4663-9408-BC822E1E0B3C} + {4F2B8AF8-E1A4-4114-B4DA-4789A3A21143} = {80A728D5-7C00-4B59-A37E-321C54CC554F} + {7565CFB4-9761-4064-B18F-5E2644730BC0} = {80A728D5-7C00-4B59-A37E-321C54CC554F} + {5978938E-19F6-42AE-B588-7719A65ABCA7} = {451AD529-16B4-4049-9D0C-0C79B3DDFA52} + {CDA151F8-5BA7-47DB-883D-CBC2DD94F0DF} = {4ED8C38B-47EF-4368-9965-CF627465B45A} + {78B11A3E-1371-48D8-9B8E-AE6ED2380A50} = {451AD529-16B4-4049-9D0C-0C79B3DDFA52} + {7A349FC6-BCE7-465D-ADBC-7A21242E2C78} = {74AD8C3F-B0B8-472F-A847-1FFFB1667B34} + {2747FFC9-55AA-4A76-B0E9-D8A839E94E47} = {74AD8C3F-B0B8-472F-A847-1FFFB1667B34} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {4D0B64EB-14E9-4013-AA33-33716704909B} + EndGlobalSection +EndGlobal diff --git a/editor-dotnet/src/app/MBS.Editor.TestProject/MBS.Editor.TestProject.csproj b/editor-dotnet/src/app/MBS.Editor.TestProject/MBS.Editor.TestProject.csproj new file mode 100644 index 0000000..5778db8 --- /dev/null +++ b/editor-dotnet/src/app/MBS.Editor.TestProject/MBS.Editor.TestProject.csproj @@ -0,0 +1,15 @@ + + + + + + + + + Exe + net8.0 + enable + enable + + + diff --git a/editor-dotnet/src/app/MBS.Editor.TestProject/Program.cs b/editor-dotnet/src/app/MBS.Editor.TestProject/Program.cs new file mode 100644 index 0000000..51998a4 --- /dev/null +++ b/editor-dotnet/src/app/MBS.Editor.TestProject/Program.cs @@ -0,0 +1,51 @@ +using MBS.Core; + +using MBS.Editor.Core; +using MBS.Editor.Core.IO; +using MBS.Editor.Core.ObjectModels.FileSystem; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +class Program : Application +{ + protected override int StartInternal() + { + FileStream fs = System.IO.File.Open("/tmp/test.afs", FileMode.Create, FileAccess.Write); + /* + Writer writer = new Writer(fs); + + writer.Endianness = Endianness.BigEndian; + writer.WriteBoolean(true); + writer.WriteInt32(1024); + writer.WriteInt32(768); + writer.WriteFixedLengthString("Hello world"); + writer.WriteInt32(0x7B); + writer.Close(); + + */ + + FileSystemObjectModel fsom = new FileSystemObjectModel(); + fsom.Items.AddFile("test.ini", new ByteArrayFileSource(new byte[] { 0x20, 0x04, 0xFE, 0xDE })); + + 
Console.WriteLine(Environment.ProcessPath); + + Type t = MBS.Core.Reflection.TypeLoader.FindType("MBS.Editor.Plugins.CRI.DataFormats.FileSystem.AFS.AFSDataFormat"); + Console.WriteLine("found type {0}", t); + + DataFormat afs = DataFormat.FromType(t); + if (afs == null) + { + Console.WriteLine("could not load type"); + return 2; + } + + Document.Save(fsom, afs, fs); + fs.Close(); + + return base.StartInternal(); + } + + static void Main(string[] args) + { + (new Program()).Start(); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/app/MBS.Editor/Program.cs b/editor-dotnet/src/app/MBS.Editor/Program.cs index 5bde512..87c163e 100644 --- a/editor-dotnet/src/app/MBS.Editor/Program.cs +++ b/editor-dotnet/src/app/MBS.Editor/Program.cs @@ -1,3 +1,19 @@ -using MBS.Editor.UserInterface; +// +// Copyright (C) + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using MBS.Editor.UserInterface; return (new EditorApplication()).Start(); \ No newline at end of file diff --git a/editor-dotnet/src/install-plugins.sh b/editor-dotnet/src/install-plugins.sh new file mode 100755 index 0000000..6b16c80 --- /dev/null +++ b/editor-dotnet/src/install-plugins.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +CONFIDENCE=Debug +NET_VERSION=net8.0 + +if [ ! -d app/MBS.Editor/bin/$CONFIDENCE/$NET_VERSION/plugins ]; then + + mkdir app/MBS.Editor/bin/$CONFIDENCE/$NET_VERSION/plugins + +fi + +if [ ! -d app/MBS.Editor.TestProject/bin/$CONFIDENCE/$NET_VERSION/plugins ]; then + + mkdir app/MBS.Editor.TestProject/bin/$CONFIDENCE/$NET_VERSION/plugins + +fi + +for dir in plugins/* ; do + + echo "Building $dir" + + pushd $dir + dotnet build + popd + + echo "Copying $dir" + cp $dir/bin/$CONFIDENCE/$NET_VERSION/*.dll app/MBS.Editor/bin/$CONFIDENCE/$NET_VERSION/plugins + cp $dir/bin/$CONFIDENCE/$NET_VERSION/*.dll app/MBS.Editor.TestProject/bin/$CONFIDENCE/$NET_VERSION/plugins + +done diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/CRCUtilities.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/CRCUtilities.cs new file mode 100644 index 0000000..8fa8273 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/CRCUtilities.cs @@ -0,0 +1,157 @@ +using System.Runtime.CompilerServices; + +namespace MBS.Editor.Core.Checksum; + +internal static class CRCUtilities +{ + /// + /// The number of slicing lookup tables to generate. + /// + internal const int SlicingDegree = 16; + + /// + /// Generates multiple CRC lookup tables for a given polynomial, stored + /// in a linear array of uints. The first block (i.e. the first 256 + /// elements) is the same as the byte-by-byte CRC lookup table. + /// + /// The generating CRC polynomial + /// Whether the polynomial is in reversed bit order + /// A linear array of 256 * elements + /// + /// This table could also be generated as a rectangular array, but the + /// JIT compiler generates slower code than if we use a linear array. 
+ /// Known issue, see: https://github.com/dotnet/runtime/issues/30275 + /// + internal static uint[] GenerateSlicingLookupTable(uint polynomial, bool isReversed) + { + var table = new uint[256 * SlicingDegree]; + uint one = isReversed ? 1 : (1U << 31); + + for (int i = 0; i < 256; i++) + { + uint res = (uint)(isReversed ? i : i << 24); + for (int j = 0; j < SlicingDegree; j++) + { + for (int k = 0; k < 8; k++) + { + if (isReversed) + { + res = (res & one) == 1 ? polynomial ^ (res >> 1) : res >> 1; + } + else + { + res = (res & one) != 0 ? polynomial ^ (res << 1) : res << 1; + } + } + + table[(256 * j) + i] = res; + } + } + + return table; + } + + /// + /// Mixes the first four bytes of input with + /// using normal ordering before calling . + /// + /// Array of data to checksum + /// Offset to start reading from + /// The table to use for slicing-by-16 lookup + /// Checksum state before this update call + /// A new unfinalized checksum value + /// + /// + /// Assumes input[offset]..input[offset + 15] are valid array indexes. + /// For performance reasons, this must be checked by the caller. + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static uint UpdateDataForNormalPoly(byte[] input, int offset, uint[] crcTable, uint checkValue) + { + byte x1 = (byte)((byte)(checkValue >> 24) ^ input[offset]); + byte x2 = (byte)((byte)(checkValue >> 16) ^ input[offset + 1]); + byte x3 = (byte)((byte)(checkValue >> 8) ^ input[offset + 2]); + byte x4 = (byte)((byte)checkValue ^ input[offset + 3]); + + return UpdateDataCommon(input, offset, crcTable, x1, x2, x3, x4); + } + + /// + /// Mixes the first four bytes of input with + /// using reflected ordering before calling . + /// + /// Array of data to checksum + /// Offset to start reading from + /// The table to use for slicing-by-16 lookup + /// Checksum state before this update call + /// A new unfinalized checksum value + /// + /// + /// Assumes input[offset]..input[offset + 15] are valid array indexes. + /// For performance reasons, this must be checked by the caller. + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static uint UpdateDataForReversedPoly(byte[] input, int offset, uint[] crcTable, uint checkValue) + { + byte x1 = (byte)((byte)checkValue ^ input[offset]); + byte x2 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 1]); + byte x3 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 2]); + byte x4 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 3]); + + return UpdateDataCommon(input, offset, crcTable, x1, x2, x3, x4); + } + + /// + /// A shared method for updating an unfinalized CRC checksum using slicing-by-16. + /// + /// Array of data to checksum + /// Offset to start reading from + /// The table to use for slicing-by-16 lookup + /// First byte of input after mixing with the old CRC + /// Second byte of input after mixing with the old CRC + /// Third byte of input after mixing with the old CRC + /// Fourth byte of input after mixing with the old CRC + /// A new unfinalized checksum value + /// + /// + /// Even though the first four bytes of input are fed in as arguments, + /// should be the same value passed to this + /// function's caller (either or + /// ). This method will get inlined + /// into both functions, so using the same offset produces faster code. + /// + /// + /// Because most processors running C# have some kind of instruction-level + /// parallelism, the order of XOR operations can affect performance. 
This + /// ordering assumes that the assembly code generated by the just-in-time + /// compiler will emit a bunch of arithmetic operations for checking array + /// bounds. Then it opportunistically XORs a1 and a2 to keep the processor + /// busy while those other parts of the pipeline handle the range check + /// calculations. + /// + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static uint UpdateDataCommon(byte[] input, int offset, uint[] crcTable, byte x1, byte x2, byte x3, byte x4) + { + uint result; + uint a1 = crcTable[x1 + 3840] ^ crcTable[x2 + 3584]; + uint a2 = crcTable[x3 + 3328] ^ crcTable[x4 + 3072]; + + result = crcTable[input[offset + 4] + 2816]; + result ^= crcTable[input[offset + 5] + 2560]; + a1 ^= crcTable[input[offset + 9] + 1536]; + result ^= crcTable[input[offset + 6] + 2304]; + result ^= crcTable[input[offset + 7] + 2048]; + result ^= crcTable[input[offset + 8] + 1792]; + a2 ^= crcTable[input[offset + 13] + 512]; + result ^= crcTable[input[offset + 10] + 1280]; + result ^= crcTable[input[offset + 11] + 1024]; + result ^= crcTable[input[offset + 12] + 768]; + result ^= a1; + result ^= crcTable[input[offset + 14] + 256]; + result ^= crcTable[input[offset + 15]]; + result ^= a2; + + return result; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/ChecksumModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/ChecksumModule.cs new file mode 100644 index 0000000..e2baadb --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/ChecksumModule.cs @@ -0,0 +1,49 @@ +using System.Formats.Tar; +using System.Runtime.CompilerServices; + +namespace MBS.Editor.Core.Checksum; + +/// +/// Interface to compute a data checksum used by checked input/output streams. +/// A data checksum can be updated by one byte or with a byte array. After each +/// update the value of the current checksum can be returned by calling +/// getValue. The complete checksum object can also be reset +/// so it can be used again with new data. +/// +public abstract class ChecksumModule +{ + /// + /// Resets the data checksum as if no update was ever called. + /// + protected abstract void ResetInternal(); + + /// + /// Resets the data checksum as if no update was ever called. + /// + public void Reset() + { + ResetInternal(); + } + + protected abstract void UpdateInternal(int bval); + protected abstract long GetValueInternal(); + + private long checkValue; + + + /// + /// Returns the data checksum computed so far. + /// + public long Value { get { return GetValueInternal(); } } + + /// + /// Adds one byte to the data checksum. + /// + /// + /// the data value to add. The high byte of the int is ignored. + /// + public void Update(int bval) + { + UpdateInternal(bval); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/Modules/BZip2CRC/BZip2CRCChecksumModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/Modules/BZip2CRC/BZip2CRCChecksumModule.cs new file mode 100644 index 0000000..d4ad9c7 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Checksum/Modules/BZip2CRC/BZip2CRCChecksumModule.cs @@ -0,0 +1,172 @@ +using System; +using System.Runtime.CompilerServices; + +namespace MBS.Editor.Core.Checksum.Modules.BZip2CRC; + +/// +/// CRC-32 with unreversed data and reversed output +/// +/// +/// Generate a table for a byte-wise 32-bit CRC calculation on the polynomial: +/// x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x^1+x^0. 
+/// +/// Polynomials over GF(2) are represented in binary, one bit per coefficient, +/// with the lowest powers in the most significant bit. Then adding polynomials +/// is just exclusive-or, and multiplying a polynomial by x is a right shift by +/// one. If we call the above polynomial p, and represent a byte as the +/// polynomial q, also with the lowest power in the most significant bit (so the +/// byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p, +/// where a mod b means the remainder after dividing a by b. +/// +/// This calculation is done using the shift-register method of multiplying and +/// taking the remainder. The register is initialized to zero, and for each +/// incoming bit, x^32 is added mod p to the register if the bit is a one (where +/// x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by +/// x (which is shifting right by one and adding x^32 mod p if the bit shifted +/// out is a one). We start with the highest power (least significant bit) of +/// q and repeat for all eight bits of q. +/// +/// This implementation uses sixteen lookup tables stored in one linear array +/// to implement the slicing-by-16 algorithm, a variant of the slicing-by-8 +/// algorithm described in this Intel white paper: +/// +/// https://web.archive.org/web/20120722193753/http://download.intel.com/technology/comms/perfnet/download/slicing-by-8.pdf +/// +/// The first lookup table is simply the CRC of all possible eight bit values. +/// Each successive lookup table is derived from the original table generated +/// by Sarwate's algorithm. Slicing a 16-bit input and XORing the outputs +/// together will produce the same output as a byte-by-byte CRC loop with +/// fewer arithmetic and bit manipulation operations, at the cost of increased +/// memory consumed by the lookup tables. (Slicing-by-16 requires a 16KB table, +/// which is still small enough to fit in most processors' L1 cache.) +/// +public sealed class BZip2CRCChecksumModule : ChecksumModule +{ + #region Instance Fields + + private const uint crcInit = 0xFFFFFFFF; + //const uint crcXor = 0x00000000; + + private static readonly uint[] crcTable = CRCUtilities.GenerateSlicingLookupTable(0x04C11DB7, isReversed: false); + + /// + /// The CRC data checksum so far. + /// + private uint checkValue; + + #endregion Instance Fields + + /// + /// Initialise a default instance of + /// + public BZip2CRCChecksumModule() + { + Reset(); + } + + /// + /// Resets the CRC data checksum as if no update was ever called. + /// + protected override void ResetInternal() + { + checkValue = crcInit; + } + + /// + /// Returns the CRC data checksum computed so far. + /// + /// Reversed Out = true + protected override long GetValueInternal() + { + // Technically, the output should be: + //return (long)(~checkValue ^ crcXor); + // but x ^ 0 = x, so there is no point in adding + // the XOR operation + return (long)(~checkValue); + } + + /// + /// Updates the checksum with the int bval. + /// + /// + /// the byte is taken as the lower 8 bits of bval + /// + /// Reversed Data = false + [MethodImpl(MethodImplOptions.AggressiveInlining)] + protected override void UpdateInternal(int bval) + { + checkValue = unchecked(crcTable[(byte)(((checkValue >> 24) & 0xFF) ^ bval)] ^ (checkValue << 8)); + } + + + + + + + /// + /// Updates the CRC data checksum with the bytes taken from + /// a block of data. + /// + /// Contains the data to update the CRC with. 
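Aside on the checksum code above, not part of the change itself: the literal offsets 3840, 3584, ..., 256, 0 in UpdateDataCommon are simply 15 * 256 down to 0 * 256, i.e. the per-slice sub-tables laid out by GenerateSlicingLookupTable as table[(256 * j) + i]. A minimal usage sketch of the surface added here, with hypothetical input values:

    // assumes: using MBS.Editor.Core.Checksum.Modules.BZip2CRC;
    var crc = new BZip2CRCChecksumModule();
    byte[] data = System.Text.Encoding.ASCII.GetBytes("123456789");
    crc.Update(data);        // Update(byte[]) runs 16-byte chunks through the slicing tables and any remainder through SlowUpdateLoop
    long value = crc.Value;  // if the module matches the standard CRC-32/BZIP2 parameterisation, this should equal 0xFC891918
    crc.Reset();             // returns the state to 0xFFFFFFFF for the next, independent checksum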
+ public void Update(byte[] buffer) + { + if (buffer == null) + { + throw new ArgumentNullException(nameof(buffer)); + } + + Update(buffer, 0, buffer.Length); + } + + /// + /// Update CRC data checksum based on a portion of a block of data + /// + /// + /// The chunk of data to add + /// + public void Update(ArraySegment segment) + { + Update(segment.Array, segment.Offset, segment.Count); + } + + /// + /// Internal helper function for updating a block of data using slicing. + /// + /// The array containing the data to add + /// Range start for (inclusive) + /// The number of bytes to checksum starting from + private void Update(byte[] data, int offset, int count) + { + int remainder = count % CRCUtilities.SlicingDegree; + int end = offset + count - remainder; + + while (offset != end) + { + checkValue = CRCUtilities.UpdateDataForNormalPoly(data, offset, crcTable, checkValue); + offset += CRCUtilities.SlicingDegree; + } + + if (remainder != 0) + { + SlowUpdateLoop(data, offset, end + remainder); + } + } + + /// + /// A non-inlined function for updating data that doesn't fit in a 16-byte + /// block. We don't expect to enter this function most of the time, and when + /// we do we're not here for long, so disabling inlining here improves + /// performance overall. + /// + /// The array containing the data to add + /// Range start for (inclusive) + /// Range end for (exclusive) + [MethodImpl(MethodImplOptions.NoInlining)] + private void SlowUpdateLoop(byte[] data, int offset, int end) + { + while (offset != end) + { + Update(data[offset++]); + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionException.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionException.cs new file mode 100644 index 0000000..54dcc60 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionException.cs @@ -0,0 +1,9 @@ +namespace MBS.Editor.Core.Compression; + +public class CompressionException : Exception +{ + + public CompressionException() : base() { } + public CompressionException(string message) : base(message) { } + public CompressionException(string message, Exception innerException) : base(message, innerException) { } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionModule.cs new file mode 100644 index 0000000..179f9c3 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/CompressionModule.cs @@ -0,0 +1,43 @@ +namespace MBS.Editor.Core.Compression; + +public abstract class CompressionModule +{ + + public byte[] Compress(byte[] input) + { + MemoryStream inputStream = new MemoryStream(input); + MemoryStream outputStream = new MemoryStream(); + Compress(inputStream, outputStream); + + outputStream.Flush(); + outputStream.Close(); + return outputStream.ToArray(); + } + + public void Compress(Stream inputStream, Stream outputStream) + { + CompressInternal(inputStream, outputStream); + } + + public byte[] Decompress(byte[] input) + { + MemoryStream inputStream = new MemoryStream(input); + MemoryStream outputStream = new MemoryStream(); + Decompress(inputStream, outputStream); + + outputStream.Flush(); + outputStream.Close(); + return outputStream.ToArray(); + } + public void Decompress(Stream inputStream, Stream outputStream) + { + DecompressInternal(inputStream, outputStream); + } + + //protected abstract void CompressInternal(Stream inputStream, byte[] buffer, int offset, int length); + //protected 
abstract void DecompressInternal(Stream inputStream, byte[] buffer, int offset, int length); + + protected abstract void CompressInternal(Stream inputStream, Stream outputStream); + protected abstract void DecompressInternal(Stream inputStream, Stream outputStream); + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/DualStreamCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/DualStreamCompressionModule.cs new file mode 100644 index 0000000..d7fd264 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/DualStreamCompressionModule.cs @@ -0,0 +1,29 @@ +using System.IO.Compression; + +namespace MBS.Editor.Core.Compression; + +public abstract class DualStreamCompressionModule : CompressionModule where TInputStream : Stream where TOutputStream : Stream +{ + protected abstract TOutputStream CreateCompressor(Stream stream); + protected abstract TInputStream CreateDecompressor(Stream stream); + + protected override void CompressInternal(Stream inputStream, Stream outputStream) + { + TOutputStream _compressor = CreateCompressor(outputStream); + inputStream.CopyTo(_compressor); + + // !!! IMPORTANT !!! DO NOT FORGET TO FLUSH !!! + _compressor.Flush(); + _compressor.Close(); + } + protected override void DecompressInternal(Stream inputStream, Stream outputStream) + { + TInputStream _decompressor = CreateDecompressor(inputStream); + _decompressor.CopyTo(outputStream); + + // !!! IMPORTANT !!! DO NOT FORGET TO FLUSH !!! + _decompressor.Flush(); + _decompressor.Close(); + } + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2CompresssionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2CompresssionModule.cs new file mode 100644 index 0000000..7ff72b5 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2CompresssionModule.cs @@ -0,0 +1,38 @@ +using System.IO.Compression; +using MBS.Editor.Core.Compression; +using MBS.Editor.Core.Compression.Modules.BZip2; + +namespace MBS.Editor.Core.Compression.Modules.GZip; + +public class BZip2CompressionModule : DualStreamCompressionModule +{ + protected override BZip2OutputStream CreateCompressor(Stream stream) + { + return new BZip2OutputStream(stream); + } + protected override BZip2InputStream CreateDecompressor(Stream stream) + { + return new BZip2InputStream(stream); + } + + /* + protected override void CompressInternal(byte[] buffer, int offset, int length) + { + if (_compressor == null) + { + MemoryStream ms = new MemoryStream(); + _compressor = new GZipStream(ms, GetSystemCompressionLevel()); + } + _compressor.Write(buffer, offset, length); + } + protected override int DecompressInternal(byte[] buffer, int offset, int length) + { + if (_decompressor == null) + { + MemoryStream ms = new MemoryStream(); + _decompressor = new GZipStream(ms, GetSystemCompressionLevel()); + } + return _decompressor.Read(buffer, offset, length); + } + */ +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Constants.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Constants.cs new file mode 100644 index 0000000..1387075 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Constants.cs @@ -0,0 +1,116 @@ +namespace MBS.Editor.Core.Compression.Modules.BZip2; + +/// +/// Defines internal values for both compression and decompression +/// +internal static 
class BZip2Constants +{ + /// + /// Random numbers used to randomise repetitive blocks + /// + public readonly static int[] RandomNumbers = { + 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, + 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, + 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, + 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, + 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, + 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, + 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, + 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, + 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, + 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, + 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, + 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, + 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, + 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, + 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, + 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, + 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, + 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, + 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, + 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, + 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, + 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, + 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, + 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, + 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, + 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, + 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, + 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, + 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, + 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, + 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, + 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, + 946, 670, 656, 610, 738, 392, 760, 799, 887, 653, + 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, + 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, + 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, + 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, + 134, 108, 571, 364, 631, 212, 174, 643, 304, 329, + 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, + 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, + 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, + 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, + 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, + 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, + 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, + 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, + 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, + 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, + 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, + 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, + 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, + 936, 638 + }; + + /// + /// When multiplied by compression parameter (1-9) gives the block size for compression + /// 9 gives the best compression but uses the most memory. 
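Worked example: a level-9 stream operates on blocks of 9 * 100000 = 900000 bytes, and that maximum block size is the same 900000 that appears in the MaximumSelectors formula below.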
+ /// + public const int BaseBlockSize = 100000; + + /// + /// Backend constant + /// + public const int MaximumAlphaSize = 258; + + /// + /// Backend constant + /// + public const int MaximumCodeLength = 23; + + /// + /// Backend constant + /// + public const int RunA = 0; + + /// + /// Backend constant + /// + public const int RunB = 1; + + /// + /// Backend constant + /// + public const int GroupCount = 6; + + /// + /// Backend constant + /// + public const int GroupSize = 50; + + /// + /// Backend constant + /// + public const int NumberOfIterations = 4; + + /// + /// Backend constant + /// + public const int MaximumSelectors = (2 + (900000 / GroupSize)); + + /// + /// Backend constant + /// + public const int OvershootBytes = 20; +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Exception.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Exception.cs new file mode 100644 index 0000000..9fe6f76 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2Exception.cs @@ -0,0 +1,36 @@ +using System; +using System.Runtime.Serialization; + +namespace MBS.Editor.Core.Compression.Modules.BZip2; + +/// +/// BZip2Exception represents exceptions specific to BZip2 classes and code. +/// +public class BZip2Exception : CompressionException +{ + /// + /// Initialise a new instance of . + /// + public BZip2Exception() + { + } + + /// + /// Initialise a new instance of with its message string. + /// + /// A that describes the error. + public BZip2Exception(string message) + : base(message) + { + } + + /// + /// Initialise a new instance of . + /// + /// A that describes the error. + /// The that caused this exception. + public BZip2Exception(string message, Exception innerException) + : base(message, innerException) + { + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2InputStream.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2InputStream.cs new file mode 100644 index 0000000..b798fa8 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2InputStream.cs @@ -0,0 +1,1052 @@ +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + #define VECTORIZE_MEMORY_MOVE +#endif + +using MBS.Editor.Core.Checksum; +using System; +using System.IO; + +namespace MBS.Editor.Core.Compression.Modules.BZip2; + +/// +/// An input stream that decompresses files in the BZip2 format +/// +public class BZip2InputStream : Stream +{ + #region Constants + + private const int START_BLOCK_STATE = 1; + private const int RAND_PART_A_STATE = 2; + private const int RAND_PART_B_STATE = 3; + private const int RAND_PART_C_STATE = 4; + private const int NO_RAND_PART_A_STATE = 5; + private const int NO_RAND_PART_B_STATE = 6; + private const int NO_RAND_PART_C_STATE = 7; + +#if VECTORIZE_MEMORY_MOVE + private static readonly int VectorSize = System.Numerics.Vector.Count; +#endif // VECTORIZE_MEMORY_MOVE + +#endregion Constants + + #region Instance Fields + + /*-- + index of the last char in the block, so + the block size == last + 1. + --*/ + private int last; + + /*-- + index in zptr[] of original string after sorting. + --*/ + private int origPtr; + + /*-- + always: in the range 0 .. 9. + The current block size is 100000 * this number. 
+ --*/ + private int blockSize100k; + + private bool blockRandomised; + + private int bsBuff; + private int bsLive; + private ChecksumModule mCrc = new Checksum.Modules.BZip2CRC.BZip2CRCChecksumModule(); + + private bool[] inUse = new bool[256]; + private int nInUse; + + private byte[] seqToUnseq = new byte[256]; + private byte[] unseqToSeq = new byte[256]; + + private byte[] selector = new byte[BZip2Constants.MaximumSelectors]; + private byte[] selectorMtf = new byte[BZip2Constants.MaximumSelectors]; + + private int[] tt; + private byte[] ll8; + + /*-- + freq table collected to save a pass over the data + during decompression. + --*/ + private int[] unzftab = new int[256]; + + private int[][] limit = new int[BZip2Constants.GroupCount][]; + private int[][] baseArray = new int[BZip2Constants.GroupCount][]; + private int[][] perm = new int[BZip2Constants.GroupCount][]; + private int[] minLens = new int[BZip2Constants.GroupCount]; + + private readonly Stream baseStream; + private bool streamEnd; + + private int currentChar = -1; + + private int currentState = START_BLOCK_STATE; + + private int storedBlockCRC, storedCombinedCRC; + private int computedBlockCRC; + private uint computedCombinedCRC; + + private int count, chPrev, ch2; + private int tPos; + private int rNToGo; + private int rTPos; + private int i2, j2; + private byte z; + + #endregion Instance Fields + + /// + /// Construct instance for reading from stream + /// + /// Data source + public BZip2InputStream(Stream stream) + { + if (stream == null) + throw new ArgumentNullException(nameof(stream)); + // init arrays + for (int i = 0; i < BZip2Constants.GroupCount; ++i) + { + limit[i] = new int[BZip2Constants.MaximumAlphaSize]; + baseArray[i] = new int[BZip2Constants.MaximumAlphaSize]; + perm[i] = new int[BZip2Constants.MaximumAlphaSize]; + } + + baseStream = stream; + bsLive = 0; + bsBuff = 0; + Initialize(); + InitBlock(); + SetupBlock(); + } + + /// + /// Get/set flag indicating ownership of underlying stream. + /// When the flag is true will close the underlying stream also. + /// + public bool IsStreamOwner { get; set; } = true; + + #region Stream Overrides + + /// + /// Gets a value indicating if the stream supports reading + /// + public override bool CanRead + { + get + { + return baseStream.CanRead; + } + } + + /// + /// Gets a value indicating whether the current stream supports seeking. + /// + public override bool CanSeek + { + get + { + return false; + } + } + + /// + /// Gets a value indicating whether the current stream supports writing. + /// This property always returns false + /// + public override bool CanWrite + { + get + { + return false; + } + } + + /// + /// Gets the length in bytes of the stream. + /// + public override long Length + { + get + { + return baseStream.Length; + } + } + + /// + /// Gets the current position of the stream. + /// Setting the position is not supported and will throw a NotSupportException. + /// + /// Any attempt to set the position. + public override long Position + { + get + { + return baseStream.Position; + } + set + { + throw new NotSupportedException("BZip2InputStream position cannot be set"); + } + } + + /// + /// Flushes the stream. + /// + public override void Flush() + { + baseStream.Flush(); + } + + /// + /// Set the streams position. This operation is not supported and will throw a NotSupportedException + /// + /// A byte offset relative to the parameter. + /// A value of type indicating the reference point used to obtain the new position. + /// The new position of the stream. 
+ /// Any access + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException("BZip2InputStream Seek not supported"); + } + + /// + /// Sets the length of this stream to the given value. + /// This operation is not supported and will throw a NotSupportedException + /// + /// The new length for the stream. + /// Any access + public override void SetLength(long value) + { + throw new NotSupportedException("BZip2InputStream SetLength not supported"); + } + + /// + /// Writes a block of bytes to this stream using data from a buffer. + /// This operation is not supported and will throw a NotSupportedException + /// + /// The buffer to source data from. + /// The offset to start obtaining data from. + /// The number of bytes of data to write. + /// Any access + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException("BZip2InputStream Write not supported"); + } + + /// + /// Writes a byte to the current position in the file stream. + /// This operation is not supported and will throw a NotSupportedException + /// + /// The value to write. + /// Any access + public override void WriteByte(byte value) + { + throw new NotSupportedException("BZip2InputStream WriteByte not supported"); + } + + /// + /// Read a sequence of bytes and advance the read position by the number of bytes read. + /// + /// Array of bytes to store values in + /// Offset in array to begin storing data + /// The maximum number of bytes to read + /// The total number of bytes read into the buffer. This might be less + /// than the number of bytes requested if that number of bytes are not + /// currently available or zero if the end of the stream is reached. + /// + public override int Read(byte[] buffer, int offset, int count) + { + if (buffer == null) + { + throw new ArgumentNullException(nameof(buffer)); + } + + for (int i = 0; i < count; ++i) + { + int rb = ReadByte(); + if (rb == -1) + { + return i; + } + buffer[offset + i] = (byte)rb; + } + return count; + } + + /// + /// Closes the stream, releasing any associated resources. 
+ /// + protected override void Dispose(bool disposing) + { + if (disposing && IsStreamOwner) + { + baseStream.Dispose(); + } + } + + /// + /// Read a byte from stream advancing position + /// + /// byte read or -1 on end of stream + public override int ReadByte() + { + if (streamEnd) + { + return -1; // ok + } + + int retChar = currentChar; + switch (currentState) + { + case RAND_PART_B_STATE: + SetupRandPartB(); + break; + + case RAND_PART_C_STATE: + SetupRandPartC(); + break; + + case NO_RAND_PART_B_STATE: + SetupNoRandPartB(); + break; + + case NO_RAND_PART_C_STATE: + SetupNoRandPartC(); + break; + + case START_BLOCK_STATE: + case NO_RAND_PART_A_STATE: + case RAND_PART_A_STATE: + break; + } + return retChar; + } + + #endregion Stream Overrides + + private void MakeMaps() + { + nInUse = 0; + for (int i = 0; i < 256; ++i) + { + if (inUse[i]) + { + seqToUnseq[nInUse] = (byte)i; + unseqToSeq[i] = (byte)nInUse; + nInUse++; + } + } + } + + private void Initialize() + { + char magic1 = BsGetUChar(); + char magic2 = BsGetUChar(); + + char magic3 = BsGetUChar(); + char magic4 = BsGetUChar(); + + if (magic1 != 'B' || magic2 != 'Z' || magic3 != 'h' || magic4 < '1' || magic4 > '9') + { + streamEnd = true; + return; + } + + SetDecompressStructureSizes(magic4 - '0'); + computedCombinedCRC = 0; + } + + private void InitBlock() + { + char magic1 = BsGetUChar(); + char magic2 = BsGetUChar(); + char magic3 = BsGetUChar(); + char magic4 = BsGetUChar(); + char magic5 = BsGetUChar(); + char magic6 = BsGetUChar(); + + if (magic1 == 0x17 && magic2 == 0x72 && magic3 == 0x45 && magic4 == 0x38 && magic5 == 0x50 && magic6 == 0x90) + { + Complete(); + return; + } + + if (magic1 != 0x31 || magic2 != 0x41 || magic3 != 0x59 || magic4 != 0x26 || magic5 != 0x53 || magic6 != 0x59) + { + BadBlockHeader(); + streamEnd = true; + return; + } + + storedBlockCRC = BsGetInt32(); + + blockRandomised = (BsR(1) == 1); + + GetAndMoveToFrontDecode(); + + mCrc.Reset(); + currentState = START_BLOCK_STATE; + } + + private void EndBlock() + { + computedBlockCRC = (int)mCrc.Value; + + // -- A bad CRC is considered a fatal error. 
-- + if (storedBlockCRC != computedBlockCRC) + { + CrcError(); + } + + // 1528150659 + computedCombinedCRC = ((computedCombinedCRC << 1) & 0xFFFFFFFF) | (computedCombinedCRC >> 31); + computedCombinedCRC = computedCombinedCRC ^ (uint)computedBlockCRC; + } + + private void Complete() + { + storedCombinedCRC = BsGetInt32(); + if (storedCombinedCRC != (int)computedCombinedCRC) + { + CrcError(); + } + + streamEnd = true; + } + + private void FillBuffer() + { + int thech = 0; + + try + { + thech = baseStream.ReadByte(); + } + catch (Exception) + { + CompressedStreamEOF(); + } + + if (thech == -1) + { + CompressedStreamEOF(); + } + + bsBuff = (bsBuff << 8) | (thech & 0xFF); + bsLive += 8; + } + + private int BsR(int n) + { + while (bsLive < n) + { + FillBuffer(); + } + + int v = (bsBuff >> (bsLive - n)) & ((1 << n) - 1); + bsLive -= n; + return v; + } + + private char BsGetUChar() + { + return (char)BsR(8); + } + + private int BsGetIntVS(int numBits) + { + return BsR(numBits); + } + + private int BsGetInt32() + { + int result = BsR(8); + result = (result << 8) | BsR(8); + result = (result << 8) | BsR(8); + result = (result << 8) | BsR(8); + return result; + } + + private void RecvDecodingTables() + { + char[][] len = new char[BZip2Constants.GroupCount][]; + for (int i = 0; i < BZip2Constants.GroupCount; ++i) + { + len[i] = new char[BZip2Constants.MaximumAlphaSize]; + } + + bool[] inUse16 = new bool[16]; + + //--- Receive the mapping table --- + for (int i = 0; i < 16; i++) + { + inUse16[i] = (BsR(1) == 1); + } + + for (int i = 0; i < 16; i++) + { + if (inUse16[i]) + { + for (int j = 0; j < 16; j++) + { + inUse[i * 16 + j] = (BsR(1) == 1); + } + } + else + { + for (int j = 0; j < 16; j++) + { + inUse[i * 16 + j] = false; + } + } + } + + MakeMaps(); + int alphaSize = nInUse + 2; + + //--- Now the selectors --- + int nGroups = BsR(3); + int nSelectors = BsR(15); + + for (int i = 0; i < nSelectors; i++) + { + int j = 0; + while (BsR(1) == 1) + { + j++; + } + selectorMtf[i] = (byte)j; + } + + //--- Undo the MTF values for the selectors. --- + byte[] pos = new byte[BZip2Constants.GroupCount]; + for (int v = 0; v < nGroups; v++) + { + pos[v] = (byte)v; + } + + for (int i = 0; i < nSelectors; i++) + { + int v = selectorMtf[i]; + byte tmp = pos[v]; + while (v > 0) + { + pos[v] = pos[v - 1]; + v--; + } + pos[0] = tmp; + selector[i] = tmp; + } + + //--- Now the coding tables --- + for (int t = 0; t < nGroups; t++) + { + int curr = BsR(5); + for (int i = 0; i < alphaSize; i++) + { + while (BsR(1) == 1) + { + if (BsR(1) == 0) + { + curr++; + } + else + { + curr--; + } + } + len[t][i] = (char)curr; + } + } + + //--- Create the Huffman decoding tables --- + for (int t = 0; t < nGroups; t++) + { + int minLen = 32; + int maxLen = 0; + for (int i = 0; i < alphaSize; i++) + { + maxLen = Math.Max(maxLen, len[t][i]); + minLen = Math.Min(minLen, len[t][i]); + } + HbCreateDecodeTables(limit[t], baseArray[t], perm[t], len[t], minLen, maxLen, alphaSize); + minLens[t] = minLen; + } + } + + private void GetAndMoveToFrontDecode() + { + byte[] yy = new byte[256]; + int nextSym; + + int limitLast = BZip2Constants.BaseBlockSize * blockSize100k; + origPtr = BsGetIntVS(24); + + RecvDecodingTables(); + int EOB = nInUse + 1; + int groupNo = -1; + int groupPos = 0; + + /*-- + Setting up the unzftab entries here is not strictly + necessary, but it does save having to do it later + in a separate pass, and so saves a block's worth of + cache misses. 
+ --*/ + for (int i = 0; i <= 255; i++) + { + unzftab[i] = 0; + } + + for (int i = 0; i <= 255; i++) + { + yy[i] = (byte)i; + } + + last = -1; + + if (groupPos == 0) + { + groupNo++; + groupPos = BZip2Constants.GroupSize; + } + + groupPos--; + int zt = selector[groupNo]; + int zn = minLens[zt]; + int zvec = BsR(zn); + int zj; + + while (zvec > limit[zt][zn]) + { + if (zn > 20) + { // the longest code + throw new BZip2Exception("Bzip data error"); + } + zn++; + while (bsLive < 1) + { + FillBuffer(); + } + zj = (bsBuff >> (bsLive - 1)) & 1; + bsLive--; + zvec = (zvec << 1) | zj; + } + if (zvec - baseArray[zt][zn] < 0 || zvec - baseArray[zt][zn] >= BZip2Constants.MaximumAlphaSize) + { + throw new BZip2Exception("Bzip data error"); + } + nextSym = perm[zt][zvec - baseArray[zt][zn]]; + + while (true) + { + if (nextSym == EOB) + { + break; + } + + if (nextSym == BZip2Constants.RunA || nextSym == BZip2Constants.RunB) + { + int s = -1; + int n = 1; + do + { + if (nextSym == BZip2Constants.RunA) + { + s += (0 + 1) * n; + } + else if (nextSym == BZip2Constants.RunB) + { + s += (1 + 1) * n; + } + + n <<= 1; + + if (groupPos == 0) + { + groupNo++; + groupPos = BZip2Constants.GroupSize; + } + + groupPos--; + + zt = selector[groupNo]; + zn = minLens[zt]; + zvec = BsR(zn); + + while (zvec > limit[zt][zn]) + { + zn++; + while (bsLive < 1) + { + FillBuffer(); + } + zj = (bsBuff >> (bsLive - 1)) & 1; + bsLive--; + zvec = (zvec << 1) | zj; + } + nextSym = perm[zt][zvec - baseArray[zt][zn]]; + } while (nextSym == BZip2Constants.RunA || nextSym == BZip2Constants.RunB); + + s++; + byte ch = seqToUnseq[yy[0]]; + unzftab[ch] += s; + + while (s > 0) + { + last++; + ll8[last] = ch; + s--; + } + + if (last >= limitLast) + { + BlockOverrun(); + } + continue; + } + else + { + last++; + if (last >= limitLast) + { + BlockOverrun(); + } + + byte tmp = yy[nextSym - 1]; + unzftab[seqToUnseq[tmp]]++; + ll8[last] = seqToUnseq[tmp]; + + var j = nextSym - 1; + +#if VECTORIZE_MEMORY_MOVE + // This is vectorized memory move. Going from the back, we're taking chunks of array + // and write them at the new location shifted by one. Since chunks are VectorSize long, + // at the end we have to move "tail" (or head actually) of the array using a plain loop. + // If System.Numerics.Vector API is not available, the plain loop is used to do the whole copying. 
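// Hypothetical walk-through of the loop below, added for illustration (the numbers are made up):
// with VectorSize == 8 and j == 20, the vector loop copies yy[12..19] to yy[13..20], then
// yy[4..11] to yy[5..12], leaving j == 4; the scalar loop afterwards moves yy[3] to yy[4],
// yy[2] to yy[3], yy[1] to yy[2], yy[0] to yy[1]. Net effect: yy[0..19] shifts up one slot
// so the symbol being moved to the front can be written into yy[0].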
+ + while(j >= VectorSize) + { + var arrayPart = new System.Numerics.Vector(yy, j - VectorSize); + arrayPart.CopyTo(yy, j - VectorSize + 1); + j -= VectorSize; + } +#endif // VECTORIZE_MEMORY_MOVE + + while(j > 0) + { + yy[j] = yy[--j]; + } + + yy[0] = tmp; + + if (groupPos == 0) + { + groupNo++; + groupPos = BZip2Constants.GroupSize; + } + + groupPos--; + zt = selector[groupNo]; + zn = minLens[zt]; + zvec = BsR(zn); + while (zvec > limit[zt][zn]) + { + zn++; + while (bsLive < 1) + { + FillBuffer(); + } + zj = (bsBuff >> (bsLive - 1)) & 1; + bsLive--; + zvec = (zvec << 1) | zj; + } + nextSym = perm[zt][zvec - baseArray[zt][zn]]; + continue; + } + } + } + + private void SetupBlock() + { + int[] cftab = new int[257]; + + cftab[0] = 0; + Array.Copy(unzftab, 0, cftab, 1, 256); + + for (int i = 1; i <= 256; i++) + { + cftab[i] += cftab[i - 1]; + } + + for (int i = 0; i <= last; i++) + { + byte ch = ll8[i]; + tt[cftab[ch]] = i; + cftab[ch]++; + } + + cftab = null; + + tPos = tt[origPtr]; + + count = 0; + i2 = 0; + ch2 = 256; /*-- not a char and not EOF --*/ + + if (blockRandomised) + { + rNToGo = 0; + rTPos = 0; + SetupRandPartA(); + } + else + { + SetupNoRandPartA(); + } + } + + private void SetupRandPartA() + { + if (i2 <= last) + { + chPrev = ch2; + ch2 = ll8[tPos]; + tPos = tt[tPos]; + if (rNToGo == 0) + { + rNToGo = BZip2Constants.RandomNumbers[rTPos]; + rTPos++; + if (rTPos == 512) + { + rTPos = 0; + } + } + rNToGo--; + ch2 ^= (int)((rNToGo == 1) ? 1 : 0); + i2++; + + currentChar = ch2; + currentState = RAND_PART_B_STATE; + mCrc.Update(ch2); + } + else + { + EndBlock(); + InitBlock(); + SetupBlock(); + } + } + + private void SetupNoRandPartA() + { + if (i2 <= last) + { + chPrev = ch2; + ch2 = ll8[tPos]; + tPos = tt[tPos]; + i2++; + + currentChar = ch2; + currentState = NO_RAND_PART_B_STATE; + mCrc.Update(ch2); + } + else + { + EndBlock(); + InitBlock(); + SetupBlock(); + } + } + + private void SetupRandPartB() + { + if (ch2 != chPrev) + { + currentState = RAND_PART_A_STATE; + count = 1; + SetupRandPartA(); + } + else + { + count++; + if (count >= 4) + { + z = ll8[tPos]; + tPos = tt[tPos]; + if (rNToGo == 0) + { + rNToGo = BZip2Constants.RandomNumbers[rTPos]; + rTPos++; + if (rTPos == 512) + { + rTPos = 0; + } + } + rNToGo--; + z ^= (byte)((rNToGo == 1) ? 
1 : 0); + j2 = 0; + currentState = RAND_PART_C_STATE; + SetupRandPartC(); + } + else + { + currentState = RAND_PART_A_STATE; + SetupRandPartA(); + } + } + } + + private void SetupRandPartC() + { + if (j2 < (int)z) + { + currentChar = ch2; + mCrc.Update(ch2); + j2++; + } + else + { + currentState = RAND_PART_A_STATE; + i2++; + count = 0; + SetupRandPartA(); + } + } + + private void SetupNoRandPartB() + { + if (ch2 != chPrev) + { + currentState = NO_RAND_PART_A_STATE; + count = 1; + SetupNoRandPartA(); + } + else + { + count++; + if (count >= 4) + { + z = ll8[tPos]; + tPos = tt[tPos]; + currentState = NO_RAND_PART_C_STATE; + j2 = 0; + SetupNoRandPartC(); + } + else + { + currentState = NO_RAND_PART_A_STATE; + SetupNoRandPartA(); + } + } + } + + private void SetupNoRandPartC() + { + if (j2 < (int)z) + { + currentChar = ch2; + mCrc.Update(ch2); + j2++; + } + else + { + currentState = NO_RAND_PART_A_STATE; + i2++; + count = 0; + SetupNoRandPartA(); + } + } + + private void SetDecompressStructureSizes(int newSize100k) + { + if (!(0 <= newSize100k && newSize100k <= 9 && 0 <= blockSize100k && blockSize100k <= 9)) + { + throw new BZip2Exception("Invalid block size"); + } + + blockSize100k = newSize100k; + + if (newSize100k == 0) + { + return; + } + + int n = BZip2Constants.BaseBlockSize * newSize100k; + ll8 = new byte[n]; + tt = new int[n]; + } + + private static void CompressedStreamEOF() + { + throw new EndOfStreamException("BZip2 input stream end of compressed stream"); + } + + private static void BlockOverrun() + { + throw new BZip2Exception("BZip2 input stream block overrun"); + } + + private static void BadBlockHeader() + { + throw new BZip2Exception("BZip2 input stream bad block header"); + } + + private static void CrcError() + { + throw new BZip2Exception("BZip2 input stream crc error"); + } + + private static void HbCreateDecodeTables(int[] limit, int[] baseArray, int[] perm, char[] length, int minLen, int maxLen, int alphaSize) + { + int pp = 0; + + for (int i = minLen; i <= maxLen; ++i) + { + for (int j = 0; j < alphaSize; ++j) + { + if (length[j] == i) + { + perm[pp] = j; + ++pp; + } + } + } + + for (int i = 0; i < BZip2Constants.MaximumCodeLength; i++) + { + baseArray[i] = 0; + } + + for (int i = 0; i < alphaSize; i++) + { + ++baseArray[length[i] + 1]; + } + + for (int i = 1; i < BZip2Constants.MaximumCodeLength; i++) + { + baseArray[i] += baseArray[i - 1]; + } + + for (int i = 0; i < BZip2Constants.MaximumCodeLength; i++) + { + limit[i] = 0; + } + + int vec = 0; + + for (int i = minLen; i <= maxLen; i++) + { + vec += (baseArray[i + 1] - baseArray[i]); + limit[i] = vec - 1; + vec <<= 1; + } + + for (int i = minLen + 1; i <= maxLen; i++) + { + baseArray[i] = ((limit[i - 1] + 1) << 1) - baseArray[i]; + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2OutputStream.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2OutputStream.cs new file mode 100644 index 0000000..d7956db --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/BZip2/BZip2OutputStream.cs @@ -0,0 +1,2033 @@ +using MBS.Editor.Core.Checksum; +using MBS.Editor.Core.Checksum.Modules.BZip2CRC; +using System; +using System.IO; + +namespace MBS.Editor.Core.Compression.Modules.BZip2; + +/// +/// An output stream that compresses into the BZip2 format +/// including file header chars into another stream. 
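A minimal usage sketch for this stream together with the BZip2InputStream added earlier in this diff; the file name and payload are made up for illustration:

    // assumes: using System; using System.IO; using System.Text; using MBS.Editor.Core.Compression.Modules.BZip2;
    using (var file = File.Create("/tmp/example.bz2"))
    using (var bz = new BZip2OutputStream(file, 9))    // block size 1..9; the constructor clamps out-of-range values
    {
        byte[] payload = Encoding.UTF8.GetBytes("hello bzip2");
        bz.Write(payload, 0, payload.Length);
    }   // disposing is expected to emit the final block; IsStreamOwner defaults to true, so the FileStream is closed as well

    using (var file = File.OpenRead("/tmp/example.bz2"))
    using (var bz = new BZip2InputStream(file))
    {
        var decoded = new MemoryStream();
        bz.CopyTo(decoded);                            // Stream.CopyTo drives the overridden Read/ReadByte state machine
        Console.WriteLine(Encoding.UTF8.GetString(decoded.ToArray()));
    }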
+/// +public class BZip2OutputStream : Stream +{ + #region Constants + + private const int SETMASK = (1 << 21); + private const int CLEARMASK = (~SETMASK); + private const int GREATER_ICOST = 15; + private const int LESSER_ICOST = 0; + private const int SMALL_THRESH = 20; + private const int DEPTH_THRESH = 10; + + /*-- + If you are ever unlucky/improbable enough + to get a stack overflow whilst sorting, + increase the following constant and try + again. In practice I have never seen the + stack go above 27 elems, so the following + limit seems very generous. + --*/ + private const int QSORT_STACK_SIZE = 1000; + + /*-- + Knuth's increments seem to work better + than Incerpi-Sedgewick here. Possibly + because the number of elems to sort is + usually small, typically <= 20. + --*/ + + private readonly int[] increments = { + 1, 4, 13, 40, 121, 364, 1093, 3280, + 9841, 29524, 88573, 265720, + 797161, 2391484 + }; + + #endregion Constants + + #region Instance Fields + + /*-- + index of the last char in the block, so + the block size == last + 1. + --*/ + private int last; + + /*-- + index in zptr[] of original string after sorting. + --*/ + private int origPtr; + + /*-- + always: in the range 0 .. 9. + The current block size is 100000 * this number. + --*/ + private int blockSize100k; + + private bool blockRandomised; + + private int bytesOut; + private int bsBuff; + private int bsLive; + private ChecksumModule mCrc = new BZip2CRCChecksumModule(); + + private bool[] inUse = new bool[256]; + private int nInUse; + + private char[] seqToUnseq = new char[256]; + private char[] unseqToSeq = new char[256]; + + private char[] selector = new char[BZip2Constants.MaximumSelectors]; + private char[] selectorMtf = new char[BZip2Constants.MaximumSelectors]; + + private byte[] block; + private int[] quadrant; + private int[] zptr; + private short[] szptr; + private int[] ftab; + + private int nMTF; + + private int[] mtfFreq = new int[BZip2Constants.MaximumAlphaSize]; + + /* + * Used when sorting. If too many long comparisons + * happen, we stop sorting, randomise the block + * slightly, and try again. + */ + private int workFactor; + private int workDone; + private int workLimit; + private bool firstAttempt; + private int nBlocksRandomised; + + private int currentChar = -1; + private int runLength; + private uint blockCRC, combinedCRC; + private int allowableBlockSize; + private readonly Stream baseStream; + private bool disposed_; + + #endregion Instance Fields + + /// + /// Construct a default output stream with maximum block size + /// + /// The stream to write BZip data onto. + public BZip2OutputStream(Stream stream) : this(stream, 9) + { + } + + /// + /// Initialise a new instance of the + /// for the specified stream, using the given blocksize. + /// + /// The stream to write compressed data to. + /// The block size to use. + /// + /// Valid block sizes are in the range 1..9, with 1 giving + /// the lowest compression and 9 the highest. + /// + public BZip2OutputStream(Stream stream, int blockSize) + { + if (stream == null) + throw new ArgumentNullException(nameof(stream)); + + baseStream = stream; + bsLive = 0; + bsBuff = 0; + bytesOut = 0; + + workFactor = 50; + if (blockSize > 9) + { + blockSize = 9; + } + + if (blockSize < 1) + { + blockSize = 1; + } + blockSize100k = blockSize; + AllocateCompressStructures(); + Initialize(); + InitBlock(); + } + + /// + /// Ensures that resources are freed and other cleanup operations + /// are performed when the garbage collector reclaims the BZip2OutputStream. 
+ /// + ~BZip2OutputStream() + { + Dispose(false); + } + + /// + /// Gets or sets a flag indicating ownership of underlying stream. + /// When the flag is true will close the underlying stream also. + /// + /// The default value is true. + public bool IsStreamOwner { get; set; } = true; + + /// + /// Gets a value indicating whether the current stream supports reading + /// + public override bool CanRead + { + get + { + return false; + } + } + + /// + /// Gets a value indicating whether the current stream supports seeking + /// + public override bool CanSeek + { + get + { + return false; + } + } + + /// + /// Gets a value indicating whether the current stream supports writing + /// + public override bool CanWrite + { + get + { + return baseStream.CanWrite; + } + } + + /// + /// Gets the length in bytes of the stream + /// + public override long Length + { + get + { + return baseStream.Length; + } + } + + /// + /// Gets or sets the current position of this stream. + /// + public override long Position + { + get + { + return baseStream.Position; + } + set + { + throw new NotSupportedException("BZip2OutputStream position cannot be set"); + } + } + + /// + /// Sets the current position of this stream to the given value. + /// + /// The point relative to the offset from which to being seeking. + /// The reference point from which to begin seeking. + /// The new position in the stream. + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException("BZip2OutputStream Seek not supported"); + } + + /// + /// Sets the length of this stream to the given value. + /// + /// The new stream length. + public override void SetLength(long value) + { + throw new NotSupportedException("BZip2OutputStream SetLength not supported"); + } + + /// + /// Read a byte from the stream advancing the position. + /// + /// The byte read cast to an int; -1 if end of stream. + public override int ReadByte() + { + throw new NotSupportedException("BZip2OutputStream ReadByte not supported"); + } + + /// + /// Read a block of bytes + /// + /// The buffer to read into. + /// The offset in the buffer to start storing data at. + /// The maximum number of bytes to read. + /// The total number of bytes read. This might be less than the number of bytes + /// requested if that number of bytes are not currently available, or zero + /// if the end of the stream is reached. + public override int Read(byte[] buffer, int offset, int count) + { + throw new NotSupportedException("BZip2OutputStream Read not supported"); + } + + /// + /// Write a block of bytes to the stream + /// + /// The buffer containing data to write. + /// The offset of the first byte to write. + /// The number of bytes to write. + public override void Write(byte[] buffer, int offset, int count) + { + if (buffer == null) + { + throw new ArgumentNullException(nameof(buffer)); + } + + if (offset < 0) + { + throw new ArgumentOutOfRangeException(nameof(offset)); + } + + if (count < 0) + { + throw new ArgumentOutOfRangeException(nameof(count)); + } + + if (buffer.Length - offset < count) + { + throw new ArgumentException("Offset/count out of range"); + } + + for (int i = 0; i < count; ++i) + { + WriteByte(buffer[offset + i]); + } + } + + /// + /// Write a byte to the stream. + /// + /// The byte to write to the stream. 
+ public override void WriteByte(byte value) + { + int b = (256 + value) % 256; + if (currentChar != -1) + { + if (currentChar == b) + { + runLength++; + if (runLength > 254) + { + WriteRun(); + currentChar = -1; + runLength = 0; + } + } + else + { + WriteRun(); + runLength = 1; + currentChar = b; + } + } + else + { + currentChar = b; + runLength++; + } + } + + private void MakeMaps() + { + nInUse = 0; + for (int i = 0; i < 256; i++) + { + if (inUse[i]) + { + seqToUnseq[nInUse] = (char)i; + unseqToSeq[i] = (char)nInUse; + nInUse++; + } + } + } + + /// + /// Get the number of bytes written to output. + /// + private void WriteRun() + { + if (last < allowableBlockSize) + { + inUse[currentChar] = true; + for (int i = 0; i < runLength; i++) + { + mCrc.Update(currentChar); + } + + switch (runLength) + { + case 1: + last++; + block[last + 1] = (byte)currentChar; + break; + + case 2: + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + break; + + case 3: + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + break; + + default: + inUse[runLength - 4] = true; + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)currentChar; + last++; + block[last + 1] = (byte)(runLength - 4); + break; + } + } + else + { + EndBlock(); + InitBlock(); + WriteRun(); + } + } + + /// + /// Get the number of bytes written to the output. + /// + public int BytesWritten + { + get { return bytesOut; } + } + + /// + /// Releases the unmanaged resources used by the and optionally releases the managed resources. + /// + /// true to release both managed and unmanaged resources; false to release only unmanaged resources. + override protected void Dispose(bool disposing) + { + try + { + try + { + base.Dispose(disposing); + if (!disposed_) + { + disposed_ = true; + + if (runLength > 0) + { + WriteRun(); + } + + currentChar = -1; + EndBlock(); + EndCompression(); + Flush(); + } + } + finally + { + if (disposing) + { + if (IsStreamOwner) + { + baseStream.Dispose(); + } + } + } + } + catch + { + } + } + + /// + /// Flush output buffers + /// + public override void Flush() + { + baseStream.Flush(); + } + + private void Initialize() + { + bytesOut = 0; + nBlocksRandomised = 0; + + /*--- Write header `magic' bytes indicating file-format == huffmanised, + followed by a digit indicating blockSize100k. + ---*/ + + BsPutUChar('B'); + BsPutUChar('Z'); + + BsPutUChar('h'); + BsPutUChar('0' + blockSize100k); + + combinedCRC = 0; + } + + private void InitBlock() + { + mCrc.Reset(); + last = -1; + + for (int i = 0; i < 256; i++) + { + inUse[i] = false; + } + + /*--- 20 is just a paranoia constant ---*/ + allowableBlockSize = BZip2Constants.BaseBlockSize * blockSize100k - 20; + } + + private void EndBlock() + { + if (last < 0) + { // dont do anything for empty files, (makes empty files compatible with original Bzip) + return; + } + + blockCRC = unchecked((uint)mCrc.Value); + combinedCRC = (combinedCRC << 1) | (combinedCRC >> 31); + combinedCRC ^= blockCRC; + + /*-- sort the block and establish position of original string --*/ + DoReversibleTransformation(); + + /*-- + A 6-byte block header, the value chosen arbitrarily + as 0x314159265359 :-). A 32 bit value does not really + give a strong enough guarantee that the value will not + appear by chance in the compressed datastream. 
Worst-case + probability of this event, for a 900k block, is about + 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48 bits. + For a compressed file of size 100Gb -- about 100000 blocks -- + only a 48-bit marker will do. NB: normal compression/ + decompression do *not* rely on these statistical properties. + They are only important when trying to recover blocks from + damaged files. + --*/ + BsPutUChar(0x31); + BsPutUChar(0x41); + BsPutUChar(0x59); + BsPutUChar(0x26); + BsPutUChar(0x53); + BsPutUChar(0x59); + + /*-- Now the block's CRC, so it is in a known place. --*/ + unchecked + { + BsPutint((int)blockCRC); + } + + /*-- Now a single bit indicating randomisation. --*/ + if (blockRandomised) + { + BsW(1, 1); + nBlocksRandomised++; + } + else + { + BsW(1, 0); + } + + /*-- Finally, block's contents proper. --*/ + MoveToFrontCodeAndSend(); + } + + private void EndCompression() + { + /*-- + Now another magic 48-bit number, 0x177245385090, to + indicate the end of the last block. (sqrt(pi), if + you want to know. I did want to use e, but it contains + too much repetition -- 27 18 28 18 28 46 -- for me + to feel statistically comfortable. Call me paranoid.) + --*/ + BsPutUChar(0x17); + BsPutUChar(0x72); + BsPutUChar(0x45); + BsPutUChar(0x38); + BsPutUChar(0x50); + BsPutUChar(0x90); + + unchecked + { + BsPutint((int)combinedCRC); + } + + BsFinishedWithStream(); + } + + private void BsFinishedWithStream() + { + while (bsLive > 0) + { + int ch = (bsBuff >> 24); + baseStream.WriteByte((byte)ch); // write 8-bit + bsBuff <<= 8; + bsLive -= 8; + bytesOut++; + } + } + + private void BsW(int n, int v) + { + while (bsLive >= 8) + { + int ch = (bsBuff >> 24); + unchecked { baseStream.WriteByte((byte)ch); } // write 8-bit + bsBuff <<= 8; + bsLive -= 8; + ++bytesOut; + } + bsBuff |= (v << (32 - bsLive - n)); + bsLive += n; + } + + private void BsPutUChar(int c) + { + BsW(8, c); + } + + private void BsPutint(int u) + { + BsW(8, (u >> 24) & 0xFF); + BsW(8, (u >> 16) & 0xFF); + BsW(8, (u >> 8) & 0xFF); + BsW(8, u & 0xFF); + } + + private void BsPutIntVS(int numBits, int c) + { + BsW(numBits, c); + } + + private void SendMTFValues() + { + char[][] len = new char[BZip2Constants.GroupCount][]; + for (int i = 0; i < BZip2Constants.GroupCount; ++i) + { + len[i] = new char[BZip2Constants.MaximumAlphaSize]; + } + + int gs, ge, totc, bt, bc, iter; + int nSelectors = 0, alphaSize, minLen, maxLen, selCtr; + int nGroups; + + alphaSize = nInUse + 2; + for (int t = 0; t < BZip2Constants.GroupCount; t++) + { + for (int v = 0; v < alphaSize; v++) + { + len[t][v] = (char)GREATER_ICOST; + } + } + + /*--- Decide how many coding tables to use ---*/ + if (nMTF <= 0) + { + Panic(); + } + + if (nMTF < 200) + { + nGroups = 2; + } + else if (nMTF < 600) + { + nGroups = 3; + } + else if (nMTF < 1200) + { + nGroups = 4; + } + else if (nMTF < 2400) + { + nGroups = 5; + } + else + { + nGroups = 6; + } + + /*--- Generate an initial set of coding tables ---*/ + int nPart = nGroups; + int remF = nMTF; + gs = 0; + while (nPart > 0) + { + int tFreq = remF / nPart; + int aFreq = 0; + ge = gs - 1; + while (aFreq < tFreq && ge < alphaSize - 1) + { + ge++; + aFreq += mtfFreq[ge]; + } + + if (ge > gs && nPart != nGroups && nPart != 1 && ((nGroups - nPart) % 2 == 1)) + { + aFreq -= mtfFreq[ge]; + ge--; + } + + for (int v = 0; v < alphaSize; v++) + { + if (v >= gs && v <= ge) + { + len[nPart - 1][v] = (char)LESSER_ICOST; + } + else + { + len[nPart - 1][v] = (char)GREATER_ICOST; + } + } + + nPart--; + gs = ge + 1; + remF -= aFreq; + } + + 
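/*--
  At this point each of the nGroups starting tables has been seeded: symbols
  falling inside the group's slice of the MTF frequency range get the cheap
  LESSER_ICOST length, all other symbols the expensive GREATER_ICOST length.
  The refinement passes below improve these rough tables against the actual
  group data.
--*/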
int[][] rfreq = new int[BZip2Constants.GroupCount][]; + for (int i = 0; i < BZip2Constants.GroupCount; ++i) + { + rfreq[i] = new int[BZip2Constants.MaximumAlphaSize]; + } + + int[] fave = new int[BZip2Constants.GroupCount]; + short[] cost = new short[BZip2Constants.GroupCount]; + /*--- + Iterate up to N_ITERS times to improve the tables. + ---*/ + for (iter = 0; iter < BZip2Constants.NumberOfIterations; ++iter) + { + for (int t = 0; t < nGroups; ++t) + { + fave[t] = 0; + } + + for (int t = 0; t < nGroups; ++t) + { + for (int v = 0; v < alphaSize; ++v) + { + rfreq[t][v] = 0; + } + } + + nSelectors = 0; + totc = 0; + gs = 0; + while (true) + { + /*--- Set group start & end marks. --*/ + if (gs >= nMTF) + { + break; + } + ge = gs + BZip2Constants.GroupSize - 1; + if (ge >= nMTF) + { + ge = nMTF - 1; + } + + /*-- + Calculate the cost of this group as coded + by each of the coding tables. + --*/ + for (int t = 0; t < nGroups; t++) + { + cost[t] = 0; + } + + if (nGroups == 6) + { + short cost0, cost1, cost2, cost3, cost4, cost5; + cost0 = cost1 = cost2 = cost3 = cost4 = cost5 = 0; + for (int i = gs; i <= ge; ++i) + { + short icv = szptr[i]; + cost0 += (short)len[0][icv]; + cost1 += (short)len[1][icv]; + cost2 += (short)len[2][icv]; + cost3 += (short)len[3][icv]; + cost4 += (short)len[4][icv]; + cost5 += (short)len[5][icv]; + } + cost[0] = cost0; + cost[1] = cost1; + cost[2] = cost2; + cost[3] = cost3; + cost[4] = cost4; + cost[5] = cost5; + } + else + { + for (int i = gs; i <= ge; ++i) + { + short icv = szptr[i]; + for (int t = 0; t < nGroups; t++) + { + cost[t] += (short)len[t][icv]; + } + } + } + + /*-- + Find the coding table which is best for this group, + and record its identity in the selector table. + --*/ + bc = 999999999; + bt = -1; + for (int t = 0; t < nGroups; ++t) + { + if (cost[t] < bc) + { + bc = cost[t]; + bt = t; + } + } + totc += bc; + fave[bt]++; + selector[nSelectors] = (char)bt; + nSelectors++; + + /*-- + Increment the symbol frequencies for the selected table. + --*/ + for (int i = gs; i <= ge; ++i) + { + ++rfreq[bt][szptr[i]]; + } + + gs = ge + 1; + } + + /*-- + Recompute the tables based on the accumulated frequencies. + --*/ + for (int t = 0; t < nGroups; ++t) + { + HbMakeCodeLengths(len[t], rfreq[t], alphaSize, 20); + } + } + + rfreq = null; + fave = null; + cost = null; + + if (!(nGroups < 8)) + { + Panic(); + } + + if (!(nSelectors < 32768 && nSelectors <= (2 + (900000 / BZip2Constants.GroupSize)))) + { + Panic(); + } + + /*--- Compute MTF values for the selectors. ---*/ + char[] pos = new char[BZip2Constants.GroupCount]; + char ll_i, tmp2, tmp; + + for (int i = 0; i < nGroups; i++) + { + pos[i] = (char)i; + } + + for (int i = 0; i < nSelectors; i++) + { + ll_i = selector[i]; + int j = 0; + tmp = pos[j]; + while (ll_i != tmp) + { + j++; + tmp2 = tmp; + tmp = pos[j]; + pos[j] = tmp2; + } + pos[0] = tmp; + selectorMtf[i] = (char)j; + } + + int[][] code = new int[BZip2Constants.GroupCount][]; + + for (int i = 0; i < BZip2Constants.GroupCount; ++i) + { + code[i] = new int[BZip2Constants.MaximumAlphaSize]; + } + + /*--- Assign actual codes for the tables. --*/ + for (int t = 0; t < nGroups; t++) + { + minLen = 32; + maxLen = 0; + for (int i = 0; i < alphaSize; i++) + { + if (len[t][i] > maxLen) + { + maxLen = len[t][i]; + } + if (len[t][i] < minLen) + { + minLen = len[t][i]; + } + } + if (maxLen > 20) + { + Panic(); + } + if (minLen < 1) + { + Panic(); + } + HbAssignCodes(code[t], len[t], minLen, maxLen, alphaSize); + } + + /*--- Transmit the mapping table. 
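  The 256 inUse flags are sent as a sparse two-level bitmap: first one bit per
  block of 16 symbols, then the 16 individual flags for every block that was
  marked present.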
---*/ + bool[] inUse16 = new bool[16]; + for (int i = 0; i < 16; ++i) + { + inUse16[i] = false; + for (int j = 0; j < 16; ++j) + { + if (inUse[i * 16 + j]) + { + inUse16[i] = true; + } + } + } + + for (int i = 0; i < 16; ++i) + { + if (inUse16[i]) + { + BsW(1, 1); + } + else + { + BsW(1, 0); + } + } + + for (int i = 0; i < 16; ++i) + { + if (inUse16[i]) + { + for (int j = 0; j < 16; ++j) + { + if (inUse[i * 16 + j]) + { + BsW(1, 1); + } + else + { + BsW(1, 0); + } + } + } + } + + /*--- Now the selectors. ---*/ + BsW(3, nGroups); + BsW(15, nSelectors); + for (int i = 0; i < nSelectors; ++i) + { + for (int j = 0; j < selectorMtf[i]; ++j) + { + BsW(1, 1); + } + BsW(1, 0); + } + + /*--- Now the coding tables. ---*/ + for (int t = 0; t < nGroups; ++t) + { + int curr = len[t][0]; + BsW(5, curr); + for (int i = 0; i < alphaSize; ++i) + { + while (curr < len[t][i]) + { + BsW(2, 2); + curr++; /* 10 */ + } + while (curr > len[t][i]) + { + BsW(2, 3); + curr--; /* 11 */ + } + BsW(1, 0); + } + } + + /*--- And finally, the block data proper ---*/ + selCtr = 0; + gs = 0; + while (true) + { + if (gs >= nMTF) + { + break; + } + ge = gs + BZip2Constants.GroupSize - 1; + if (ge >= nMTF) + { + ge = nMTF - 1; + } + + for (int i = gs; i <= ge; i++) + { + BsW(len[selector[selCtr]][szptr[i]], code[selector[selCtr]][szptr[i]]); + } + + gs = ge + 1; + ++selCtr; + } + if (!(selCtr == nSelectors)) + { + Panic(); + } + } + + private void MoveToFrontCodeAndSend() + { + BsPutIntVS(24, origPtr); + GenerateMTFValues(); + SendMTFValues(); + } + + private void SimpleSort(int lo, int hi, int d) + { + int i, j, h, bigN, hp; + int v; + + bigN = hi - lo + 1; + if (bigN < 2) + { + return; + } + + hp = 0; + while (increments[hp] < bigN) + { + hp++; + } + hp--; + + for (; hp >= 0; hp--) + { + h = increments[hp]; + + i = lo + h; + while (true) + { + /*-- copy 1 --*/ + if (i > hi) + break; + v = zptr[i]; + j = i; + while (FullGtU(zptr[j - h] + d, v + d)) + { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) + break; + } + zptr[j] = v; + i++; + + /*-- copy 2 --*/ + if (i > hi) + { + break; + } + v = zptr[i]; + j = i; + while (FullGtU(zptr[j - h] + d, v + d)) + { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) + { + break; + } + } + zptr[j] = v; + i++; + + /*-- copy 3 --*/ + if (i > hi) + { + break; + } + v = zptr[i]; + j = i; + while (FullGtU(zptr[j - h] + d, v + d)) + { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) + { + break; + } + } + zptr[j] = v; + i++; + + if (workDone > workLimit && firstAttempt) + { + return; + } + } + } + } + + private void Vswap(int p1, int p2, int n) + { + int temp = 0; + while (n > 0) + { + temp = zptr[p1]; + zptr[p1] = zptr[p2]; + zptr[p2] = temp; + p1++; + p2++; + n--; + } + } + + private void QSort3(int loSt, int hiSt, int dSt) + { + int unLo, unHi, ltLo, gtHi, med, n, m; + int lo, hi, d; + + StackElement[] stack = new StackElement[QSORT_STACK_SIZE]; + + int sp = 0; + + stack[sp].ll = loSt; + stack[sp].hh = hiSt; + stack[sp].dd = dSt; + sp++; + + while (sp > 0) + { + if (sp >= QSORT_STACK_SIZE) + { + Panic(); + } + + sp--; + lo = stack[sp].ll; + hi = stack[sp].hh; + d = stack[sp].dd; + + if (hi - lo < SMALL_THRESH || d > DEPTH_THRESH) + { + SimpleSort(lo, hi, d); + if (workDone > workLimit && firstAttempt) + { + return; + } + continue; + } + + med = Med3(block[zptr[lo] + d + 1], + block[zptr[hi] + d + 1], + block[zptr[(lo + hi) >> 1] + d + 1]); + + unLo = ltLo = lo; + unHi = gtHi = hi; + + while (true) + { + while (true) + { + if (unLo > unHi) + { + break; + 
} + n = ((int)block[zptr[unLo] + d + 1]) - med; + if (n == 0) + { + int temp = zptr[unLo]; + zptr[unLo] = zptr[ltLo]; + zptr[ltLo] = temp; + ltLo++; + unLo++; + continue; + } + if (n > 0) + { + break; + } + unLo++; + } + + while (true) + { + if (unLo > unHi) + { + break; + } + n = ((int)block[zptr[unHi] + d + 1]) - med; + if (n == 0) + { + int temp = zptr[unHi]; + zptr[unHi] = zptr[gtHi]; + zptr[gtHi] = temp; + gtHi--; + unHi--; + continue; + } + if (n < 0) + { + break; + } + unHi--; + } + + if (unLo > unHi) + { + break; + } + + { + int temp = zptr[unLo]; + zptr[unLo] = zptr[unHi]; + zptr[unHi] = temp; + unLo++; + unHi--; + } + } + + if (gtHi < ltLo) + { + stack[sp].ll = lo; + stack[sp].hh = hi; + stack[sp].dd = d + 1; + sp++; + continue; + } + + n = ((ltLo - lo) < (unLo - ltLo)) ? (ltLo - lo) : (unLo - ltLo); + Vswap(lo, unLo - n, n); + m = ((hi - gtHi) < (gtHi - unHi)) ? (hi - gtHi) : (gtHi - unHi); + Vswap(unLo, hi - m + 1, m); + + n = lo + unLo - ltLo - 1; + m = hi - (gtHi - unHi) + 1; + + stack[sp].ll = lo; + stack[sp].hh = n; + stack[sp].dd = d; + sp++; + + stack[sp].ll = n + 1; + stack[sp].hh = m - 1; + stack[sp].dd = d + 1; + sp++; + + stack[sp].ll = m; + stack[sp].hh = hi; + stack[sp].dd = d; + sp++; + } + } + + private void MainSort() + { + int i, j, ss, sb; + int[] runningOrder = new int[256]; + int[] copy = new int[256]; + bool[] bigDone = new bool[256]; + int c1, c2; + int numQSorted; + + /*-- + In the various block-sized structures, live data runs + from 0 to last+NUM_OVERSHOOT_BYTES inclusive. First, + set up the overshoot area for block. + --*/ + + // if (verbosity >= 4) fprintf ( stderr, " sort initialise ...\n" ); + for (i = 0; i < BZip2Constants.OvershootBytes; i++) + { + block[last + i + 2] = block[(i % (last + 1)) + 1]; + } + for (i = 0; i <= last + BZip2Constants.OvershootBytes; i++) + { + quadrant[i] = 0; + } + + block[0] = (byte)(block[last + 1]); + + if (last < 4000) + { + /*-- + Use simpleSort(), since the full sorting mechanism + has quite a large constant overhead. + --*/ + for (i = 0; i <= last; i++) + { + zptr[i] = i; + } + firstAttempt = false; + workDone = workLimit = 0; + SimpleSort(0, last, 0); + } + else + { + numQSorted = 0; + for (i = 0; i <= 255; i++) + { + bigDone[i] = false; + } + for (i = 0; i <= 65536; i++) + { + ftab[i] = 0; + } + + c1 = block[0]; + for (i = 0; i <= last; i++) + { + c2 = block[i + 1]; + ftab[(c1 << 8) + c2]++; + c1 = c2; + } + + for (i = 1; i <= 65536; i++) + { + ftab[i] += ftab[i - 1]; + } + + c1 = block[1]; + for (i = 0; i < last; i++) + { + c2 = block[i + 2]; + j = (c1 << 8) + c2; + c1 = c2; + ftab[j]--; + zptr[ftab[j]] = i; + } + + j = ((block[last + 1]) << 8) + (block[1]); + ftab[j]--; + zptr[ftab[j]] = last; + + /*-- + Now ftab contains the first loc of every small bucket. + Calculate the running order, from smallest to largest + big bucket. + --*/ + + for (i = 0; i <= 255; i++) + { + runningOrder[i] = i; + } + + int vv; + int h = 1; + do + { + h = 3 * h + 1; + } while (h <= 256); + do + { + h = h / 3; + for (i = h; i <= 255; i++) + { + vv = runningOrder[i]; + j = i; + while ((ftab[((runningOrder[j - h]) + 1) << 8] - ftab[(runningOrder[j - h]) << 8]) > (ftab[((vv) + 1) << 8] - ftab[(vv) << 8])) + { + runningOrder[j] = runningOrder[j - h]; + j = j - h; + if (j <= (h - 1)) + { + break; + } + } + runningOrder[j] = vv; + } + } while (h != 1); + + /*-- + The main sorting loop. + --*/ + for (i = 0; i <= 255; i++) + { + /*-- + Process big buckets, starting with the least full. 
+ --*/ + ss = runningOrder[i]; + + /*-- + Complete the big bucket [ss] by quicksorting + any unsorted small buckets [ss, j]. Hopefully + previous pointer-scanning phases have already + completed many of the small buckets [ss, j], so + we don't have to sort them at all. + --*/ + for (j = 0; j <= 255; j++) + { + sb = (ss << 8) + j; + if (!((ftab[sb] & SETMASK) == SETMASK)) + { + int lo = ftab[sb] & CLEARMASK; + int hi = (ftab[sb + 1] & CLEARMASK) - 1; + if (hi > lo) + { + QSort3(lo, hi, 2); + numQSorted += (hi - lo + 1); + if (workDone > workLimit && firstAttempt) + { + return; + } + } + ftab[sb] |= SETMASK; + } + } + + /*-- + The ss big bucket is now done. Record this fact, + and update the quadrant descriptors. Remember to + update quadrants in the overshoot area too, if + necessary. The "if (i < 255)" test merely skips + this updating for the last bucket processed, since + updating for the last bucket is pointless. + --*/ + bigDone[ss] = true; + + if (i < 255) + { + int bbStart = ftab[ss << 8] & CLEARMASK; + int bbSize = (ftab[(ss + 1) << 8] & CLEARMASK) - bbStart; + int shifts = 0; + + while ((bbSize >> shifts) > 65534) + { + shifts++; + } + + for (j = 0; j < bbSize; j++) + { + int a2update = zptr[bbStart + j]; + int qVal = (j >> shifts); + quadrant[a2update] = qVal; + if (a2update < BZip2Constants.OvershootBytes) + { + quadrant[a2update + last + 1] = qVal; + } + } + + if (!(((bbSize - 1) >> shifts) <= 65535)) + { + Panic(); + } + } + + /*-- + Now scan this big bucket so as to synthesise the + sorted order for small buckets [t, ss] for all t != ss. + --*/ + for (j = 0; j <= 255; j++) + { + copy[j] = ftab[(j << 8) + ss] & CLEARMASK; + } + + for (j = ftab[ss << 8] & CLEARMASK; j < (ftab[(ss + 1) << 8] & CLEARMASK); j++) + { + c1 = block[zptr[j]]; + if (!bigDone[c1]) + { + zptr[copy[c1]] = zptr[j] == 0 ? last : zptr[j] - 1; + copy[c1]++; + } + } + + for (j = 0; j <= 255; j++) + { + ftab[(j << 8) + ss] |= SETMASK; + } + } + } + } + + private void RandomiseBlock() + { + int i; + int rNToGo = 0; + int rTPos = 0; + for (i = 0; i < 256; i++) + { + inUse[i] = false; + } + + for (i = 0; i <= last; i++) + { + if (rNToGo == 0) + { + rNToGo = (int)BZip2Constants.RandomNumbers[rTPos]; + rTPos++; + if (rTPos == 512) + { + rTPos = 0; + } + } + rNToGo--; + block[i + 1] ^= (byte)((rNToGo == 1) ? 
1 : 0); + // handle 16 bit signed numbers + block[i + 1] &= 0xFF; + + inUse[block[i + 1]] = true; + } + } + + private void DoReversibleTransformation() + { + workLimit = workFactor * last; + workDone = 0; + blockRandomised = false; + firstAttempt = true; + + MainSort(); + + if (workDone > workLimit && firstAttempt) + { + RandomiseBlock(); + workLimit = workDone = 0; + blockRandomised = true; + firstAttempt = false; + MainSort(); + } + + origPtr = -1; + for (int i = 0; i <= last; i++) + { + if (zptr[i] == 0) + { + origPtr = i; + break; + } + } + + if (origPtr == -1) + { + Panic(); + } + } + + private bool FullGtU(int i1, int i2) + { + int k; + byte c1, c2; + int s1, s2; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + i1++; + i2++; + + k = last + 1; + + do + { + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) + { + return s1 > s2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) + { + return s1 > s2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) + { + return s1 > s2; + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) + { + return c1 > c2; + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) + { + return s1 > s2; + } + i1++; + i2++; + + if (i1 > last) + { + i1 -= last; + i1--; + } + if (i2 > last) + { + i2 -= last; + i2--; + } + + k -= 4; + ++workDone; + } while (k >= 0); + + return false; + } + + private void AllocateCompressStructures() + { + int n = BZip2Constants.BaseBlockSize * blockSize100k; + block = new byte[(n + 1 + BZip2Constants.OvershootBytes)]; + quadrant = new int[(n + BZip2Constants.OvershootBytes)]; + zptr = new int[n]; + ftab = new int[65537]; + + if (block == null || quadrant == null || zptr == null || ftab == null) + { + // int totalDraw = (n + 1 + NUM_OVERSHOOT_BYTES) + (n + NUM_OVERSHOOT_BYTES) + n + 65537; + // compressOutOfMemory ( totalDraw, n ); + } + + /* + The back end needs a place to store the MTF values + whilst it calculates the coding tables. We could + put them in the zptr array. However, these values + will fit in a short, so we overlay szptr at the + start of zptr, in the hope of reducing the number + of cache misses induced by the multiple traversals + of the MTF values when calculating coding tables. + Seems to improve compression speed by about 1%. 
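  In this port szptr is allocated as its own short array rather than being
  overlaid on zptr; see the assignment below.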
+ */ + // szptr = zptr; + + szptr = new short[2 * n]; + } + + private void GenerateMTFValues() + { + char[] yy = new char[256]; + int i, j; + char tmp; + char tmp2; + int zPend; + int wr; + int EOB; + + MakeMaps(); + EOB = nInUse + 1; + + for (i = 0; i <= EOB; i++) + { + mtfFreq[i] = 0; + } + + wr = 0; + zPend = 0; + for (i = 0; i < nInUse; i++) + { + yy[i] = (char)i; + } + + for (i = 0; i <= last; i++) + { + char ll_i; + + ll_i = unseqToSeq[block[zptr[i]]]; + + j = 0; + tmp = yy[j]; + while (ll_i != tmp) + { + j++; + tmp2 = tmp; + tmp = yy[j]; + yy[j] = tmp2; + } + yy[0] = tmp; + + if (j == 0) + { + zPend++; + } + else + { + if (zPend > 0) + { + zPend--; + while (true) + { + switch (zPend % 2) + { + case 0: + szptr[wr] = (short)BZip2Constants.RunA; + wr++; + mtfFreq[BZip2Constants.RunA]++; + break; + + case 1: + szptr[wr] = (short)BZip2Constants.RunB; + wr++; + mtfFreq[BZip2Constants.RunB]++; + break; + } + if (zPend < 2) + { + break; + } + zPend = (zPend - 2) / 2; + } + zPend = 0; + } + szptr[wr] = (short)(j + 1); + wr++; + mtfFreq[j + 1]++; + } + } + + if (zPend > 0) + { + zPend--; + while (true) + { + switch (zPend % 2) + { + case 0: + szptr[wr] = (short)BZip2Constants.RunA; + wr++; + mtfFreq[BZip2Constants.RunA]++; + break; + + case 1: + szptr[wr] = (short)BZip2Constants.RunB; + wr++; + mtfFreq[BZip2Constants.RunB]++; + break; + } + if (zPend < 2) + { + break; + } + zPend = (zPend - 2) / 2; + } + } + + szptr[wr] = (short)EOB; + wr++; + mtfFreq[EOB]++; + + nMTF = wr; + } + + private static void Panic() + { + throw new BZip2Exception("BZip2 output stream panic"); + } + + private static void HbMakeCodeLengths(char[] len, int[] freq, int alphaSize, int maxLen) + { + /*-- + Nodes and heap entries run from 1. Entry 0 + for both the heap and nodes is a sentinel. + --*/ + int nNodes, nHeap, n1, n2, j, k; + bool tooLong; + + int[] heap = new int[BZip2Constants.MaximumAlphaSize + 2]; + int[] weight = new int[BZip2Constants.MaximumAlphaSize * 2]; + int[] parent = new int[BZip2Constants.MaximumAlphaSize * 2]; + + for (int i = 0; i < alphaSize; ++i) + { + weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8; + } + + while (true) + { + nNodes = alphaSize; + nHeap = 0; + + heap[0] = 0; + weight[0] = 0; + parent[0] = -2; + + for (int i = 1; i <= alphaSize; ++i) + { + parent[i] = -1; + nHeap++; + heap[nHeap] = i; + int zz = nHeap; + int tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) + { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + if (!(nHeap < (BZip2Constants.MaximumAlphaSize + 2))) + { + Panic(); + } + + while (nHeap > 1) + { + n1 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + int zz = 1; + int yy = 0; + int tmp = heap[zz]; + while (true) + { + yy = zz << 1; + if (yy > nHeap) + { + break; + } + if (yy < nHeap && weight[heap[yy + 1]] < weight[heap[yy]]) + { + yy++; + } + if (weight[tmp] < weight[heap[yy]]) + { + break; + } + + heap[zz] = heap[yy]; + zz = yy; + } + heap[zz] = tmp; + n2 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + + zz = 1; + yy = 0; + tmp = heap[zz]; + while (true) + { + yy = zz << 1; + if (yy > nHeap) + { + break; + } + if (yy < nHeap && weight[heap[yy + 1]] < weight[heap[yy]]) + { + yy++; + } + if (weight[tmp] < weight[heap[yy]]) + { + break; + } + heap[zz] = heap[yy]; + zz = yy; + } + heap[zz] = tmp; + nNodes++; + parent[n1] = parent[n2] = nNodes; + + weight[nNodes] = (int)((weight[n1] & 0xffffff00) + (weight[n2] & 0xffffff00)) | + (int)(1 + (((weight[n1] & 0x000000ff) > (weight[n2] & 0x000000ff)) ? 
(weight[n1] & 0x000000ff) : (weight[n2] & 0x000000ff))); + + parent[nNodes] = -1; + nHeap++; + heap[nHeap] = nNodes; + + zz = nHeap; + tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) + { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + if (!(nNodes < (BZip2Constants.MaximumAlphaSize * 2))) + { + Panic(); + } + + tooLong = false; + for (int i = 1; i <= alphaSize; ++i) + { + j = 0; + k = i; + while (parent[k] >= 0) + { + k = parent[k]; + j++; + } + len[i - 1] = (char)j; + tooLong |= j > maxLen; + } + + if (!tooLong) + { + break; + } + + for (int i = 1; i < alphaSize; ++i) + { + j = weight[i] >> 8; + j = 1 + (j / 2); + weight[i] = j << 8; + } + } + } + + private static void HbAssignCodes(int[] code, char[] length, int minLen, int maxLen, int alphaSize) + { + int vec = 0; + for (int n = minLen; n <= maxLen; ++n) + { + for (int i = 0; i < alphaSize; ++i) + { + if (length[i] == n) + { + code[i] = vec; + ++vec; + } + } + vec <<= 1; + } + } + + private static byte Med3(byte a, byte b, byte c) + { + byte t; + if (a > b) + { + t = a; + a = b; + b = t; + } + if (b > c) + { + t = b; + b = c; + c = t; + } + if (a > b) + { + b = a; + } + return b; + } + + private struct StackElement + { + public int ll; + public int hh; + public int dd; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/Deflate/DeflateCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/Deflate/DeflateCompressionModule.cs new file mode 100644 index 0000000..a08b22f --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/Deflate/DeflateCompressionModule.cs @@ -0,0 +1,16 @@ +using System.IO.Compression; +using MBS.Editor.Core.Compression; + +namespace MBS.Editor.Core.Compression.Modules.Deflate; + +public class DeflateCompressionModule : SystemCompressionModule +{ + protected override DeflateStream CreateCompressor(Stream stream) + { + return new DeflateStream(stream, GetSystemCompressionLevel()); + } + protected override DeflateStream CreateDecompressor(Stream stream) + { + return new DeflateStream(stream, CompressionMode.Decompress); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/GZip/GZipCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/GZip/GZipCompressionModule.cs new file mode 100644 index 0000000..2a4b3b3 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/GZip/GZipCompressionModule.cs @@ -0,0 +1,37 @@ +using System.IO.Compression; +using MBS.Editor.Core.Compression; + +namespace MBS.Editor.Core.Compression.Modules.GZip; + +public class GZipCompressionModule : SystemCompressionModule +{ + protected override GZipStream CreateCompressor(Stream stream) + { + return new GZipStream(stream, GetSystemCompressionLevel()); + } + protected override GZipStream CreateDecompressor(Stream stream) + { + return new GZipStream(stream, System.IO.Compression.CompressionMode.Decompress); + } + + /* + protected override void CompressInternal(byte[] buffer, int offset, int length) + { + if (_compressor == null) + { + MemoryStream ms = new MemoryStream(); + _compressor = new GZipStream(ms, GetSystemCompressionLevel()); + } + _compressor.Write(buffer, offset, length); + } + protected override int DecompressInternal(byte[] buffer, int offset, int length) + { + if (_decompressor == null) + { + MemoryStream ms = new MemoryStream(); + _decompressor = new GZipStream(ms, GetSystemCompressionLevel()); + } + return _decompressor.Read(buffer, 
offset, length); + } + */ +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwConstants.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwConstants.cs new file mode 100644 index 0000000..cccf728 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwConstants.cs @@ -0,0 +1,62 @@ +namespace MBS.Editor.Core.Compression.Modules.LZW.Internal; + +/// +/// This class contains constants used for LZW +/// +[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")] +sealed public class LzwConstants +{ + /// + /// Magic number found at start of LZW header: 0x1f 0x9d + /// + public const int MAGIC = 0x1f9d; + + /// + /// Maximum number of bits per code + /// + public const int MAX_BITS = 16; + + /* 3rd header byte: + * bit 0..4 Number of compression bits + * bit 5 Extended header + * bit 6 Free + * bit 7 Block mode + */ + + /// + /// Mask for 'number of compression bits' + /// + public const int BIT_MASK = 0x1f; + + /// + /// Indicates the presence of a fourth header byte + /// + public const int EXTENDED_MASK = 0x20; + + //public const int FREE_MASK = 0x40; + + /// + /// Reserved bits + /// + public const int RESERVED_MASK = 0x60; + + /// + /// Block compression: if table is full and compression rate is dropping, + /// clear the dictionary. + /// + public const int BLOCK_MODE_MASK = 0x80; + + /// + /// LZW file header size (in bytes) + /// + public const int HDR_SIZE = 3; + + /// + /// Initial number of bits per code + /// + public const int INIT_BITS = 9; + + private LzwConstants() + { + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwInputStream.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwInputStream.cs new file mode 100644 index 0000000..6f89c8e --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/Internal/LzwInputStream.cs @@ -0,0 +1,571 @@ +using System; +using System.IO; + +namespace MBS.Editor.Core.Compression.Modules.LZW.Internal; + +/// +/// This filter stream is used to decompress a LZW format stream. +/// Specifically, a stream that uses the LZC compression method. +/// This file format is usually associated with the .Z file extension. +/// +/// See http://en.wikipedia.org/wiki/Compress +/// See http://wiki.wxwidgets.org/Development:_Z_File_Format +/// +/// The file header consists of 3 (or optionally 4) bytes. The first two bytes +/// contain the magic marker "0x1f 0x9d", followed by a byte of flags. +/// +/// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c +/// code in the gzip package. 
+/// +/// This sample shows how to unzip a compressed file +/// +/// using System; +/// using System.IO; +/// +/// using ICSharpCode.SharpZipLib.Core; +/// using ICSharpCode.SharpZipLib.LZW; +/// +/// class MainClass +/// { +/// public static void Main(string[] args) +/// { +/// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0]))) +/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) { +/// byte[] buffer = new byte[4096]; +/// StreamUtils.Copy(inStream, outStream, buffer); +/// // OR +/// inStream.Read(buffer, 0, buffer.Length); +/// // now do something with the buffer +/// } +/// } +/// } +/// +/// +public class LzwInputStream : Stream +{ + /// + /// Gets or sets a flag indicating ownership of underlying stream. + /// When the flag is true will close the underlying stream also. + /// + /// The default value is true. + public bool IsStreamOwner { get; set; } = true; + + /// + /// Creates a LzwInputStream + /// + /// + /// The stream to read compressed data from (baseInputStream LZW format) + /// + public LzwInputStream(Stream baseInputStream) + { + this.baseInputStream = baseInputStream; + } + + /// + /// See + /// + /// + public override int ReadByte() + { + int b = Read(one, 0, 1); + if (b == 1) + return (one[0] & 0xff); + return -1; + } + + /// + /// Reads decompressed data into the provided buffer byte array + /// + /// + /// The array to read and decompress data into + /// + /// + /// The offset indicating where the data should be placed + /// + /// + /// The number of bytes to decompress + /// + /// The number of bytes read. Zero signals the end of stream + public override int Read(byte[] buffer, int offset, int count) + { + if (!headerParsed) + ParseHeader(); + + if (eof) + return 0; + + int start = offset; + + /* Using local copies of various variables speeds things up by as + * much as 30% in Java! Performance not tested in C#. + */ + int[] lTabPrefix = tabPrefix; + byte[] lTabSuffix = tabSuffix; + byte[] lStack = stack; + int lNBits = nBits; + int lMaxCode = maxCode; + int lMaxMaxCode = maxMaxCode; + int lBitMask = bitMask; + int lOldCode = oldCode; + byte lFinChar = finChar; + int lStackP = stackP; + int lFreeEnt = freeEnt; + byte[] lData = data; + int lBitPos = bitPos; + + // empty stack if stuff still left + int sSize = lStack.Length - lStackP; + if (sSize > 0) + { + int num = (sSize >= count) ? count : sSize; + Array.Copy(lStack, lStackP, buffer, offset, num); + offset += num; + count -= num; + lStackP += num; + } + + if (count == 0) + { + stackP = lStackP; + return offset - start; + } + + // loop, filling local buffer until enough data has been decompressed + MainLoop: + do + { + if (end < EXTRA) + { + Fill(); + } + + int bitIn = (got > 0) ? (end - end % lNBits) << 3 : + (end << 3) - (lNBits - 1); + + while (lBitPos < bitIn) + { + #region A + + // handle 1-byte reads correctly + if (count == 0) + { + nBits = lNBits; + maxCode = lMaxCode; + maxMaxCode = lMaxMaxCode; + bitMask = lBitMask; + oldCode = lOldCode; + finChar = lFinChar; + stackP = lStackP; + freeEnt = lFreeEnt; + bitPos = lBitPos; + + return offset - start; + } + + // check for code-width expansion + if (lFreeEnt > lMaxCode) + { + int nBytes = lNBits << 3; + lBitPos = (lBitPos - 1) + + nBytes - (lBitPos - 1 + nBytes) % nBytes; + + lNBits++; + lMaxCode = (lNBits == maxBits) ? 
lMaxMaxCode : + (1 << lNBits) - 1; + + lBitMask = (1 << lNBits) - 1; + lBitPos = ResetBuf(lBitPos); + goto MainLoop; + } + + #endregion A + + #region B + + // read next code + int pos = lBitPos >> 3; + int code = (((lData[pos] & 0xFF) | + ((lData[pos + 1] & 0xFF) << 8) | + ((lData[pos + 2] & 0xFF) << 16)) >> + (lBitPos & 0x7)) & lBitMask; + + lBitPos += lNBits; + + // handle first iteration + if (lOldCode == -1) + { + if (code >= 256) + throw new LZWException("corrupt input: " + code + " > 255"); + + lFinChar = (byte)(lOldCode = code); + buffer[offset++] = lFinChar; + count--; + continue; + } + + // handle CLEAR code + if (code == TBL_CLEAR && blockMode) + { + Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length); + lFreeEnt = TBL_FIRST - 1; + + int nBytes = lNBits << 3; + lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes; + lNBits = LzwConstants.INIT_BITS; + lMaxCode = (1 << lNBits) - 1; + lBitMask = lMaxCode; + + // Code tables reset + + lBitPos = ResetBuf(lBitPos); + goto MainLoop; + } + + #endregion B + + #region C + + // setup + int inCode = code; + lStackP = lStack.Length; + + // Handle KwK case + if (code >= lFreeEnt) + { + if (code > lFreeEnt) + { + throw new LZWException("corrupt input: code=" + code + + ", freeEnt=" + lFreeEnt); + } + + lStack[--lStackP] = lFinChar; + code = lOldCode; + } + + // Generate output characters in reverse order + while (code >= 256) + { + lStack[--lStackP] = lTabSuffix[code]; + code = lTabPrefix[code]; + } + + lFinChar = lTabSuffix[code]; + buffer[offset++] = lFinChar; + count--; + + // And put them out in forward order + sSize = lStack.Length - lStackP; + int num = (sSize >= count) ? count : sSize; + Array.Copy(lStack, lStackP, buffer, offset, num); + offset += num; + count -= num; + lStackP += num; + + #endregion C + + #region D + + // generate new entry in table + if (lFreeEnt < lMaxMaxCode) + { + lTabPrefix[lFreeEnt] = lOldCode; + lTabSuffix[lFreeEnt] = lFinChar; + lFreeEnt++; + } + + // Remember previous code + lOldCode = inCode; + + // if output buffer full, then return + if (count == 0) + { + nBits = lNBits; + maxCode = lMaxCode; + bitMask = lBitMask; + oldCode = lOldCode; + finChar = lFinChar; + stackP = lStackP; + freeEnt = lFreeEnt; + bitPos = lBitPos; + + return offset - start; + } + + #endregion D + } // while + + lBitPos = ResetBuf(lBitPos); + } while (got > 0); // do..while + + nBits = lNBits; + maxCode = lMaxCode; + bitMask = lBitMask; + oldCode = lOldCode; + finChar = lFinChar; + stackP = lStackP; + freeEnt = lFreeEnt; + bitPos = lBitPos; + + eof = true; + return offset - start; + } + + /// + /// Moves the unread data in the buffer to the beginning and resets + /// the pointers. + /// + /// + /// + private int ResetBuf(int bitPosition) + { + int pos = bitPosition >> 3; + Array.Copy(data, pos, data, 0, end - pos); + end -= pos; + return 0; + } + + private void Fill() + { + got = baseInputStream.Read(data, end, data.Length - 1 - end); + if (got > 0) + { + end += got; + } + } + + private void ParseHeader() + { + headerParsed = true; + + byte[] hdr = new byte[LzwConstants.HDR_SIZE]; + + int result = baseInputStream.Read(hdr, 0, hdr.Length); + + // Check the magic marker + if (result < 0) + throw new LZWException("Failed to read LZW header"); + + if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff)) + { + throw new LZWException(String.Format( + "Wrong LZW header. Magic bytes don't match. 
0x{0:x2} 0x{1:x2}", + hdr[0], hdr[1])); + } + + // Check the 3rd header byte + blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0; + maxBits = hdr[2] & LzwConstants.BIT_MASK; + + if (maxBits > LzwConstants.MAX_BITS) + { + throw new LZWException("Stream compressed with " + maxBits + + " bits, but decompression can only handle " + + LzwConstants.MAX_BITS + " bits."); + } + + if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0) + { + throw new LZWException("Unsupported bits set in the header."); + } + + // Initialize variables + maxMaxCode = 1 << maxBits; + nBits = LzwConstants.INIT_BITS; + maxCode = (1 << nBits) - 1; + bitMask = maxCode; + oldCode = -1; + finChar = 0; + freeEnt = blockMode ? TBL_FIRST : 256; + + tabPrefix = new int[1 << maxBits]; + tabSuffix = new byte[1 << maxBits]; + stack = new byte[1 << maxBits]; + stackP = stack.Length; + + for (int idx = 255; idx >= 0; idx--) + tabSuffix[idx] = (byte)idx; + } + + #region Stream Overrides + + /// + /// Gets a value indicating whether the current stream supports reading + /// + public override bool CanRead + { + get + { + return baseInputStream.CanRead; + } + } + + /// + /// Gets a value of false indicating seeking is not supported for this stream. + /// + public override bool CanSeek + { + get + { + return false; + } + } + + /// + /// Gets a value of false indicating that this stream is not writeable. + /// + public override bool CanWrite + { + get + { + return false; + } + } + + /// + /// A value representing the length of the stream in bytes. + /// + public override long Length + { + get + { + return got; + } + } + + /// + /// The current position within the stream. + /// Throws a NotSupportedException when attempting to set the position + /// + /// Attempting to set the position + public override long Position + { + get + { + return baseInputStream.Position; + } + set + { + throw new NotSupportedException("InflaterInputStream Position not supported"); + } + } + + /// + /// Flushes the baseInputStream + /// + public override void Flush() + { + baseInputStream.Flush(); + } + + /// + /// Sets the position within the current stream + /// Always throws a NotSupportedException + /// + /// The relative offset to seek to. + /// The defining where to seek from. + /// The new position in the stream. + /// Any access + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException("Seek not supported"); + } + + /// + /// Set the length of the current stream + /// Always throws a NotSupportedException + /// + /// The new length value for the stream. + /// Any access + public override void SetLength(long value) + { + throw new NotSupportedException("InflaterInputStream SetLength not supported"); + } + + /// + /// Writes a sequence of bytes to stream and advances the current position + /// This method always throws a NotSupportedException + /// + /// The buffer containing data to write. + /// The offset of the first byte to write. + /// The number of bytes to write. + /// Any access + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException("InflaterInputStream Write not supported"); + } + + /// + /// Writes one byte to the current stream and advances the current position + /// Always throws a NotSupportedException + /// + /// The byte to write. + /// Any access + public override void WriteByte(byte value) + { + throw new NotSupportedException("InflaterInputStream WriteByte not supported"); + } + + /// + /// Closes the input stream. 
When + /// is true the underlying stream is also closed. + /// + protected override void Dispose(bool disposing) + { + if (!isClosed) + { + isClosed = true; + if (IsStreamOwner) + { + baseInputStream.Dispose(); + } + } + } + + #endregion Stream Overrides + + #region Instance Fields + + private Stream baseInputStream; + + /// + /// Flag indicating wether this instance has been closed or not. + /// + private bool isClosed; + + private readonly byte[] one = new byte[1]; + private bool headerParsed; + + // string table stuff + private const int TBL_CLEAR = 0x100; + + private const int TBL_FIRST = TBL_CLEAR + 1; + + private int[] tabPrefix; + private byte[] tabSuffix; + private readonly int[] zeros = new int[256]; + private byte[] stack; + + // various state + private bool blockMode; + + private int nBits; + private int maxBits; + private int maxMaxCode; + private int maxCode; + private int bitMask; + private int oldCode; + private byte finChar; + private int stackP; + private int freeEnt; + + // input buffer + private readonly byte[] data = new byte[1024 * 8]; + + private int bitPos; + private int end; + private int got; + private bool eof; + private const int EXTRA = 64; + + #endregion Instance Fields +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWCompressionModule.cs new file mode 100644 index 0000000..973f6a7 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWCompressionModule.cs @@ -0,0 +1,18 @@ +using System; +using System.IO; +using MBS.Editor.Core.Compression.Modules.LZW.Internal; + +namespace MBS.Editor.Core.Compression.Modules.LZW; + +public class LZWCompressionModule : SystemCompressionModule +{ + protected override LzwInputStream CreateCompressor(Stream stream) + { + throw new NotImplementedException(); + } + + protected override LzwInputStream CreateDecompressor(Stream stream) + { + return new LzwInputStream(stream); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWException.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWException.cs new file mode 100644 index 0000000..81a5462 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/LZW/LZWException.cs @@ -0,0 +1,36 @@ +using System; +using System.Runtime.Serialization; + +namespace MBS.Editor.Core.Compression.Modules.LZW; + +/// +/// LZWException represents exceptions specific to LZW classes and code. +/// +public class LZWException : CompressionException +{ + /// + /// Initialise a new instance of . + /// + public LZWException() + { + } + + /// + /// Initialise a new instance of with its message string. + /// + /// A that describes the error. + public LZWException(string message) + : base(message) + { + } + + /// + /// Initialise a new instance of . + /// + /// A that describes the error. + /// The that caused this exception. 
+ public LZWException(string message, Exception innerException) + : base(message, innerException) + { + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/ZlibBuiltin/ZlibBuiltinCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/ZlibBuiltin/ZlibBuiltinCompressionModule.cs new file mode 100644 index 0000000..37f294c --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/Modules/ZlibBuiltin/ZlibBuiltinCompressionModule.cs @@ -0,0 +1,14 @@ +using System.IO.Compression; +using MBS.Editor.Core.Compression; + +public class ZlibBuiltinCompressionModule : SystemCompressionModule +{ + protected override ZLibStream CreateCompressor(Stream stream) + { + return new ZLibStream(stream, GetSystemCompressionLevel()); + } + protected override ZLibStream CreateDecompressor(Stream stream) + { + return new ZLibStream(stream, CompressionMode.Decompress); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionLevel.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionLevel.cs new file mode 100644 index 0000000..b5df694 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionLevel.cs @@ -0,0 +1,26 @@ +namespace MBS.Editor.Core.Compression; + +/// +/// Specifies values that indicate whether a compression operation emphasizes speed or compression size. +/// +public enum SystemCompressionLevel +{ + /// + /// The compression operation should optimally balance compression speed and output size. + /// + Optimal = 0, + /// + /// The compression operation should complete as quickly as possible, even if the resulting + /// file is not optimally compressed. + /// + Fastest = 1, + /// + /// No compression should be performed on the file. + /// + NoCompression = 2, + /// + /// The compression operation should create output as small as possible, even if + /// the operation takes a longer time to complete. + /// + SmallestSize = 3 +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionModule.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionModule.cs new file mode 100644 index 0000000..d01f1e1 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Compression/SystemCompressionModule.cs @@ -0,0 +1,44 @@ +using System.IO.Compression; + +namespace MBS.Editor.Core.Compression; + +public abstract class SystemCompressionModule : CompressionModule where TStream : Stream +{ + public SystemCompressionLevel CompressionLevel { get; set; } = SystemCompressionLevel.Optimal; + + + protected abstract TStream CreateCompressor(Stream stream); + protected abstract TStream CreateDecompressor(Stream stream); + + protected override void CompressInternal(Stream inputStream, Stream outputStream) + { + TStream _compressor = CreateCompressor(outputStream); + inputStream.CopyTo(_compressor); + + // !!! IMPORTANT !!! DO NOT FORGET TO FLUSH !!! + _compressor.Flush(); + _compressor.Close(); + } + protected override void DecompressInternal(Stream inputStream, Stream outputStream) + { + TStream _decompressor = CreateDecompressor(inputStream); + _decompressor.CopyTo(outputStream); + + // !!! IMPORTANT !!! DO NOT FORGET TO FLUSH !!! 
+ _decompressor.Flush(); + _decompressor.Close(); + } + + protected CompressionLevel GetSystemCompressionLevel() + { + switch (CompressionLevel) + { + case SystemCompressionLevel.Fastest: return System.IO.Compression.CompressionLevel.Fastest; + case SystemCompressionLevel.NoCompression: return System.IO.Compression.CompressionLevel.NoCompression; + case SystemCompressionLevel.Optimal: return System.IO.Compression.CompressionLevel.Optimal; + case SystemCompressionLevel.SmallestSize: return System.IO.Compression.CompressionLevel.SmallestSize; + } + throw new ArgumentException("no System.IO.Compression.CompressionLevel matches the given SystemCompressionLevel"); + } + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/DataFormat.cs b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormat.cs index a3d33aa..566e0ea 100644 --- a/editor-dotnet/src/lib/MBS.Editor.Core/DataFormat.cs +++ b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormat.cs @@ -1,6 +1,38 @@ namespace MBS.Editor.Core; -public class DataFormat +public abstract class DataFormat { + public void Load(ObjectModel objectModel, Stream stream) + { + LoadInternal(objectModel, stream); + } + protected abstract void LoadInternal(ObjectModel objectModel, Stream stream); + + public void Save(ObjectModel objectModel, Stream stream) + { + SaveInternal(objectModel, stream); + } + protected abstract void SaveInternal(ObjectModel objectModel, Stream stream); + + + + public static T FromType() where T : DataFormat, new() + { + T objectModel = new T(); + return objectModel; + } + public static DataFormat FromType(Type type) + { + if (type.IsAbstract || !type.IsSubclassOf(typeof(DataFormat))) + { + throw new InvalidCastException("type must be a non-abstract subclass of DataFormat"); + } + DataFormat? 
objectModel = type.Assembly.CreateInstance(type.FullName) as DataFormat; + if (objectModel == null) + { + throw new TypeLoadException("could not create DataFormat from type name"); + } + return objectModel; + } } \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/DataFormatMetadata.cs b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormatMetadata.cs new file mode 100644 index 0000000..b53fdf7 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormatMetadata.cs @@ -0,0 +1,10 @@ +namespace MBS.Editor.Core; + +using MBS.Core.Settings; + +public class DataFormatMetadata +{ + public SettingsProvider ExportSettings { get; } + public SettingsProvider ImportSettings { get; } + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/DataFormats/FileSystem/ZIP/ZIPDataFormat.cs b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormats/FileSystem/ZIP/ZIPDataFormat.cs new file mode 100644 index 0000000..160810c --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/DataFormats/FileSystem/ZIP/ZIPDataFormat.cs @@ -0,0 +1,13 @@ + +namespace MBS.Editor.Core.DataFormats.FileSystem.ZIP; + +public class ZIPDataFormat : DataFormat +{ + protected override void LoadInternal(ObjectModel objectModel, Stream stream) + { + + } + protected override void SaveInternal(ObjectModel objectModel, Stream stream) + { + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Document.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Document.cs new file mode 100644 index 0000000..7f6ba86 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Document.cs @@ -0,0 +1,41 @@ +namespace MBS.Editor.Core; + +public class Document +{ + public ObjectModel ObjectModel { get; set; } + public DataFormat DataFormat { get; set; } + + public Stream InputStream { get; set; } + public Stream OutputStream { get; set; } + + public Document(ObjectModel objectModel, DataFormat dataFormat, Stream stream) : this(objectModel, dataFormat, stream, stream) { } + public Document(ObjectModel objectModel, DataFormat dataFormat, Stream inputStream, Stream outputStream) + { + ObjectModel = objectModel; + DataFormat = dataFormat; + InputStream = inputStream; + OutputStream = outputStream; + } + + public void Load() + { + DataFormat.Load(ObjectModel, InputStream); + } + public void Save() + { + DataFormat.Save(ObjectModel, OutputStream); + } + + public static Document Load(ObjectModel objectModel, DataFormat dataFormat, Stream stream) + { + Document doc = new Document(objectModel, dataFormat, stream); + doc.Load(); + return doc; + } + public static Document Save(ObjectModel objectModel, DataFormat dataFormat, Stream stream) + { + Document doc = new Document(objectModel, dataFormat, stream); + doc.Save(); + return doc; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessage.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessage.cs new file mode 100644 index 0000000..a93787e --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessage.cs @@ -0,0 +1,138 @@ +namespace MBS.Editor.Core.Hosting; +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; + +public delegate void HostApplicationMessageModifyingEventHandler(object sender, HostApplicationMessageModifyingEventArgs e); +public class HostApplicationMessageModifyingEventArgs + : CancelEventArgs +{ + public HostApplicationMessageModifyingEventArgs(HostApplicationMessage message) + { + Message = message; + } + + public 
HostApplicationMessage Message { get; } +} +public delegate void HostApplicationMessageModifiedEventHandler(object sender, HostApplicationMessageModifiedEventArgs e); +public class HostApplicationMessageModifiedEventArgs + : EventArgs +{ + public HostApplicationMessageModifiedEventArgs(HostApplicationMessage message) + { + Message = message; + } + + public HostApplicationMessage Message { get; } +} + +public class HostApplicationMessage +{ + public class HostApplicationMessageCollection + : System.Collections.ObjectModel.Collection + { + public event HostApplicationMessageModifyingEventHandler? MessageAdding; + public event HostApplicationMessageModifyingEventHandler? MessageRemoving; + + public event HostApplicationMessageModifiedEventHandler? MessageAdded; + public event HostApplicationMessageModifiedEventHandler? MessageRemoved; + + public HostApplicationMessage Add(HostApplicationMessageSeverity severity, string description, string fileName = null, int? lineNumber = null, int? columnNumber = null, string projectName = null) + { + HostApplicationMessage message = new HostApplicationMessage(); + message.Severity = severity; + message.Description = description; + message.FileName = fileName; + message.LineNumber = lineNumber; + message.ColumnNumber = columnNumber; + message.ProjectName = projectName; + Add(message); + return message; + } + + protected virtual void OnMessageAdding(HostApplicationMessageModifyingEventArgs e) + { + if (MessageAdding != null) + { + MessageAdding(this, e); + } + } + protected virtual void OnMessageAdded(HostApplicationMessageModifiedEventArgs e) + { + if (MessageAdded != null) + { + MessageAdded(this, e); + } + } + + protected virtual void OnMessageRemoving(HostApplicationMessageModifyingEventArgs e) + { + if (MessageRemoving != null) + { + MessageRemoving(this, e); + } + } + protected virtual void OnMessageRemoved(HostApplicationMessageModifiedEventArgs e) + { + if (MessageRemoved != null) + { + MessageRemoved(this, e); + } + } + + public event EventHandler? MessagesCleared; + protected virtual void OnMessagesCleared(EventArgs e) + { + if (MessagesCleared != null) MessagesCleared(this, e); + } + + protected override void InsertItem(int index, HostApplicationMessage item) + { + HostApplicationMessage message = item; + HostApplicationMessageModifyingEventArgs e = new HostApplicationMessageModifyingEventArgs(message); + OnMessageAdding(e); + if (e.Cancel) return; + + base.InsertItem(index, item); + + OnMessageAdded(new HostApplicationMessageModifiedEventArgs(message)); + } + protected override void RemoveItem(int index) + { + HostApplicationMessage message = this[index]; + HostApplicationMessageModifyingEventArgs e = new HostApplicationMessageModifyingEventArgs(message); + OnMessageRemoving(e); + if (e.Cancel) return; + + base.RemoveItem(index); + + OnMessageRemoved(new HostApplicationMessageModifiedEventArgs(message)); + } + protected override void ClearItems() + { + base.ClearItems(); + OnMessagesCleared(EventArgs.Empty); + } + } + + private HostApplicationMessageSeverity mvarSeverity = HostApplicationMessageSeverity.None; + public HostApplicationMessageSeverity Severity { get { return mvarSeverity; } set { mvarSeverity = value; } } + + private string mvarDescription = String.Empty; + public string Description { get { return mvarDescription; } set { mvarDescription = value; } } + + private string mvarFileName = null; + public string FileName { get { return mvarFileName; } set { mvarFileName = value; } } + + private int? mvarLineNumber = null; + public int? 
LineNumber { get { return mvarLineNumber; } set { mvarLineNumber = value; } } + + private int? mvarColumnNumber = null; + public int? ColumnNumber { get { return mvarColumnNumber; } set { mvarColumnNumber = value; } } + + private string mvarProjectName = null; + public string ProjectName { get { return mvarProjectName; } set { mvarProjectName = value; } } + +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessageSeverity.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessageSeverity.cs new file mode 100644 index 0000000..e3a6d70 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationMessageSeverity.cs @@ -0,0 +1,9 @@ +namespace MBS.Editor.Core.Hosting; + +public enum HostApplicationMessageSeverity +{ + None = 0, + Notice = 1, + Warning = 2, + Error = 3 +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationOutputWindow.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationOutputWindow.cs new file mode 100644 index 0000000..aa9c156 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostApplicationOutputWindow.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MBS.Editor.Core.Hosting; + +/// +/// Handles the output window in Universal Editor. If the user is running a plugin which makes +/// use of these features in non-GUI mode, it will write to the console instead. +/// +public class HostApplicationOutputWindow +{ + public event TextWrittenEventHandler? TextWritten; + protected virtual void OnTextWritten(TextWrittenEventArgs e) + { + if (TextWritten != null) + { + TextWritten(this, e); + } + else + { + Console.Write(e.Text); + } + } + + public event EventHandler? 
TextCleared; + protected virtual void OnTextCleared(EventArgs e) + { + if (TextCleared != null) + { + TextCleared(this, e); + } + else + { + Console.Clear(); + } + } + + public void Clear() + { + OnTextCleared(EventArgs.Empty); + } + public void Write(string text) + { + OnTextWritten(new TextWrittenEventArgs(text)); + } + public void WriteLine(string text) + { + Write(text + System.Environment.NewLine); + } +} + +public delegate void TextWrittenEventHandler(object sender, TextWrittenEventArgs e); +public class TextWrittenEventArgs +{ + private string mvarText = String.Empty; + public string Text { get { return mvarText; } } + + public TextWrittenEventArgs(string text) + { + mvarText = text; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostServices.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostServices.cs new file mode 100644 index 0000000..daf8c6c --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/HostServices.cs @@ -0,0 +1,7 @@ +namespace MBS.Editor.Core.Hosting; + +public class HostServices +{ + public HostApplicationMessage.HostApplicationMessageCollection Messages { get; } = new HostApplicationMessage.HostApplicationMessageCollection(); + public HostApplicationOutputWindow OutputWindow { get; } = new HostApplicationOutputWindow(); +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/IHostApplication.cs b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/IHostApplication.cs new file mode 100644 index 0000000..f28c6a7 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/Hosting/IHostApplication.cs @@ -0,0 +1,6 @@ +namespace MBS.Editor.Core.Hosting; + +public interface IHostApplication +{ + HostServices HostServices { get; } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/IO/Endianness.cs b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Endianness.cs new file mode 100644 index 0000000..402d672 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Endianness.cs @@ -0,0 +1,7 @@ +namespace MBS.Editor.Core.IO; + +public enum Endianness +{ + LittleEndian, + BigEndian +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/IO/NewLineSequence.cs b/editor-dotnet/src/lib/MBS.Editor.Core/IO/NewLineSequence.cs new file mode 100644 index 0000000..dd6b86b --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/IO/NewLineSequence.cs @@ -0,0 +1,11 @@ +namespace MBS.Editor.Core.IO; + +public enum NewLineSequence +{ + Automatic, + SystemDefault, + CarriageReturn, + LineFeed, + CarriageReturnLineFeed, + LineFeedCarriageReturn +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/IO/Reader.cs b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Reader.cs new file mode 100644 index 0000000..305afe3 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Reader.cs @@ -0,0 +1,1848 @@ +// +// Reader.cs - input/output module for reading binary or text data +// +// Author: +// Michael Becker +// +// Copyright (c) 2011-2024 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. 
If not, see . + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; + +// FIXME: this does not have any idea how to handle network streams that don't +// support seeking + +namespace MBS.Editor.Core.IO; + +using MBS.Core; + +// [DebuggerNonUserCode()] +public class Reader : ReaderWriterBase +{ + public Reader(Stream st) : base(st) { } + + public void Read(byte[] buffer, int start, int length) + { + BaseStream.Read(buffer, start, length); + } + + public bool ReadBoolean() + { + return (ReadBytes(1)[0] != 0); + } + + public byte ReadByte() + { + return ReadBytes(1)[0]; + } + + string? charBuffer = null; + int charBufferIndex = 0; + + public char[] ReadChars(int count) + { + char[] value = new char[count]; + for (int i = 0; i < count; i++) + value[i] = ReadChar(); + return value; + } + + public T ReadObject() where T : new() + { + T obj = new T(); + // FIXME: implement this + return obj; + } + + public char ReadChar() + { + return ReadChar(DefaultEncoding); + } + public char ReadChar(Encoding encoding) + { + charBuffer = null; + if (charBuffer == null) + { + int maxByteCount = encoding.GetMaxByteCount(1); + byte[] bytes = PeekBytes(maxByteCount); + charBuffer = encoding.GetString(bytes); + charBufferIndex = 0; + } + + char c = charBuffer[charBufferIndex]; + charBufferIndex++; + + int ct = encoding.GetByteCount(new char[] { c }); + BaseStream.Seek(ct, SeekOrigin.Current); + + if (charBufferIndex + 1 > charBuffer.Length) + { + charBuffer = null; + } + return c; + } + public byte PeekByte() + { + byte b = ReadByte(); + BaseStream.Seek(-1, SeekOrigin.Current); + return b; + } + public byte[] PeekBytes(int length) + { + byte[] buffer = new byte[length]; + int len = BaseStream.Read(buffer, 0, length); + BaseStream.Seek(-len, SeekOrigin.Current); + return buffer; + } + public char PeekChar() + { + return (char)PeekByte(); + } + [CLSCompliant(false)] + public sbyte ReadSByte() + { + return (sbyte)(ReadBytes(1)[0]); + } + + private int xorkey_index = 0; + + [CLSCompliant(false)] + public byte[] ReadBytes(uint length) + { + byte[] buf = new byte[length]; + uint lastct = 0; + while (lastct < length) + { + int ct = (int)length; + byte[] buf2 = new byte[ct]; + Read(buf2, 0, ct); + + Array.Copy(buf2, 0, buf, lastct, buf2.Length); + lastct += (uint)ct; + } + /* + for (int i = 0; i < Transformations.Count; i++) + { + buf = Transformations[i].Transform(buf); + } + */ + return buf; + } + + [CLSCompliant(false)] + public byte[] ReadBytes(ulong length) + { + byte[] buf = new byte[length]; + for (ulong i = 0L; i < length; i += (ulong)1L) + { + buf[(int)i] = ReadByte(); + } + return buf; + } + + [DebuggerNonUserCode()] + public byte[] ReadBytes(long length) + { + byte[] buffer = new byte[length]; + BaseStream.Read(buffer, 0, (int)length); + return buffer; + } + + [CLSCompliant(false)] + public sbyte[] ReadSBytes(long length) + { + byte[] buffer = ReadBytes(length); + return (sbyte[])(Array)buffer; + } + + public int ReadCompactInt32() + { + int multiplier = 1; + byte b1 = this.ReadByte(); + if ((b1 & 0x80) == 1) + { + multiplier = -1; + } + if ((b1 & 0x40) == 1) + { + byte b2 = this.ReadByte(); + if ((b2 & 0x80) == 1) + { + byte b3 = this.ReadByte(); + if ((b2 & 0x80) == 1) + { + byte b4 = this.ReadByte(); + return (multiplier * (((b1 | (b2 << 8)) | (b3 << 0x10)) | (b4 << 0x18))); + } + return (multiplier * ((b1 | (b2 << 8)) | (b3 << 0x10))); + } + return (multiplier * (b1 | (b2 << 8))); + } + return (multiplier * b1); + } + + /// + /// Reads a in a format that encodes 
the property in a 2-bit field + /// and the property in a 62-bit field. + /// + /// An object that is equivalent to the System.DateTime object that was serialized by the method. + /// + /// The serialized value is less than or greater than . + /// + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public DateTime ReadDateTime() + { + return ReadDateTime64(); + } + /// + /// Reads a in a format that encodes the property in a 2-bit field + /// and the property in a 62-bit field. + /// + /// An object that is equivalent to the System.DateTime object that was serialized by the method. + /// + /// The serialized value is less than or greater than . + /// + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public DateTime ReadDateTime64() + { + long l = ReadInt64(); + return DateTime.FromBinary(l); + } + /// + /// Reads a in a format that encodes the property in a 2-bit field + /// and the property in a 30-bit field. + /// + /// An object that is equivalent to the System.DateTime object that was serialized by the method. + /// + /// The serialized value is less than or greater than . + /// + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public DateTime ReadDateTime32() + { + int l = ReadInt32(); + return DateTime.FromBinary(l); + } + + /// + /// Reads a in ISO-9660 format (yyyyMMddHHMMSSssT). + /// + /// The read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public DateTime ReadISO9660DateTime() + { + string year = ReadFixedLengthString(4); + int nYear = int.Parse(year); + + string month = ReadFixedLengthString(2); + int nMonth = int.Parse(month); + + string day = ReadFixedLengthString(2); + int nDay = int.Parse(day); + + string hour = ReadFixedLengthString(2); + int nHour = int.Parse(hour); + + string minute = ReadFixedLengthString(2); + int nMinute = int.Parse(minute); + + string second = ReadFixedLengthString(2); + int nSecond = int.Parse(second); + + string secondHundredths = ReadFixedLengthString(2); + int nSecondHundredths = int.Parse(secondHundredths); + + // offset from Greenwich Mean Time, in 15-minute intervals, + // as a twos complement signed number, positive for time + // zones east of Greenwich, and negative for time zones + // west of Greenwich + sbyte gmtOffset = ReadSByte(); + + return new DateTime(nYear, nMonth, nDay, nHour + gmtOffset, nMinute, nSecond, nSecondHundredths, DateTimeKind.Utc); + } + + public int Read7BitEncodedInt() + { + int num = 0; + int num2 = 0; + while (num2 != 35) + { + byte b = ReadByte(); + num |= (int)(b & 127) << num2; + num2 += 7; + if ((b & 128) == 0) + { + return num; + } + } + throw new ArgumentOutOfRangeException("Invalid 7-bit encoded Int32"); + } + + public string ReadFixedLengthUTF16EndianString(int byteCount) + { + return ReadFixedLengthUTF16EndianString(byteCount, DefaultEncoding); + } + public string ReadFixedLengthUTF16EndianString(int byteCount, Encoding encoding) + { + if (byteCount % 2 != 0) + { + throw new ArgumentException("byteCount must be an even number"); + } + + byte[] data = ReadBytes(byteCount); + + // swap endians + if (Endianness == Endianness.BigEndian) + { + for (int i = 0; i < data.Length; i += 2) + { + byte tmp = data[i + 1]; + data[i + 1] = data[i]; + data[i] = tmp; + } + } + + return encoding.GetString(data); + } + + /// + /// Reads a string from the current stream. 
The string is prefixed with the length, encoded as an integer seven bits at a time. + /// + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadLengthPrefixedString() + { + int num = 0; + int num2 = Read7BitEncodedInt(); + if (num2 < 0) throw new ArgumentOutOfRangeException("invalid string length"); + if (num2 == 0) return String.Empty; + + int count = (num2 - num > 128) ? 128 : (num2 - num); + return ReadFixedLengthString(count); + } + + /// + /// Reads a string of the specified length from the current stream. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(byte length) + { + return this.ReadFixedLengthString(length, DefaultEncoding); + } + /// + /// Reads a string of the specified length from the current stream. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(int length) + { + return ReadFixedLengthString(length, DefaultEncoding); + } + /// + /// Reads a string of the specified length from the current stream. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public string ReadFixedLengthString(uint length) + { + return this.ReadFixedLengthString(length, DefaultEncoding); + } + /// + /// Reads a string of the specified length from the current stream. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(byte length, Encoding encoding) + { + return this.ReadFixedLengthString((int)length, encoding); + } + /// + /// Reads a string of the specified length from the current stream using the specified encoding. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The to use to convert the bytes read into a instance. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(int length, Encoding encoding) + { + byte[] id = ReadBytes(length); + return encoding.GetString(id); + } + /// + /// Reads a string of the specified length from the current stream using the specified encoding. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The to use to convert the bytes read into a instance. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. 
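+    // Illustrative usage sketch (the stream variable and the "RIFF" signature are
+    // placeholders, not anything defined by this library): a fixed-length read is a
+    // typical way to pull a file signature before deciding how to parse the rest:
+    //   var reader = new Reader(stream);
+    //   string magic = reader.ReadFixedLengthString(4, Encoding.ASCII);
+    //   if (magic != "RIFF") { /* not the format we expected */ }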
+ [CLSCompliant(false)] + public string ReadFixedLengthString(uint length, Encoding encoding) + { + int l1 = (int)length; + int l2 = ((int)(length - l1)); + byte[] id = ReadBytes(l1); + if (l2 > 0) + { + Array.Resize(ref id, id.Length + l2); + Array.Copy(ReadBytes(l2), 0, id, id.Length - l2, l2); + } + return encoding.GetString(id); + } + /// + /// Reads a string of the specified length from the current stream. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(long length) + { + return ReadFixedLengthString(length, DefaultEncoding); + } + /// + /// Reads a string of the specified length from the current stream using the specified encoding. This method does not trim null characters; use to do this. + /// + /// The length of the string to read. + /// The to use to convert the bytes read into a instance. + /// The string being read. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public string ReadFixedLengthString(long length, Encoding encoding) + { + return encoding.GetString(ReadBytes((ulong)length)); + } + + /// + /// Reads a 16-byte (128-bit) value from the current stream and advances the current position of the stream by sixteen bytes. + /// + /// A 16-byte (128-bit) value read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public Guid ReadGuid(bool reverse = false) + { + uint a = 0; + ushort b = 0; + ushort c = 0; + byte d = 0; + byte e = 0; + byte f = 0; + byte g = 0; + byte h = 0; + byte i = 0; + byte j = 0; + byte k = 0; + if (!reverse) + { + a = ReadUInt32(); + b = ReadUInt16(); + c = ReadUInt16(); + d = ReadByte(); + e = ReadByte(); + f = ReadByte(); + g = ReadByte(); + h = ReadByte(); + i = ReadByte(); + j = ReadByte(); + k = ReadByte(); + } + else + { + k = ReadByte(); + j = ReadByte(); + i = ReadByte(); + h = ReadByte(); + g = ReadByte(); + f = ReadByte(); + e = ReadByte(); + d = ReadByte(); + c = ReadUInt16(); + b = ReadUInt16(); + a = ReadUInt32(); + } + return new Guid(a, b, c, d, e, f, g, h, i, j, k); + } + /// + /// Reads an array of 16-byte (128-bit) values from the current stream and advances the current position of the stream by sixteen bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 16-byte (128-bit) values read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public Guid[] ReadGuidArray(int count) + { + Guid[] retval = new Guid[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadGuid(); + } + return retval; + } + + /// + /// Reads a 2-byte signed integer from the current stream and advances the current position of the stream by two bytes. + /// + /// A 2-byte signed integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. 
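+    // The multi-byte readers below honour the Endianness property inherited from
+    // ReaderWriterBase: bytes are reordered before BitConverter sees them, which
+    // assumes a little-endian host (there is no BitConverter.IsLittleEndian check).
+    // For example, the byte pair 0x34 0x12 reads as 0x1234 (4660) with
+    // Endianness.LittleEndian and as 0x3412 (13330) with Endianness.BigEndian.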
+ public short ReadInt16() + { + byte[] buffer = ReadBytes((uint)2); + byte[] _buffer = new byte[2]; + if (base.Endianness == Endianness.LittleEndian) + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + } + else if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[1]; + _buffer[1] = buffer[0]; + } + return BitConverter.ToInt16(_buffer, 0); + } + /// + /// Reads an array of 2-byte signed integers from the current stream and advances the current position of the stream by two bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 2-byte signed integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public short[] ReadInt16Array(int count) + { + short[] retval = new short[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadInt16(); + } + return retval; + } + /// + /// Reads a 2-byte unsigned integer from the current stream and advances the current position of the stream by two bytes. + /// + /// A 2-byte unsigned integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public ushort ReadUInt16() + { + byte[] buffer = ReadBytes(2); + if (base.Endianness == Endianness.LittleEndian) + { + return (ushort)(buffer[0] | (buffer[1] << 8)); + } + return (ushort)(buffer[1] | (buffer[0] << 8)); + } + /// + /// Reads an array of 2-byte unsigned integers from the current stream and advances the current position of the stream by two bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 2-byte unsigned integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public ushort[] ReadUInt16Array(int count) + { + ushort[] retval = new ushort[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadUInt16(); + } + return retval; + } + /// + /// Reads a 3-byte signed integer from the current stream and advances the current position of the stream by three bytes. + /// + /// A 3-byte signed integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public int ReadInt24() + { + byte[] buffer = ReadBytes((uint)3); + byte[] _buffer = new byte[3]; + if (base.Endianness == Endianness.LittleEndian) + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = 0; + } + else if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = 0; + _buffer[1] = buffer[2]; + _buffer[2] = buffer[1]; + _buffer[3] = buffer[0]; + } + return BitConverter.ToInt32(_buffer, 0); + } + /// + /// Reads an array of 3-byte signed integers from the current stream and advances the current position of the stream by three bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 3-byte signed integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public int[] ReadInt24Array(int count) + { + int[] retval = new int[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadInt24(); + } + return retval; + } + /// + /// Reads a 3-byte unsigned integer from the current stream and advances the current position of the stream by three bytes. 
+ /// + /// A 3-byte unsigned integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public uint ReadUInt24() + { + // TODO: Test this out! + byte[] buffer = ReadBytes(3); + if (base.Endianness == Endianness.LittleEndian) + { + return (uint)((buffer[2] << 16) | (buffer[1] << 8) | (buffer[0])); + } + return (uint)((buffer[2]) | (buffer[1] << 8) | (buffer[0] << 16)); + } + /// + /// Reads an array of 3-byte unsigned integers from the current stream and advances the current position of the stream by three bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 3-byte unsigned integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public uint[] ReadUInt24Array(int count) + { + uint[] retval = new uint[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadUInt24(); + } + return retval; + } + /// + /// Reads a 4-byte signed integer from the current stream and advances the current position of the stream by four bytes. + /// + /// A 4-byte signed integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public int ReadInt32() + { + byte[] buffer = ReadBytes((uint)4); + byte[] _buffer = new byte[4]; + if (base.Endianness == Endianness.LittleEndian) + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = buffer[3]; + } + else if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[3]; + _buffer[1] = buffer[2]; + _buffer[2] = buffer[1]; + _buffer[3] = buffer[0]; + } + return BitConverter.ToInt32(_buffer, 0); + } + /// + /// Reads an array of 4-byte signed integers from the current stream and advances the current position of the stream by four bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 4-byte signed integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public int[] ReadInt32Array(int count) + { + int[] retval = new int[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadInt32(); + } + return retval; + } + /// + /// Reads a 4-byte unsigned integer from the current stream but does not advance the current position of the stream. + /// + /// A 4-byte unsigned integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public uint PeekUInt32() + { + uint value = ReadUInt32(); + BaseStream.Seek(-4, SeekOrigin.Current); + return value; + } + /// + /// Reads a 4-byte unsigned integer from the current stream and advances the current position of the stream by four bytes. + /// + /// A 4-byte unsigned integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. 
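+    // Worked example for the shift-and-or composition below: the bytes
+    // 0x78 0x56 0x34 0x12 give 0x78 | 0x56<<8 | 0x34<<16 | 0x12<<24 = 0x12345678
+    // on the little-endian path, and 0x78563412 on the big-endian path.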
+ [CLSCompliant(false)] + public uint ReadUInt32() + { + byte[] buffer = ReadBytes((uint)4); + if (base.Endianness == Endianness.LittleEndian) + { + return (uint)(((buffer[0] | (buffer[1] << 8)) | (buffer[2] << 0x10)) | (buffer[3] << 0x18)); + } + return (uint)(((buffer[3] | (buffer[2] << 8)) | (buffer[1] << 0x10)) | (buffer[0] << 0x18)); + } + /// + /// Reads an array of 4-byte unsigned integers from the current stream and advances the current position of the stream by four bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 4-byte unsigned integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public uint[] ReadUInt32Array(int count) + { + uint[] retval = new uint[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadUInt32(); + } + return retval; + } + /// + /// Reads an 8-byte signed integer from the current stream and advances the current position of the stream by eight bytes. + /// + /// An 8-byte signed integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public long ReadInt64() + { + byte[] buffer = ReadBytes((uint)8); + byte[] _buffer = new byte[8]; + if (base.Endianness == Endianness.LittleEndian) + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = buffer[3]; + _buffer[4] = buffer[4]; + _buffer[5] = buffer[5]; + _buffer[6] = buffer[6]; + _buffer[7] = buffer[7]; + } + else if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[7]; + _buffer[1] = buffer[6]; + _buffer[2] = buffer[5]; + _buffer[3] = buffer[4]; + _buffer[4] = buffer[3]; + _buffer[5] = buffer[2]; + _buffer[6] = buffer[1]; + _buffer[7] = buffer[0]; + } + return BitConverter.ToInt64(_buffer, 0); + } + /// + /// Reads an array of 8-byte signed integers from the current stream and advances the current position of the stream by eight bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 8-byte signed integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + public long[] ReadInt64Array(int count) + { + long[] retval = new long[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadInt64(); + } + return retval; + } + /// + /// Reads an 8-byte unsigned integer from the current stream and advances the current position of the stream by eight bytes. + /// + /// An 8-byte unsigned integer read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. 
+ [CLSCompliant(false)] + public ulong ReadUInt64() + { + byte[] buffer = ReadBytes((uint)8); + byte[] _buffer = new byte[8]; + if (base.Endianness == Endianness.LittleEndian) + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = buffer[3]; + _buffer[4] = buffer[4]; + _buffer[5] = buffer[5]; + _buffer[6] = buffer[6]; + _buffer[7] = buffer[7]; + } + else if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[7]; + _buffer[1] = buffer[6]; + _buffer[2] = buffer[5]; + _buffer[3] = buffer[4]; + _buffer[4] = buffer[3]; + _buffer[5] = buffer[2]; + _buffer[6] = buffer[1]; + _buffer[7] = buffer[0]; + } + return BitConverter.ToUInt64(_buffer, 0); + } + /// + /// Reads an array of 8-byte unsigned integers from the current stream and advances the current position of the stream by eight bytes times the number of values read. + /// + /// The number of values to read from the current stream. + /// An array of 8-byte unsigned integers read from the current stream. + /// The end of the stream is reached. + /// The stream is closed. + /// An I/O error occurs. + [CLSCompliant(false)] + public ulong[] ReadUInt64Array(int count) + { + ulong[] retval = new ulong[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadUInt64(); + } + return retval; + } + + public float ReadSingle() + { + byte[] buffer = ReadBytes((uint)4); + byte[] _buffer = new byte[4]; + if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[3]; + _buffer[1] = buffer[2]; + _buffer[2] = buffer[1]; + _buffer[3] = buffer[0]; + } + else + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = buffer[3]; + } + return BitConverter.ToSingle(_buffer, 0); + } + public float[] ReadSingleArray(int count) + { + float[] retval = new float[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadSingle(); + } + return retval; + } + + /// + /// Reads a 64-bit floating-point value. + /// + /// The double. 
+ public double ReadDouble() + { + byte[] buffer = ReadBytes((uint)8); + byte[] _buffer = new byte[8]; + if (base.Endianness == Endianness.BigEndian) + { + _buffer[0] = buffer[7]; + _buffer[1] = buffer[6]; + _buffer[2] = buffer[5]; + _buffer[3] = buffer[4]; + _buffer[4] = buffer[3]; + _buffer[5] = buffer[2]; + _buffer[6] = buffer[1]; + _buffer[7] = buffer[0]; + } + else + { + _buffer[0] = buffer[0]; + _buffer[1] = buffer[1]; + _buffer[2] = buffer[2]; + _buffer[3] = buffer[3]; + _buffer[4] = buffer[4]; + _buffer[5] = buffer[5]; + _buffer[6] = buffer[6]; + _buffer[7] = buffer[7]; + } + return BitConverter.ToDouble(_buffer, 0); + } + public double[] ReadDoubleArray(int count) + { + double[] retval = new double[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadDouble(); + } + return retval; + } + + + public int ReadVariableLengthInt32() + { + int value = ReadByte(); + byte c = 0; + + if ((value & 0x80) == 0x80) + { + value &= 0x7F; + do + { + value = (value << 7) + ((c = ReadByte()) & 0x7F); + } + while ((c & 0x80) == 0x80); + } + + return value; + } + public int[] ReadVariableLengthInt32Array(int count) + { + int[] retval = new int[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadVariableLengthInt32(); + } + return retval; + } + + [CLSCompliant(false)] + public ulong ReadUInt48() + { + byte[] buffer = ReadBytes((uint)6); + if (base.Endianness == Endianness.LittleEndian) + { + uint num = (uint)(((buffer[0] << 0x10)) | (buffer[1] << 0x18)); + uint num2 = (uint)(((buffer[2] | (buffer[3] << 8)) | (buffer[4] << 0x10)) | (buffer[5] << 0x18)); + return (ulong)(num | num2 << 0x20); + } + else + { + uint num = (uint)(((buffer[5] << 0x10)) | (buffer[4] << 0x18)); + uint num2 = (uint)(((buffer[3] | (buffer[2] << 8)) | (buffer[1] << 0x10)) | (buffer[0] << 0x18)); + return (ulong)(num << 0x20 | num2); + } + } + [CLSCompliant(false)] + public ulong[] ReadUInt48Array(int count) + { + ulong[] retval = new ulong[count]; + for (int i = 0; i < count; i++) + { + retval[i] = ReadUInt48(); + } + return retval; + } + + + public string ReadNullTerminatedString() + { + return this.ReadNullTerminatedString(DefaultEncoding); + } + + public string ReadNullTerminatedString(int maxLength) + { + return this.ReadNullTerminatedString(maxLength, DefaultEncoding); + } + + public string ReadNullTerminatedString(Encoding encoding) + { + List r = new List(); + while (true) + { + byte nextChar = ReadByte(); + if ((nextChar == 0 && !(encoding == Encoding.Unicode)) || ((encoding == Encoding.Unicode) && (nextChar == 0 && (r.Count > 2 && r[r.Count - 1] == 0)))) + { + string result = encoding.GetString(r.ToArray()); + return result; + } + r.Add(nextChar); + } + } + + public string ReadNullTerminatedString(int maxLength, Encoding encoding) + { + string ret = this.ReadNullTerminatedString(encoding); + if (ret.Length > maxLength) + { + return ret.Substring(0, maxLength); + } + if (ret.Length < maxLength) + { + ReadBytes((maxLength - ret.Length) - 1); + } + return ret; + } + + /// + /// Reads a length-prefixed string that is prefixed with a signed short (2-byte) length, rather than an int (4-byte) length. + /// + /// + public string ReadInt16String() + { + short length = ReadInt16(); + return this.ReadFixedLengthString((int)length); + } + /// + /// Reads a length-prefixed string that is prefixed with an unsigned short (2-byte) length, rather than an int (4-byte) length. 
+ /// + /// + public string ReadUInt16String() + { + ushort length = ReadUInt16(); + return this.ReadFixedLengthString((uint)length); + } + + public byte[] ReadToEnd() + { + return ReadBytes(BaseStream.GetRemaining()); + } + public string ReadStringToEnd(Encoding encoding = null) + { + if (encoding == null) encoding = Encoding.Default; + byte[] data = ReadToEnd(); + return encoding.GetString(data); + } + + public byte[] ReadUntil(byte[] sequence, bool includeSequence = false) + { + byte[] w = new byte[0]; + while (!BaseStream.EndOfStream()) + { + Array.Resize(ref w, w.Length + 1); + w[w.Length - 1] = ReadByte(); + + bool matches = true; + for (int i = 0; i < sequence.Length; i++) + { + if (w.Length < sequence.Length) + { + matches = false; + break; + } + if (w[w.Length - (sequence.Length - i)] != sequence[i]) + { + matches = false; + break; + } + } + + if (matches) + { + if (!includeSequence) + { + Array.Resize(ref w, w.Length - sequence.Length); + + // HACK: we aren't including the sequence in the response, BUT we should consume it anyway... right? + try + { + BaseStream.Seek(-sequence.Length, SeekOrigin.Current); + } + catch (NotSupportedException ex) + { + + } + + } + return w; + } + } + return w; + } + public string ReadUntil(string sequence) + { + return ReadUntil(sequence, DefaultEncoding); + } + public string ReadUntil(string sequence, bool includeSequence) + { + return ReadUntil(sequence, DefaultEncoding, includeSequence); + } + public string ReadUntil(string sequence, Encoding encoding) + { + return encoding.GetString(ReadUntil(sequence.ToCharArray(), encoding)); + } + public string ReadUntil(string sequence, Encoding encoding, bool includeSequence) + { + return new string(ReadUntil(sequence.ToCharArray(), encoding, includeSequence)); + } + public byte[] ReadUntil(char[] sequence) + { + return this.ReadUntil(sequence, DefaultEncoding); + } + public char[] ReadUntil(char[] sequence, bool includeSequence) + { + return this.ReadUntil(sequence, DefaultEncoding, includeSequence); + } + public byte[] ReadUntil(char[] sequence, Encoding encoding) + { + return this.ReadUntil(encoding.GetBytes(sequence)); + } + public char[] ReadUntil(char[] sequence, Encoding encoding, bool includeSequence) + { + return encoding.GetChars(this.ReadUntil(encoding.GetBytes(sequence), includeSequence)); + } + public string ReadStringUntil(string sequence) + { + return ReadStringUntil(sequence, DefaultEncoding, DefaultEncoding); + } + public string ReadStringUntil(string sequence, bool includeSequence) + { + return ReadStringUntil(sequence, DefaultEncoding, DefaultEncoding, includeSequence); + } + public string ReadStringUntil(string sequence, Encoding inputEncoding, Encoding outputEncoding) + { + return ReadStringUntil(sequence.ToCharArray(), inputEncoding, outputEncoding); + } + public string ReadStringUntil(string sequence, Encoding inputEncoding, Encoding outputEncoding, bool includeSequence) + { + return ReadStringUntil(sequence.ToCharArray(), inputEncoding, outputEncoding, includeSequence); + } + public string ReadStringUntil(char[] sequence) + { + return ReadStringUntil(sequence, DefaultEncoding, DefaultEncoding); + } + public string ReadStringUntil(char[] sequence, bool includeSequence) + { + return ReadStringUntil(sequence, DefaultEncoding, DefaultEncoding, includeSequence); + } + public string ReadStringUntil(char[] sequence, Encoding inputEncoding, Encoding outputEncoding) + { + byte[] bytes = ReadUntil(inputEncoding.GetBytes(sequence)); + return outputEncoding.GetString(bytes); + } + public string 
ReadStringUntil(char[] sequence, Encoding inputEncoding, Encoding outputEncoding, bool includeSequence) + { + return outputEncoding.GetString(ReadUntil(inputEncoding.GetBytes(sequence), includeSequence)); + } + + public void SeekUntilFirstNonNull() + { + while (PeekByte() == 0) + { + ReadChar(); + } + } + + public string[] ReadNullTerminatedStringArray(int stringTableSize) + { + System.Collections.Generic.List list = new System.Collections.Generic.List(); + long endpos = BaseStream.Position + stringTableSize; + while (BaseStream.Position < endpos) + { + list.Add(ReadNullTerminatedString()); + } + return list.ToArray(); + } + + // TODO: TEST THIS!! + public decimal ReadDecimal() + { + byte[] buffer = ReadBytes(16); + int num = (int)buffer[0] | (int)buffer[1] << 8 | (int)buffer[2] << 16 | (int)buffer[3] << 24; + int num2 = (int)buffer[4] | (int)buffer[5] << 8 | (int)buffer[6] << 16 | (int)buffer[7] << 24; + int num3 = (int)buffer[8] | (int)buffer[9] << 8 | (int)buffer[10] << 16 | (int)buffer[11] << 24; + int flags = (int)buffer[12] | (int)buffer[13] << 8 | (int)buffer[14] << 16 | (int)buffer[15] << 24; + + bool isNegative = ((flags & -2147483648) == -2147483648); + byte scale = (byte)(flags >> 16); + + if ((flags & 2130771967) == 0 && (flags & 16711680) <= 1835008) + { + return new Decimal(num, num2, num3, isNegative, scale); + } + throw new ArgumentOutOfRangeException("Invalid decimal"); + } + + public string ReadByteSizedString() + { + byte len = ReadByte(); + return ReadFixedLengthString(len); + } + + public short ReadDoubleEndianInt16() + { + short value1 = ReadInt16(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + short value2 = ReadInt16(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + + if (value2 != value1) + { + throw new InvalidOperationException("Big-endian value does not match little-endian value"); + } + return value1; + } + [CLSCompliant(false)] + public ushort ReadDoubleEndianUInt16() + { + ushort value1 = ReadUInt16(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + ushort value2 = ReadUInt16(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + + if (value2 != value1) + { + throw new InvalidOperationException("Big-endian value does not match little-endian value"); + } + return value1; + } + public int ReadDoubleEndianInt32() + { + int value1 = ReadInt32(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + int value2 = ReadInt32(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + + if (value2 != value1) + { + throw new InvalidOperationException("Big-endian value does not match little-endian value"); + } + return value1; + } + [CLSCompliant(false)] + public uint ReadDoubleEndianUInt32() + { + uint value1 = ReadUInt32(); + if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + uint value2 = ReadUInt32(); + 
if (base.Endianness == Endianness.LittleEndian) + { + base.Endianness = Endianness.BigEndian; + } + else + { + base.Endianness = Endianness.LittleEndian; + } + + if (value2 != value1) + { + throw new InvalidOperationException("Big-endian value does not match little-endian value"); + } + return value1; + } + + private int ToInt32(byte[] buffer) + { + int ret = 0; + int mode = 0; + for (int i = 0; i < Math.Min(4, buffer.Length); i++) + { + ret |= (buffer[i] << mode); + mode += 8; + } + return ret; + } + + public int ReadCompactInt32New() + { + byte[] buffer = new byte[2]; + int start = 0; + int length = buffer.Length; + while (true) + { + Read(buffer, start, length); + if (buffer[buffer.Length - 1] == 0 || (buffer.Length > 4)) + { + return ToInt32(buffer); + } + else + { + start = buffer.Length; + length = 1; + Array.Resize(ref buffer, buffer.Length + 1); + } + } + } + + public object ReadBEncodedObject() + { + char w = (char)PeekChar(); + switch (w) + { + case 'd': + { + // Read the starting 'd' + w = ReadChar(); + + Dictionary dict = new Dictionary(); + while (w != 'e') + { + string key = (string)ReadBEncodedObject(); + object value = ReadBEncodedObject(); + w = (char)PeekChar(); + dict.Add(key, value); + } + + // Read the final 'e' + w = ReadChar(); + + return dict; + } + case 'l': + { + // Read the starting 'l' + w = ReadChar(); + + List list = new List(); + while (w != 'e') + { + object item = ReadBEncodedObject(); + w = (char)PeekChar(); + + list.Add(item); + } + + // Read the final 'e' + w = ReadChar(); + return list; + } + case 'i': + { + // Read the starting 'i' + w = ReadChar(); + string num = String.Empty; + while (w != 'e') + { + w = ReadChar(); + if (w != 'e') + { + num += w; + } + } + // Already read the final 'e' + + return Int32.Parse(num); + } + default: + { + // Assume a string + w = (char)PeekChar(); + string num = String.Empty; + string val = String.Empty; + while (w != ':') + { + w = ReadChar(); + if (w != ':') + { + num += w; + } + } + + uint nnum = UInt32.Parse(num); + val = ReadFixedLengthString(nnum); + + return val; + } + } + } + /// + /// Reads a 32-bit integer length-prefixed string using the system default encoding. + /// + /// + /// + public string ReadInt32String() + { + return ReadInt32String(DefaultEncoding); + } + /// + /// Reads a 32-bit integer length-prefixed string using the specified encoding. + /// + /// + /// + public string ReadInt32String(Encoding encoding) + { + int length = ReadInt32(); + return ReadFixedLengthString(length); + } + + /// + /// Reads a length-prefixed . 
+ /// + /// + public Version ReadVersion() + { + byte parts = ReadByte(); + switch (parts) + { + case 1: + { + int vmaj = ReadInt32(); + return new Version(vmaj, 0); + } + case 2: + { + int vmaj = ReadInt32(); + int vmin = ReadInt32(); + return new Version(vmaj, vmin); + } + case 3: + { + int vmaj = ReadInt32(); + int vmin = ReadInt32(); + int vbld = ReadInt32(); + return new Version(vmaj, vmin, vbld); + } + case 4: + { + int vmaj = ReadInt32(); + int vmin = ReadInt32(); + int vbld = ReadInt32(); + int vrev = ReadInt32(); + + if (vbld > -1) + { + if (vrev > -1) + { + return new Version(vmaj, vmin, vbld, vrev); + } + else + { + return new Version(vmaj, vmin, vbld); + } + } + else + { + return new Version(vmaj, vmin); + } + } + } + return new Version(); + } + + private short[] ReadInt16ArrayWTF(int count) + { + byte[] buffer = new byte[count * 2]; + Read(buffer, 0, buffer.Length); + + short[] buffer2 = new short[count]; + for (int i = 0; i < buffer.Length; i += 2) + { + byte b1 = buffer[i]; + byte b2 = buffer[i + 1]; + int index = (int)(i / 2); + + if (base.Endianness == Endianness.LittleEndian) + { + buffer2[index] = (short)(b1 | (b2 << 8)); + } + else if (base.Endianness == Endianness.BigEndian) + { + buffer2[index] = (short)(b2 | (b1 << 8)); + } + } + return buffer2; + } + + public string PeekFixedLengthString(int count) + { + return PeekFixedLengthString(count, DefaultEncoding); + } + public string PeekFixedLengthString(int count, Encoding encoding) + { + byte[] data = PeekBytes(count); + return encoding.GetString(data); + } + + /// + /// Reads a half (2 bytes/half instead of 4 bytes/single) as a floating-point value. + /// + /// + public float ReadHalf() + { + byte[] buffer = ReadBytes(2); + byte[] buffer2 = new byte[4]; + if (base.Endianness == Endianness.LittleEndian) + { + buffer2[0] = 0; + buffer2[1] = 0; + buffer2[2] = buffer[0]; + buffer2[3] = buffer[1]; + } + else + { + buffer2[0] = buffer[0]; + buffer2[1] = buffer[1]; + buffer2[2] = 0; + buffer2[3] = 0; + } + return BitConverter.ToSingle(buffer2, 0); + } + + public int ReadAtMostBytes(byte[] buffer, int count) + { + if (BaseStream.GetRemaining() == 0) return 0; + + if (count < BaseStream.GetRemaining()) + { + Read(buffer, 0, count); + return count; + } + else + { + Read(buffer, 0, (int)BaseStream.GetRemaining()); + return (int)BaseStream.GetRemaining(); + } + } + + private byte[] read_buf = new byte[4096]; + private int getbit_buf = 0; + private int getbit_len = 0; + private int getbit_count = 0; + private int getbit_mask = 0; + + public int ReadBitsAsInt32(int count) + { + int i, x = 0; + + for (i = 0; i < count; i++) + { + if (getbit_mask == 0) + { + if (getbit_len == getbit_count) + { + getbit_len = ReadAtMostBytes(read_buf, 4096); + if (getbit_len == 0) throw new EndOfStreamException(); + getbit_count = 0; + } + + getbit_buf = read_buf[getbit_count++]; + getbit_mask = 128; + } + x <<= 1; + if ((getbit_buf & getbit_mask) != 0) x |= 1; + getbit_mask >>= 1; + } + return x; + } + + public string ReadInt64String() + { + long length = ReadInt64(); + string value = ReadFixedLengthString(length); + return value; + } + + public string ReadUntil(string[] until) + { + string rest = null; + return ReadUntil(until, out rest); + } + public string ReadUntil(string[] until, out string rest) + { + return ReadUntil(until, null, null, out rest); + } + public string ReadUntil(string until, string ignoreBegin, string ignoreEnd) + { + return ReadUntil(new string[] { until }, ignoreBegin, ignoreEnd); + } + public string ReadUntil(string[] until, 
string ignoreBegin, string ignoreEnd) + { + string rest = null; + return ReadUntil(until, ignoreBegin, ignoreEnd, out rest); + } + public string ReadUntil(string[] until, string ignoreBegin, string ignoreEnd, out string rest) + { + System.Text.StringBuilder sb = new System.Text.StringBuilder(); + + while (!BaseStream.EndOfStream()) + { + sb.Append(ReadChar()); + + foreach (string s in until) + { + if (sb.ToString().EndsWith(s)) + { + string w = sb.ToString(); + string retval = w.Substring(0, w.Length - 1); + rest = w.Substring(w.Length - 1); + return retval; + } + } + + /* + char[] buffer = new char[until.Length * 2]; + ReadBlock(buffer, 0, until.Length * 2); + + string w = new string(buffer); + if (w.Contains(until)) + { + string ww = w.Substring(0, w.IndexOf(until)); + sb.Append(ww); + + // back up the stream reader + int indexOfUntil = (w.IndexOf(until) + until.Length); + int lengthToBackUp = w.Length - indexOfUntil; + BaseStream.Seek(-1 * lengthToBackUp, SeekOrigin.Current); + break; + } + sb.Append(w); + */ + } + rest = null; + return sb.ToString(); + } + + public string ReadBetween(string start, string end, bool discard) + { + string nextstr = String.Empty; + bool inside = false; + // 0000000-3842-17774-}ehaomfd + while (!BaseStream.EndOfStream()) + { + nextstr += ReadChar(); + if (!inside) + { + if (nextstr.EndsWith(start)) + { + inside = true; + nextstr = String.Empty; + if (!discard) nextstr += start; + } + } + else + { + if (nextstr.EndsWith(end)) + { + if (discard) + { + nextstr = nextstr.Substring(0, nextstr.Length - end.Length); + } + return nextstr; + } + } + } + return String.Empty; + } + + public int CurrentLine { get; private set; } + + private NewLineSequence _ActualNewLineSequenceForAutomatic = NewLineSequence.SystemDefault; + public string ReadLine() + { + StringBuilder sb = new StringBuilder(); + if (NewLineSequence == NewLineSequence.Automatic && _ActualNewLineSequenceForAutomatic == NewLineSequence.SystemDefault) + { + // first time around, determine actual new line sequence + while (!BaseStream.EndOfStream()) + { + char c = ReadChar(); + if (c == '\n') + { + char c2 = PeekChar(); + if (c2 == '\r') + { + ReadChar(); + _ActualNewLineSequenceForAutomatic = NewLineSequence.LineFeedCarriageReturn; + break; + } + else + { + _ActualNewLineSequenceForAutomatic = NewLineSequence.LineFeed; + break; + } + } + else if (c == '\r') + { + char c2 = PeekChar(); + if (c2 == '\n') + { + ReadChar(); + _ActualNewLineSequenceForAutomatic = NewLineSequence.CarriageReturnLineFeed; + break; + } + else + { + _ActualNewLineSequenceForAutomatic = NewLineSequence.CarriageReturn; + break; + } + } + else + { + sb.Append(c); + } + } + + CurrentLine++; + return sb.ToString(); + } + else + { + string line = ReadUntil(GetNewLineSequence()); + ReadChars(GetNewLineSequence().Length); + if (line.EndsWith("\r")) + line = line.Substring(0, line.Length - 1); + + CurrentLine++; + return line; + } + } + + /// + /// Closes the current stream and releases any resources (such as sockets and file handles) associated with the current stream. + /// + public void Close() + { + BaseStream.Close(); + } + + /// + /// Aligns the to the specified number of bytes. If the current + /// position of the is not a multiple of the specified number of bytes, + /// the position will be increased by the amount of bytes necessary to bring it to the + /// aligned position. + /// + /// The number of bytes on which to align the . + /// Any additional padding bytes that should be included after aligning to the specified boundary. 
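+    // Worked example of the padding computation below: at position 13 with
+    // alignTo = 4, paddingCount = (4 - 13 % 4) % 4 = 3, so the position moves to 16.
+    // This Align has the same signature as ReaderWriterBase.Align and hides it;
+    // unlike the base version it never extends the stream, it only skips forward
+    // while the padded position stays below the current length.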
+ public void Align(int alignTo, int extraPadding = 0) + { + long paddingCount = ((alignTo - (BaseStream.Position % alignTo)) % alignTo); + paddingCount += extraPadding; + + if (BaseStream.Position + paddingCount < BaseStream.Length) + BaseStream.Position += paddingCount; + } + + public string ReadStringUntilAny(char[] anyOf) + { + StringBuilder sb = new StringBuilder(); + while (!BaseStream.EndOfStream()) + { + char c = ReadChar(); + bool found = false; + for (int i = 0; i < anyOf.Length; i++) + { + if (c == anyOf[i]) + { + found = true; + break; + } + } + if (found) break; + sb.Append(c); + } + BaseStream.Seek(-1, SeekOrigin.Current); + return sb.ToString(); + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/IO/ReaderWriterBase.cs b/editor-dotnet/src/lib/MBS.Editor.Core/IO/ReaderWriterBase.cs new file mode 100644 index 0000000..74aa39d --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/IO/ReaderWriterBase.cs @@ -0,0 +1,79 @@ +using System.Text; + +namespace MBS.Editor.Core.IO; + +public class ReaderWriterBase +{ + private readonly Stream _st; + /// + /// Exposes access to the underlying stream of the . + /// + /// + /// The underlying stream associated with the . + /// + public Stream BaseStream { get { return _st; } } + + public Encoding DefaultEncoding { get; set; } = Encoding.UTF8; + + public NewLineSequence NewLineSequence { get; set; } = NewLineSequence.SystemDefault; + public Endianness Endianness { get; set; } = Endianness.LittleEndian; + + public bool SwapEndianness() + { + if (Endianness == Endianness.LittleEndian) + { + Endianness = Endianness.BigEndian; + return true; + } + else if (Endianness == Endianness.BigEndian) + { + Endianness = Endianness.LittleEndian; + return true; + } + return false; + } + + protected string GetNewLineSequence() + { + switch (NewLineSequence) + { + case NewLineSequence.SystemDefault: return System.Environment.NewLine; + case NewLineSequence.CarriageReturnLineFeed: return "\r\n"; + case NewLineSequence.CarriageReturn: return "\r"; + case NewLineSequence.LineFeed: return "\n"; + } + return System.Environment.NewLine; + } + + public ReaderWriterBase(Stream st) + { + _st = st; + } + + /// + /// Aligns the to the specified number of bytes. If the current + /// position of the is not a multiple of the specified number of bytes, + /// the position will be increased by the amount of bytes necessary to bring it to the + /// aligned position. + /// + /// The number of bytes on which to align the . + /// Any additional padding bytes that should be included after aligning to the specified boundary. 
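+	/// <remarks>
+	/// A worked sketch of the padding arithmetic: at position 6, <c>Align(4)</c> advances to 8 and
+	/// <c>Align(4, 2)</c> advances to 10. When the position is already at the end of the stream,
+	/// the padding bytes are written out as zeroes rather than skipped.
+	/// </remarks>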
+ public void Align(int alignTo, int extraPadding = 0) + { + if (alignTo == 0) + return; + + long paddingCount = ((alignTo - (_st.Position % alignTo)) % alignTo); + paddingCount += extraPadding; + + if (_st.Position == _st.Length) + { + byte[] buffer = new byte[paddingCount]; + _st.Write(buffer, 0, buffer.Length); + } + else + { + _st.Position += paddingCount; + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/IO/Writer.cs b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Writer.cs new file mode 100644 index 0000000..618b122 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/IO/Writer.cs @@ -0,0 +1,1023 @@ +namespace MBS.Editor.Core.IO; + +using System.IO; +using System.Text; + +public class Writer : ReaderWriterBase +{ + + public Writer(Stream st) : base(st) { } + + public void Write(byte[] buffer, int offset, int length) + { + BaseStream.Write(buffer, offset, length); + } + + public void WriteByte(byte value) + { + WriteBytes(new byte[] { value }); + } + + public void WriteBoolean(bool value) + { + WriteByte(value ? (byte)1 : (byte)0); + } + + [CLSCompliant(false)] + public void WriteSByte(sbyte value) + { + WriteBytes(new byte[] { (byte)value }); + } + + public void WriteBytes(byte[] data) + { + if (data == null) return; + + /* + for (int i = 0; i < Transformations.Count; i++) + { + data = Transformations[i].Transform(data); + } + */ + Write(data, 0, data.Length); + + if (AutoFlush) + Flush(); + } + + public bool AutoFlush { get; set; } = false; + + [CLSCompliant(false)] + public void WriteSBytes(sbyte[] data) + { + if (data == null) return; + + // thanks https://stackoverflow.com/questions/829983/how-to-convert-a-sbyte-to-byte-in-c + byte[] realdata = (byte[])(Array)data; + /* + for (int i = 0; i < Transformations.Count; i++) + { + realdata = Transformations[i].Transform(realdata); + } + */ + Write(realdata, 0, realdata.Length); + } + + public void WriteFixedLengthBytes(byte[] data, int count) + { + if (data == null) data = new byte[0]; + byte[] realdata = new byte[count]; + Array.Copy(data, 0, realdata, 0, Math.Min(realdata.Length, count)); + WriteBytes(realdata); + } + + public void WriteChar(char value) + { + WriteChar(value, DefaultEncoding); + } + public void WriteChar(char value, Encoding encoding) + { + byte[] data = encoding.GetBytes(new char[] { value }); + WriteBytes(data); + } + public void WriteCharArray(char[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteChar(values[i]); + } + } + + public void Write(char value) + { + Write(value.ToString()); + } + public void Write(string value) + { + WriteFixedLengthString(value); + } + public void WriteLine() + { + WriteLine(String.Empty); + } + public void WriteLine(char value) + { + WriteLine(value.ToString()); + } + public void WriteLine(string value) + { + WriteFixedLengthString(value + GetNewLineSequence()); + } + + public void WriteGuid(Guid guid) + { + WriteBytes(guid.ToByteArray()); + } + public void WriteFixedLengthString(string value) + { + WriteFixedLengthString(value, DefaultEncoding); + } + public void WriteFixedLengthString(string value, int length) + { + WriteFixedLengthString(value, DefaultEncoding, length); + } + [CLSCompliant(false)] + public void WriteFixedLengthString(string value, uint length) + { + WriteFixedLengthString(value, DefaultEncoding, length); + } + public void WriteFixedLengthString(string value, Encoding encoding) + { + if (value == null) + return; + + byte[] data = encoding.GetBytes(value); + WriteBytes(data); + } + public void WriteFixedLengthString(string value, int 
length, char paddingChar) + { + WriteFixedLengthString(value, DefaultEncoding, length, paddingChar); + } + [CLSCompliant(false)] + public void WriteFixedLengthString(string value, uint length, char paddingChar) + { + this.WriteFixedLengthString(value, DefaultEncoding, length, paddingChar); + } + public void WriteFixedLengthString(string value, Encoding encoding, int length) + { + this.WriteFixedLengthString(value, encoding, length, '\0'); + } + [CLSCompliant(false)] + public void WriteFixedLengthString(string value, Encoding encoding, uint length) + { + WriteFixedLengthString(value, encoding, length, '\0'); + } + public void WriteFixedLengthString(string value, Encoding encoding, int length, char paddingChar) + { + WriteFixedLengthString(value, encoding, (uint)length, paddingChar); + } + [CLSCompliant(false)] + public void WriteFixedLengthString(string value, Encoding encoding, uint length, char paddingChar) + { + if (value == null) + return; + + string v = value; + if (v == null) v = String.Empty; + byte[] data = encoding.GetBytes(v); + byte[] realData = new byte[length]; + + uint realLength = length; + if (data.Length < realLength) + { + realLength = (uint)data.Length; + } + Array.Copy(data, 0, realData, 0, realLength); + + for (int i = data.Length; i < realData.Length; i++) + { + realData[i] = (byte)paddingChar; + } + WriteBytes(realData); + } + + public void WriteLengthPrefixedString(string value) + { + WriteLengthPrefixedString(value, DefaultEncoding); + } + public void WriteLengthPrefixedString(string value, Encoding encoding) + { + Write7BitEncodedInt32(value.Length); + WriteFixedLengthString(value); + } + + public void Write7BitEncodedInt32(int value) + { + // TODO: verify this actually works + uint v = (uint)value; + while (v >= 0x80) + { + WriteByte((byte)(v | 0x80)); + v >>= 7; + } + WriteByte((byte)v); + } + public int Calculate7BitEncodedInt32Size(int value) + { + // TODO: verify this actually works + int size = 1; + uint v = (uint)value; + while (v >= 0x80) + { + size++; + v >>= 7; + } + return size; + } + + public void WriteNullTerminatedString(string sz) + { + WriteNullTerminatedString(sz, Encoding.UTF8); + } + public void WriteNullTerminatedString(string sz, Encoding encoding) + { + byte[] values = encoding.GetBytes(sz + '\0'); + WriteBytes(values); + } + public void WriteNullTerminatedString(string sz, int length) + { + // TODO: not sure how to handle this, should "length" refer to just the string length (data length) or should it include the null-terminator (field length)? + string ssz = sz.Substring(0, Math.Min(sz.Length, length) - 1); + WriteNullTerminatedString(ssz); + } + public void WriteNullTerminatedString(string sz, Encoding encoding, int length) + { + // TODO: not sure how to handle this, should "length" refer to just the string length (data length) or should it include the null-terminator (field length)? + string ssz = sz.Substring(0, Math.Min(sz.Length, length) - 1); + WriteNullTerminatedString(ssz, encoding); + } + + /// + /// Writes a two-byte signed integer to the current stream and advances the stream position by two bytes. + /// + /// The two-byte signed integer to write. + /// An I/O error occurs. + /// The stream is closed. 
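+	/// <example>
+	/// A hypothetical sketch of the effect of <see cref="Endianness"/> (the writer variable is assumed):
+	/// <code>
+	/// var writer = new Writer(stream) { Endianness = Endianness.BigEndian };
+	/// writer.WriteInt16(0x1234); // emits 0x12 0x34; with little-endian the bytes are 0x34 0x12
+	/// </code>
+	/// </example>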
+ public void WriteInt16(short value) + { + byte[] _buffer = BitConverter.GetBytes(value); + byte[] buffer = new byte[2]; + if (Endianness == Endianness.BigEndian) + { + buffer[1] = _buffer[0]; + buffer[0] = _buffer[1]; + } + else + { + buffer[0] = _buffer[0]; + buffer[1] = _buffer[1]; + } + WriteBytes(buffer); + } + /// + /// Writes an array of two-byte signed integers to the current stream and advances the stream position by two bytes times the number of values written. + /// + /// The array of two-byte signed integers to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt16Array(short[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteInt16(values[i]); + } + } + + /// + /// Writes a variable-length unsigned integer to the current stream and advances the stream position by the number of bytes written. + /// + /// The value to write. + /// a representing the number of bytes written to the stream for the given + /// This code is taken from the answer on StackOverflow https://stackoverflow.com/q/3564685 + public int WriteVariableLengthInt32(int value) + { + // thx stackoverflow :) https://stackoverflow.com/q/3564685 + int count = 0; + bool first = true; + while (first || value > 0) + { + first = false; + byte lower7bits = (byte)(value & 0x7f); + value >>= 7; + if (value > 0) + lower7bits |= 128; + WriteByte(lower7bits); + count++; + } + return count; + } + + /// + /// Writes a two-byte unsigned integer to the current stream and advances the stream position by two bytes. + /// + /// The two-byte unsigned integer to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt16(ushort value) + { + byte[] _buffer = BitConverter.GetBytes(value); + byte[] buffer = new byte[2]; + if (Endianness == Endianness.BigEndian) + { + buffer[1] = _buffer[0]; + buffer[0] = _buffer[1]; + } + else + { + buffer[0] = _buffer[0]; + buffer[1] = _buffer[1]; + } + WriteBytes(buffer); + } + /// + /// Writes an array of two-byte unsigned integers to the current stream and advances the stream position by two bytes times the number of values written. + /// + /// The array of two-byte unsigned integers to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt16Array(ushort[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteUInt16(values[i]); + } + } + /// + /// Writes a three-byte signed integer to the current stream and advances the stream position by three bytes. + /// + /// The three-byte signed integer to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt24(int value) + { + byte[] buffer = new byte[3]; + if (Endianness == Endianness.BigEndian) + { + buffer[2] = (byte)value; + buffer[1] = (byte)(value >> 8); + buffer[0] = (byte)(value >> 16); + } + else + { + buffer[0] = (byte)value; + buffer[1] = (byte)(value >> 8); + buffer[2] = (byte)(value >> 16); + } + WriteBytes(buffer); + } + /// + /// Writes an array of three-byte signed integers to the current stream and advances the stream position by three bytes times the number of values written. + /// + /// The array of three-byte signed integers to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt24Array(int[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteInt24(values[i]); + } + } + /// + /// Writes a three-byte unsigned integer to the current stream and advances the stream position by three bytes. 
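+	/// Only the low 24 bits of the value are written; any higher-order bits are discarded.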
+ /// + /// The three-byte unsigned integer to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt24(uint value) + { + byte[] buffer = new byte[3]; + if (Endianness == Endianness.BigEndian) + { + buffer[2] = (byte)value; + buffer[1] = (byte)(value >> 8); + buffer[0] = (byte)(value >> 16); + } + else + { + buffer[0] = (byte)value; + buffer[1] = (byte)(value >> 8); + buffer[2] = (byte)(value >> 16); + } + WriteBytes(buffer); + } + /// + /// Writes an array of three-byte unsigned integers to the current stream and advances the stream position by three bytes times the number of values written. + /// + /// The array of three-byte unsigned integers to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt24Array(uint[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteUInt24(values[i]); + } + } + /// + /// Writes a four-byte signed integer to the current stream and advances the stream position by four bytes. + /// + /// The four-byte signed integer to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt32(int value) + { + byte[] _buffer = BitConverter.GetBytes(value); + byte[] buffer = new byte[4]; + if (Endianness == Endianness.BigEndian) + { + buffer[3] = _buffer[0]; + buffer[2] = _buffer[1]; + buffer[1] = _buffer[2]; + buffer[0] = _buffer[3]; + } + else + { + buffer[0] = _buffer[0]; + buffer[1] = _buffer[1]; + buffer[2] = _buffer[2]; + buffer[3] = _buffer[3]; + } + WriteBytes(buffer); + } + /// + /// Writes an array of four-byte signed integers to the current stream and advances the stream position by four bytes times the number of values written. + /// + /// The array of four-byte signed integers to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt32Array(int[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteInt32(values[i]); + } + } + /// + /// Writes a four-byte unsigned integer to the current stream and advances the stream position by four bytes. + /// + /// The four-byte unsigned integer to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt32(uint value) + { + byte[] _buffer = BitConverter.GetBytes(value); + byte[] buffer = new byte[4]; + if (Endianness == Endianness.BigEndian) + { + buffer[3] = _buffer[0]; + buffer[2] = _buffer[1]; + buffer[1] = _buffer[2]; + buffer[0] = _buffer[3]; + } + else + { + buffer[0] = _buffer[0]; + buffer[1] = _buffer[1]; + buffer[2] = _buffer[2]; + buffer[3] = _buffer[3]; + } + WriteBytes(buffer); + } + /// + /// Writes an array of four-byte unsigned integers to the current stream and advances the stream position by four bytes times the number of values written. + /// + /// The array of four-byte unsigned integers to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt32Array(uint[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteUInt32(values[i]); + } + } + /// + /// Writes an eight-byte signed integer to the current stream and advances the stream position by eight bytes. + /// + /// The eight-byte signed integer to write. + /// An I/O error occurs. + /// The stream is closed. 
+ public void WriteInt64(long value) + { + byte[] _buffer = new byte[8]; + if (Endianness == Endianness.BigEndian) + { + _buffer[0] = (byte)(value >> 56); + _buffer[1] = (byte)(value >> 48); + _buffer[2] = (byte)(value >> 40); + _buffer[3] = (byte)(value >> 32); + _buffer[4] = (byte)(value >> 24); + _buffer[5] = (byte)(value >> 16); + _buffer[6] = (byte)(value >> 8); + _buffer[7] = (byte)value; + } + else + { + _buffer[0] = (byte)value; + _buffer[1] = (byte)(value >> 8); + _buffer[2] = (byte)(value >> 16); + _buffer[3] = (byte)(value >> 24); + _buffer[4] = (byte)(value >> 32); + _buffer[5] = (byte)(value >> 40); + _buffer[6] = (byte)(value >> 48); + _buffer[7] = (byte)(value >> 56); + } + WriteBytes(_buffer); + } + /// + /// Writes an array of eight-byte signed integers to the current stream and advances the stream position by eight bytes times the number of values written. + /// + /// The array of eight-byte signed integers to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteInt64Array(long[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteInt64(values[i]); + } + } + /// + /// Writes an eight-byte unsigned integer to the current stream and advances the stream position by eight bytes. + /// + /// The eight-byte unsigned integer to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt64(ulong value) + { + byte[] _buffer = new byte[8]; + if (Endianness == Endianness.BigEndian) + { + _buffer[0] = (byte)(value >> 56); + _buffer[1] = (byte)(value >> 48); + _buffer[2] = (byte)(value >> 40); + _buffer[3] = (byte)(value >> 32); + _buffer[4] = (byte)(value >> 24); + _buffer[5] = (byte)(value >> 16); + _buffer[6] = (byte)(value >> 8); + _buffer[7] = (byte)value; + } + else + { + _buffer[0] = (byte)value; + _buffer[1] = (byte)(value >> 8); + _buffer[2] = (byte)(value >> 16); + _buffer[3] = (byte)(value >> 24); + _buffer[4] = (byte)(value >> 32); + _buffer[5] = (byte)(value >> 40); + _buffer[6] = (byte)(value >> 48); + _buffer[7] = (byte)(value >> 56); + } + WriteBytes(_buffer); + } + /// + /// Writes an array of eight-byte unsigned integers to the current stream and advances the stream position by eight bytes times the number of values written. + /// + /// The array of eight-byte unsigned integers to write. + /// An I/O error occurs. + /// The stream is closed. + [CLSCompliant(false)] + public void WriteUInt64Array(ulong[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteUInt64(values[i]); + } + } + /// + /// Writes an arbitrary object to the current stream and advances the stream position by the number of bytes needed to store the object. + /// + /// The object to write. + /// An I/O error occurs. + /// The stream is closed. 
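+	/// <example>
+	/// A hypothetical sketch: primitive values dispatch to their typed writers, while any other type
+	/// is decomposed into its instance fields via reflection (the <c>Point</c> type below is assumed):
+	/// <code>
+	/// writer.WriteObject(42);                         // equivalent to WriteInt32(42)
+	/// writer.WriteObject(new Point { X = 1, Y = 2 }); // each field is written in turn
+	/// </code>
+	/// </example>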
+ public void WriteObject(object value) + { + if (value == null) + return; + + if (value is object[]) + { + object[] array = (object[])value; + for (int i = 0; i < array.Length; i++) + { + WriteObject(array[i]); + } + return; + } + + Type objectType = value.GetType(); + + if (objectType == typeof(Boolean)) + { + WriteBoolean((bool)value); + return; + } + else if (objectType == typeof(Byte)) + { + WriteByte((byte)value); + return; + } + else if (objectType == typeof(Byte[])) + { + WriteBytes((byte[])value); + return; + } + else if (objectType == typeof(SByte)) + { + WriteSByte((sbyte)value); + return; + } + else if (objectType == typeof(String)) + { + WriteLengthPrefixedString((string)value); + return; + } + else if (objectType == typeof(Char)) + { + WriteChar((char)value); + return; + } + else if (objectType == typeof(Char[])) + { + WriteCharArray((char[])value); + return; + } + else if (objectType == typeof(Single)) + { + WriteSingle((float)value); + return; + } + else if (objectType == typeof(Double)) + { + WriteDouble((double)value); + return; + } + else if (objectType == typeof(Int16)) + { + WriteInt16((short)value); + return; + } + else if (objectType == typeof(Int32)) + { + WriteInt32((int)value); + return; + } + else if (objectType == typeof(Int64)) + { + WriteInt64((long)value); + return; + } + else if (objectType == typeof(UInt16)) + { + WriteUInt16((ushort)value); + return; + } + else if (objectType == typeof(UInt32)) + { + WriteUInt32((uint)value); + return; + } + else if (objectType == typeof(UInt64)) + { + WriteUInt64((ulong)value); + return; + } + else if (objectType == typeof(DateTime)) + { + WriteDateTime((DateTime)value); + return; + } + + System.Reflection.FieldInfo[] fis = (objectType.GetFields(System.Reflection.BindingFlags.Default | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.Instance)); + foreach (System.Reflection.FieldInfo fi in fis) + { + // Type fieldType = fi.FieldType; + WriteObject(fi.GetValue(value)); + } + } + /// + /// Writes an array of arbitrary objects to the current stream and advances the stream position by the number of bytes needed to store the objects. + /// + /// The array of objects to write. + /// An I/O error occurs. + /// The stream is closed. + public void WriteObjectArray(object[] values) + { + for (int i = 0; i < values.Length; i++) + { + WriteObject(values[i]); + } + } + + /// + /// Writes a in a format that encodes the property in a 2-bit field + /// and the property in a 62-bit field. + /// + /// An I/O error occurs. + /// The stream is closed. + public void WriteDateTime(DateTime value) + { + WriteInt64(value.ToBinary()); + } + + /// + /// Encodes a into a 32-bit value as used in MS-DOS and Windows FAT directory entries. + /// + /// Date time. + /// + /// Each portion of the time stamp (year, month etc.) is encoded within specific bits of the 32-bit timestamp. The epoch for this time format is 1980. This format has a granularity of 2 seconds. 
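+	/// For example, under this layout <c>new DateTime(1980, 1, 1)</c> packs to <c>0x00210000</c>
+	/// (month 1 in bits 24-21, day 1 in bits 20-16, and every other field zero).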
+	///
+	public void WriteDOSFileTime(DateTime dateTime)
+	{
+		// The 32 bit date and time format used in the MSDOS and Windows FAT directory entry
+
+		// Year   Month   Day    Hour   Min    Seconds / 2
+		// Bits 31-25  24-21  20-16  15-11  10-5   4-0
+
+		/*
+		cal.set(Calendar.YEAR, (int)((dosTime >> 25) & 0x7F) + 1980);
+		cal.set(Calendar.MONTH, (int)((dosTime >> 21) & 0x0f) - 1);
+		cal.set(Calendar.DATE, (int)(dosTime >> 16) & 0x1f);
+		cal.set(Calendar.HOUR_OF_DAY, (int)(dosTime >> 11) & 0x1f);
+		cal.set(Calendar.MINUTE, (int)(dosTime >> 5) & 0x3f);
+		cal.set(Calendar.SECOND, (int)(dosTime << 1) & 0x3e);
+		cal.set(Calendar.MILLISECOND, 0);
+		*/
+
+		uint seconds = (uint)(dateTime.Second / 2);
+		uint min = (uint)dateTime.Minute;
+		uint hour = (uint)dateTime.Hour;
+		uint day = (uint)dateTime.Day;
+		uint month = (uint)dateTime.Month;
+		uint year = (uint)dateTime.Year - 1980; // the DOS/FAT epoch is 1980
+
+		uint value = (seconds | (min << 5) | (hour << 11) | (day << 16) | (month << 21) | (year << 25));
+		WriteUInt32(value);
+	}
+
+	///
+	/// Writes a four-byte floating-point value to the current stream and advances the stream position by four bytes.
+	///
+	/// The four-byte floating-point value to write.
+	/// An I/O error occurs.
+	/// The stream is closed.
+	public void WriteSingle(float value)
+	{
+		byte[] buffer = BitConverter.GetBytes(value);
+		byte[] _buffer = new byte[4];
+		if (Endianness == Endianness.BigEndian)
+		{
+			_buffer[0] = buffer[3];
+			_buffer[1] = buffer[2];
+			_buffer[2] = buffer[1];
+			_buffer[3] = buffer[0];
+		}
+		else
+		{
+			_buffer[0] = buffer[0];
+			_buffer[1] = buffer[1];
+			_buffer[2] = buffer[2];
+			_buffer[3] = buffer[3];
+		}
+		WriteBytes(_buffer);
+	}
+	public void WriteSingleArray(float[] value)
+	{
+		for (int i = 0; i < value.Length; i++)
+		{
+			WriteSingle(value[i]);
+		}
+	}
+
+	///
+	/// Writes an eight-byte floating-point value to the current stream and advances the stream position by eight bytes.
+	///
+	/// The eight-byte floating-point value to write.
+	/// An I/O error occurs.
+	/// The stream is closed.
+	public void WriteDouble(double value)
+	{
+		byte[] buffer = BitConverter.GetBytes(value);
+		byte[] _buffer = new byte[8];
+		if (Endianness == Endianness.BigEndian)
+		{
+			_buffer[0] = buffer[7];
+			_buffer[1] = buffer[6];
+			_buffer[2] = buffer[5];
+			_buffer[3] = buffer[4];
+			_buffer[4] = buffer[3];
+			_buffer[5] = buffer[2];
+			_buffer[6] = buffer[1];
+			_buffer[7] = buffer[0];
+		}
+		else
+		{
+			_buffer[0] = buffer[0];
+			_buffer[1] = buffer[1];
+			_buffer[2] = buffer[2];
+			_buffer[3] = buffer[3];
+			_buffer[4] = buffer[4];
+			_buffer[5] = buffer[5];
+			_buffer[6] = buffer[6];
+			_buffer[7] = buffer[7];
+		}
+		WriteBytes(_buffer);
+	}
+	///
+	/// Writes a decimal value to the current stream and advances the stream position by sixteen bytes.
+	///
+	/// The decimal value to write.
+	/// An I/O error occurs.
+	/// The stream is closed.
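+	/// The sixteen bytes are the four 32-bit parts returned by <see cref="decimal.GetBits(decimal)"/>, in lo, mid, hi, flags order.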
+ public void WriteDecimal(decimal value) + { + int[] bits = decimal.GetBits(value); + int lo = bits[0], mid = bits[1], hi = bits[2], flags = bits[3]; + + byte[] buffer = new byte[16]; + if (Endianness == Endianness.LittleEndian) + { + buffer[0] = (byte)lo; + buffer[1] = (byte)(lo >> 8); + buffer[2] = (byte)(lo >> 16); + buffer[3] = (byte)(lo >> 24); + buffer[4] = (byte)mid; + buffer[5] = (byte)(mid >> 8); + buffer[6] = (byte)(mid >> 16); + buffer[7] = (byte)(mid >> 24); + buffer[8] = (byte)hi; + buffer[9] = (byte)(hi >> 8); + buffer[10] = (byte)(hi >> 16); + buffer[11] = (byte)(hi >> 24); + buffer[12] = (byte)flags; + buffer[13] = (byte)(flags >> 8); + buffer[14] = (byte)(flags >> 16); + buffer[15] = (byte)(flags >> 24); + } + else + { + buffer[15] = (byte)lo; + buffer[14] = (byte)(lo >> 8); + buffer[13] = (byte)(lo >> 16); + buffer[12] = (byte)(lo >> 24); + buffer[11] = (byte)mid; + buffer[10] = (byte)(mid >> 8); + buffer[9] = (byte)(mid >> 16); + buffer[9] = (byte)(mid >> 24); + buffer[7] = (byte)hi; + buffer[6] = (byte)(hi >> 8); + buffer[5] = (byte)(hi >> 16); + buffer[4] = (byte)(hi >> 24); + buffer[3] = (byte)flags; + buffer[2] = (byte)(flags >> 8); + buffer[1] = (byte)(flags >> 16); + buffer[0] = (byte)(flags >> 24); + } + WriteBytes(buffer); + } + + public void WriteVersion(Version version) + { + WriteVersion(version, 4); + } + public void WriteVersion(Version version, int count) + { + switch (count) + { + case 1: + { + WriteByte(1); + WriteInt32(version.Major); + break; + } + case 2: + { + WriteByte(2); + WriteInt32(version.Major); + WriteInt32(version.Minor); + break; + } + case 3: + { + WriteByte(3); + WriteInt32(version.Major); + WriteInt32(version.Minor); + WriteInt32(version.Build); + break; + } + case 4: + { + WriteByte(4); + WriteInt32(version.Major); + WriteInt32(version.Minor); + WriteInt32(version.Build); + WriteInt32(version.Revision); + break; + } + } + } + + public long CountAlignment(int paddingCount) + { + return CountAlignment(paddingCount, 0); + } + public long CountAlignment(int paddingCount, int dataCount) + { + long position = (BaseStream.Position + dataCount); + int num = (int)(position % paddingCount); + return num; + } + + public void WriteDoubleEndianInt16(short value) + { + WriteInt16(value); + SwapEndianness(); + WriteInt16(value); + SwapEndianness(); + } + public void WriteDoubleEndianInt32(int value) + { + WriteInt32(value); + SwapEndianness(); + WriteInt32(value); + SwapEndianness(); + } + public void WriteDoubleEndianInt64(long value) + { + WriteInt64(value); + SwapEndianness(); + WriteInt64(value); + SwapEndianness(); + } + [CLSCompliant(false)] + public void WriteDoubleEndianUInt16(ushort value) + { + WriteUInt16(value); + SwapEndianness(); + WriteUInt16(value); + SwapEndianness(); + } + [CLSCompliant(false)] + public void WriteDoubleEndianUInt32(uint value) + { + WriteUInt32(value); + SwapEndianness(); + WriteUInt32(value); + SwapEndianness(); + } + [CLSCompliant(false)] + public void WriteDoubleEndianUInt64(ulong value) + { + WriteUInt64(value); + SwapEndianness(); + WriteUInt64(value); + SwapEndianness(); + } + + public void WriteUInt16String(string value) + { + WriteUInt16String(value, DefaultEncoding); + } + public void WriteUInt16String(string value, Encoding encoding) + { + ushort length = (ushort)value.Length; + byte[] input = encoding.GetBytes(value); + byte[] output = new byte[length]; + Array.Copy(input, 0, output, 0, output.Length); + WriteUInt16(length); + WriteBytes(output); + } + + public void WriteUInt16SizedString(string value) + { 
+ WriteUInt16SizedString(value, DefaultEncoding); + } + public void WriteUInt16SizedString(string value, Encoding encoding) + { + ushort length = (ushort)value.Length; + byte[] input = encoding.GetBytes(value); + byte[] output = new byte[length]; + Array.Copy(input, 0, output, 0, output.Length); + WriteBytes(output); + } + + public void WriteInt64String(string value) + { + WriteInt64(value.Length); + WriteFixedLengthString(value); + } + + /// + /// Clears all buffers for the associated and causes + /// any buffered data to be written to the underlying device. + /// + public void Flush() + { + BaseStream.Flush(); + } + /// + /// Closes the underlying . + /// + public void Close() + { + BaseStream.Close(); + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/InvalidDataFormatException.cs b/editor-dotnet/src/lib/MBS.Editor.Core/InvalidDataFormatException.cs new file mode 100644 index 0000000..7cb78db --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/InvalidDataFormatException.cs @@ -0,0 +1,29 @@ +namespace MBS.Editor.Core; + +public class InvalidDataFormatException : Exception +{ + /// + /// Initializes a new instance of the class. + /// + public InvalidDataFormatException() : base() { } + + /// + /// Initializes a new instance of the class + /// with a specified error message. + /// + /// The message that describes the error. + public InvalidDataFormatException(string? message) : base(message) { } + + /// + /// Initializes a new instance of the class + /// with a specified error message and a reference to the inner exception that is the + /// cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// + /// The exception that is the cause of the current exception, or a null reference + /// (Nothing in Visual Basic) if no inner exception is specified. + /// + public InvalidDataFormatException(string? message, Exception? innerException) : base(message, innerException) { } + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/MBS.Editor.Core.csproj b/editor-dotnet/src/lib/MBS.Editor.Core/MBS.Editor.Core.csproj index bb23fb7..192a048 100644 --- a/editor-dotnet/src/lib/MBS.Editor.Core/MBS.Editor.Core.csproj +++ b/editor-dotnet/src/lib/MBS.Editor.Core/MBS.Editor.Core.csproj @@ -1,9 +1,10 @@ - - - - net8.0 - enable - enable - - - + + + + + + net8.0 + enable + enable + + \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModel.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModel.cs index 9d59c2f..c342e97 100644 --- a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModel.cs +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModel.cs @@ -3,5 +3,34 @@ public class ObjectModel { public PropertyCollection Properties { get; } = new PropertyCollection(); + + public virtual void Clear() + { + + } + + public virtual void CopyTo(ObjectModel dest) + { + + } + + public static T FromType() where T : ObjectModel, new() + { + T objectModel = new T(); + return objectModel; + } + public static ObjectModel FromType(Type type) + { + if (type.IsAbstract || !type.IsSubclassOf(typeof(ObjectModel))) + { + throw new InvalidCastException("type must be a non-abstract subclass of ObjectModel"); + } + ObjectModel? 
objectModel = type.Assembly.CreateInstance(type.FullName) as ObjectModel; + if (objectModel == null) + { + throw new TypeLoadException("could not create ObjectModel from type name"); + } + return objectModel; + } } diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelMetadata.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelMetadata.cs new file mode 100644 index 0000000..b3a9425 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelMetadata.cs @@ -0,0 +1,6 @@ +namespace MBS.Editor.Core; + +public class ObjectModelMetadata +{ + public string[] Path { get; set; } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelNotSupportedException.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelNotSupportedException.cs new file mode 100644 index 0000000..80e9a03 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModelNotSupportedException.cs @@ -0,0 +1,37 @@ +namespace MBS.Editor.Core; + +public class ObjectModelNotSupportedException : Exception +{ + /// + /// Initializes a new instance of the class. + /// + public ObjectModelNotSupportedException() : base() { } + + /// + /// Initializes a new instance of the class + /// with a specified error message. + /// + /// The message that describes the error. + public ObjectModelNotSupportedException(string? message) : base(message) { } + + /// + /// Initializes a new instance of the class + /// with a specified error message and a reference to the inner exception that is the + /// cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// + /// The exception that is the cause of the current exception, or a null reference + /// (Nothing in Visual Basic) if no inner exception is specified. + /// + public ObjectModelNotSupportedException(string? message, Exception? innerException) : base(message, innerException) { } + + public Type? ExpectedObjectModelType { get; } + public Type? ActualObjectModelType { get; } + + public ObjectModelNotSupportedException(string? message, Type expectedObjectModelType, Type actualObjectModelType) + { + + } + +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseField.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseField.cs new file mode 100644 index 0000000..72cfecd --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseField.cs @@ -0,0 +1,89 @@ +// +// DatabaseField.cs - represents a field (column) in a DatabaseObjectModel +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using System; + +namespace MBS.Editor.Core.ObjectModels.Database +{ + /// + /// Represents a field (column) in a . 
+ /// + public class DatabaseField : ICloneable + { + + public class DatabaseFieldCollection + : System.Collections.ObjectModel.Collection + { + public DatabaseField Add(string Name, object Value = null, Type dataType = null) + { + DatabaseField df = new DatabaseField(); + df.Name = Name; + df.Value = Value; + df.DataType = dataType; + + base.Add(df); + return df; + } + + public DatabaseField this[string Name] + { + get + { + for (int i = 0; i < Count; i++) + { + if (this[i].Name.Equals(Name)) return this[i]; + } + return null; + } + } + } + + public DatabaseField(string name = "", object value = null) + { + Name = name; + Value = value; + } + + public string Name { get; set; } = String.Empty; + public object Value { get; set; } = null; + public Type DataType { get; set; } = null; + + public object Clone() + { + DatabaseField clone = new DatabaseField(); + clone.Name = (Name.Clone() as string); + if (Value is ICloneable) + { + clone.Value = (Value as ICloneable).Clone(); + } + else + { + clone.Value = Value; + } + return clone; + } + + public override string ToString() + { + return String.Format("{0} = {1}", Name, Value); + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseObjectModel.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseObjectModel.cs new file mode 100644 index 0000000..0298076 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseObjectModel.cs @@ -0,0 +1,92 @@ +// +// DatabaseObjectModel.cs - provides an ObjectModel for manipulating databases +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + + +namespace MBS.Editor.Core.ObjectModels.Database +{ + /// + /// Provides an for manipulating databases. 
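+	/// <example>
+	/// A minimal sketch (the table name is assumed):
+	/// <code>
+	/// var db = new DatabaseObjectModel();
+	/// db.Tables.Add(new DatabaseTable() { Name = "Customers" });
+	/// </code>
+	/// </example>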
+ /// + public class DatabaseObjectModel : ObjectModel + { + private static ObjectModelMetadata _omr = null; + public static ObjectModelMetadata Metadata + { + get + { + if (_omr == null) + { + _omr = new ObjectModelMetadata(); + _omr.Path = new string[] { "General", "Database" }; + } + return _omr; + } + } + + public string Name { get; set; } = null; + public DatabaseTable.DatabaseTableCollection Tables { get; } = new DatabaseTable.DatabaseTableCollection(); + + public override void Clear() + { + Tables.Clear(); + } + public override void CopyTo(ObjectModel where) + { + DatabaseObjectModel clone = (where as DatabaseObjectModel); + if (clone == null) + throw new ObjectModelNotSupportedException(); + + for (int i = 0; i < Tables.Count; i++) + { + clone.Tables.Add(Tables[i].Clone() as DatabaseTable); + } + } + +/* + public static DatabaseObjectModel FromMarkup(MarkupTagElement tag) + { + DatabaseObjectModel db = new DatabaseObjectModel(); + for (int i = 0; i < tag.Elements.Count; i++) + { + MarkupTagElement tag2 = (tag.Elements[i] as MarkupTagElement); + if (tag2 == null) continue; + if (tag2.FullName == "Tables") + { + foreach (MarkupElement elTable in tag2.Elements ) + { + MarkupTagElement tagTable = (elTable as MarkupTagElement); + if (tagTable == null) continue; + if (tagTable.FullName != "Table") continue; + + MarkupAttribute attName = tag2.Attributes["Name"]; + if (attName == null) continue; + + DatabaseTable dt = new DatabaseTable(); + dt.Name = attName.Value; + db.Tables.Add(dt); + } + } + } + return db; + } +*/ + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseRecord.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseRecord.cs new file mode 100644 index 0000000..875566c --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseRecord.cs @@ -0,0 +1,70 @@ +// +// DatabaseRecord.cs - represents a record (row) in a DatabaseObjectModel +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using System; + +namespace MBS.Editor.Core.ObjectModels.Database +{ + /// + /// Represents a record (row) in a . 
+ /// + public class DatabaseRecord : ICloneable + { + + public class DatabaseRecordCollection + : System.Collections.ObjectModel.Collection + { + public DatabaseRecord Add(params DatabaseField[] parameters) + { + DatabaseRecord dr = new DatabaseRecord(); + foreach (DatabaseField df in parameters) + { + dr.Fields.Add(df.Name, df.Value); + } + return dr; + } + } + + public DatabaseRecord(params DatabaseField[] fields) + { + for (int i = 0; i < fields.Length; i++) + { + Fields.Add(fields[i]); + } + } + + private DatabaseField.DatabaseFieldCollection mvarFields = new DatabaseField.DatabaseFieldCollection (); + public DatabaseField.DatabaseFieldCollection Fields + { + get { return mvarFields; } + } + + public object Clone() + { + DatabaseRecord clone = new DatabaseRecord(); + for (int i = 0; i < Fields.Count; i++) + { + clone.Fields.Add(Fields[i].Clone() as DatabaseField); + } + return clone; + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseTable.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseTable.cs new file mode 100644 index 0000000..69c4356 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/Database/DatabaseTable.cs @@ -0,0 +1,95 @@ +// +// DatabaseTable.cs - represents a table in a DatabaseObjectModel +// +// Author: +// Michael Becker +// +// Copyright (c) 2011-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using System; +using System.Collections.Generic; + +namespace MBS.Editor.Core.ObjectModels.Database +{ + /// + /// Represents a table in a . + /// + public class DatabaseTable : ICloneable + { + public class DatabaseTableCollection + : System.Collections.ObjectModel.Collection + { + public DatabaseTable this[string name] + { + get + { + if (_itemsByName.ContainsKey(name)) + return _itemsByName[name]; + return null; + } + } + + private Dictionary _itemsByName = new Dictionary(); + protected override void ClearItems() + { + base.ClearItems(); + _itemsByName.Clear(); + } + protected override void InsertItem(int index, DatabaseTable item) + { + base.InsertItem(index, item); + _itemsByName[item.Name] = item; + } + protected override void RemoveItem(int index) + { + if (_itemsByName.ContainsKey(this[index].Name)) + _itemsByName.Remove(this[index].Name); + base.RemoveItem(index); + } + } + + /// + /// Gets or sets the name of the . + /// + /// The name of the . + public string Name { get; set; } = String.Empty; + /// + /// Gets a collection of instances representing the fields (columns) in the . + /// + /// The fields (columns) in the . + public DatabaseField.DatabaseFieldCollection Fields { get; } = new DatabaseField.DatabaseFieldCollection(); + /// + /// Gets a collection of instances representing the records (rows) in the . + /// + /// The records (rows) in the . 
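+		/// <example>
+		/// A hypothetical sketch of populating a table (column name and value are assumed):
+		/// <code>
+		/// table.Fields.Add("Id", dataType: typeof(int));
+		/// table.Records.Add(new DatabaseRecord(new DatabaseField("Id", 1)));
+		/// </code>
+		/// </example>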
+ public DatabaseRecord.DatabaseRecordCollection Records { get; } = new DatabaseRecord.DatabaseRecordCollection(); + + public object Clone() + { + DatabaseTable clone = new DatabaseTable(); + clone.Name = (Name.Clone() as string); + for (int i = 0; i < Fields.Count; i++) + { + clone.Fields.Add(Fields[i].Clone() as DatabaseField); + } + for (int i = 0; i < Records.Count; i++) + { + clone.Records.Add(Records[i].Clone() as DatabaseRecord); + } + return clone; + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSource.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSource.cs new file mode 100644 index 0000000..34cd688 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSource.cs @@ -0,0 +1,25 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public abstract class FileSource +{ + public long Length { get { return GetLengthInternal(); } } + + protected abstract long GetLengthInternal(); + protected abstract byte[] GetDataInternal(long offset, long length); + + public byte[] GetData() { return GetData(0, GetLengthInternal()); } + public byte[] GetData(long offset, long length) + { + byte[] data = GetDataInternal(offset, length); + MemoryStream msInput = new MemoryStream(data); + /* + for (int i = 0; i < Transformations.Count; i++) + { + System.IO.MemoryStream msOutput = new System.IO.MemoryStream(); + Transformations[i].Function(this, msInput, msOutput); + msInput = msOutput; + } + */ + return msInput.ToArray(); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/ByteArrayFileSource.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/ByteArrayFileSource.cs new file mode 100644 index 0000000..3455f6d --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/ByteArrayFileSource.cs @@ -0,0 +1,55 @@ +// +// MemoryFileSource.cs - provides a FileSource for retrieving file data from a byte array +// +// Author: +// Michael Becker +// +// Copyright (c) 2011-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using System; +using System.Data; + +namespace MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +/// +/// Provides a for retrieving file data from a . 
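+/// <example>
+/// A minimal sketch:
+/// <code>
+/// var source = new ByteArrayFileSource(new byte[] { 1, 2, 3, 4, 5 });
+/// byte[] slice = source.GetData(1, 3); // yields { 2, 3, 4 }
+/// </code>
+/// </example>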
+/// +public class ByteArrayFileSource : FileSource +{ + public byte[] Data { get; set; } + + public ByteArrayFileSource(byte[] data) + { + Data = data; + } + + protected override byte[] GetDataInternal(long offset, long length) + { + long realLength = Math.Min(length, Data.Length); + byte[] realData = Data; + long remaining = realData.Length - offset; + realLength = Math.Min(realLength, remaining); + + byte[] data = new byte[realLength]; + Array.Copy(realData, offset, data, 0, realLength); + return data; + } + + protected override long GetLengthInternal() + { + return Data.Length; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/CompressedEmbeddedFileSource.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/CompressedEmbeddedFileSource.cs new file mode 100644 index 0000000..ea1bf7f --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/CompressedEmbeddedFileSource.cs @@ -0,0 +1,54 @@ +using MBS.Core.Collections; +using MBS.Editor.Core.Compression; + +namespace MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +public class CompressedEmbeddedFileSource : EmbeddedFileSource +{ + public CompressionModule? CompressionModule { get; set; } = null; + + public long CompressedLength { get; set; } + private long DecompressedLength { get; set; } + protected override long ActualLength => DecompressedLength; + + private byte[]? _decompressedData = null; + + protected override byte[] GetDataInternal(long offset, long length) + { + if (_decompressedData == null) + { + Stream.Seek(Offset + offset, SeekOrigin.Begin); + + byte[] compressedData = new byte[CompressedLength]; + Stream.Read(compressedData, 0, compressedData.Length); + Console.WriteLine("compressed data: " + compressedData.ToString(" ", "x")); + + byte[] decompressedData = compressedData; + if (CompressionModule != null) + { + decompressedData = CompressionModule.Decompress(compressedData); + } + _decompressedData = decompressedData; + Console.WriteLine("decompressed data: " + decompressedData.ToString(" ", "x")); + } + + if (offset + length > _decompressedData.Length) + { + Console.WriteLine(String.Format("embedded file offset: {0}", Offset)); + Console.WriteLine(String.Format("requested offset: {0}", offset)); + Console.WriteLine(String.Format("requested length: {0}", length)); + Console.WriteLine(String.Format("actual stream length: {0}", _decompressedData.Length)); + throw new ArgumentOutOfRangeException("offset + length", "embedded file offset + requested offset + requested length extends past the actual length of the underlying stream"); + } + + byte[] data = new byte[length]; + Array.Copy(_decompressedData, offset, data, 0, data.Length); + return data; + } + + public CompressedEmbeddedFileSource(Stream stream, long offset, long compressedLength, long decompressedLength) : base(stream, offset, decompressedLength) + { + CompressedLength = compressedLength; + DecompressedLength = decompressedLength; + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/EmbeddedFileSource.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/EmbeddedFileSource.cs new file mode 100644 index 0000000..9327a10 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/EmbeddedFileSource.cs @@ -0,0 +1,34 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +public class EmbeddedFileSource : FileSource +{ + 
public Stream Stream { get; } + public long Offset { get; set; } + + protected virtual long ActualLength { get; } + + protected override long GetLengthInternal() + { + return ActualLength; + } + protected override byte[] GetDataInternal(long offset, long length) + { + if (Offset + offset + length >= Stream.Length) + { + throw new ArgumentOutOfRangeException("embedded file offset + requested offset + requested length extends past the actual length of the underlying stream"); + } + + Stream.Seek(Offset + offset, SeekOrigin.Begin); + + byte[] data = new byte[length]; + Stream.Read(data, 0, data.Length); + return data; + } + + public EmbeddedFileSource(Stream stream, long offset, long length) + { + Stream = stream; + Offset = offset; + ActualLength = length; + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/StreamFileSource.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/StreamFileSource.cs new file mode 100644 index 0000000..43f00b2 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSources/StreamFileSource.cs @@ -0,0 +1,59 @@ +// +// StreamFileSource.cs - provides a FileSource for retrieving file data from a System.IO.Stream +// +// Author: +// Michael Becker +// +// Copyright (c) 2011-2024 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +/// +/// Provides a for retrieving file data from a . 
+/// +public class StreamFileSource : FileSource +{ + public Stream BaseStream { get; set; } + public StreamFileSource(Stream stream) + { + BaseStream = stream; + } + + protected override byte[] GetDataInternal(long offset, long length) + { + byte[] buffer = new byte[length]; + try + { + if (offset + length > BaseStream.Length) + { + throw new ArgumentOutOfRangeException("offset + length is out of range"); + } + } + catch (NotSupportedException ex) + { + // continue anyway + } + + BaseStream.Seek(offset, SeekOrigin.Begin); + BaseStream.Read(buffer, 0, (int)length); + return buffer; + } + + protected override long GetLengthInternal() + { + return BaseStream.Length; + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemCustomDetailCollection.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemCustomDetailCollection.cs new file mode 100644 index 0000000..344c520 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemCustomDetailCollection.cs @@ -0,0 +1,29 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemCustomDetailCollection +{ + private struct _item + { + public string id; + public string title; + } + + private Dictionary _dict = new Dictionary(); + + public void Add(string id, string title) + { + _item item = new _item(); + item.id = id; + item.title = title; + _dict[id] = item; + } + public bool Contains(string id) + { + return _dict.ContainsKey(id); + } + public string GetTitle(string id) + { + _item item = _dict[id]; + return item.title; + } +} \ No newline at end of file diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFile.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFile.cs new file mode 100644 index 0000000..d0d27af --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFile.cs @@ -0,0 +1,20 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemFile : FileSystemItem +{ + public FileSource? Source { get; set; } = null; + + public DateTime? ModificationTimestamp { get; set; } = null; + + public Dictionary CustomDetails { get; } = new Dictionary(); + + public long Size + { + get + { + if (Source == null) + return 0; + return Source.Length; + } + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFolder.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFolder.cs new file mode 100644 index 0000000..ae9d73c --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemFolder.cs @@ -0,0 +1,12 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemFolder : FileSystemItem, IFileSystemItemContainer +{ + + public FileSystemItemCollection Items { get; } + + public FileSystemFolder() + { + Items = new FileSystemItemCollection(this); + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItem.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItem.cs new file mode 100644 index 0000000..9cd9159 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItem.cs @@ -0,0 +1,8 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemItem +{ + public FileSystemObjectModel FileSystem { get { return Parent?.FileSystem; } } + public string? Name { get; set; } = null; + public IFileSystemItemContainer? 
Parent { get; internal set; } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItemCollection.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItemCollection.cs new file mode 100644 index 0000000..38cfef2 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemItemCollection.cs @@ -0,0 +1,181 @@ + +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemItemCollection + : System.Collections.ObjectModel.Collection +{ + public IFileSystemItemContainer Parent { get; } + public FileSystemItemCollection(IFileSystemItemContainer parent) + { + Parent = parent; + } + + public bool Contains(string filename) + { + return this[filename] != null; + } + public FileSystemItem? this[string filename] + { + get + { + foreach (FileSystemItem item in this) + { + if (item.Name == filename) + { + return item; + } + } + return null; + } + } + + protected override void ClearItems() + { + for (int i = 0; i < Count; i++) + { + this[i].Parent = null; + } + base.ClearItems(); + } + protected override void InsertItem(int index, FileSystemItem item) + { + base.InsertItem(index, item); + item.Parent = Parent; + } + protected override void RemoveItem(int index) + { + this[index].Parent = null; + base.RemoveItem(index); + } + + + public FileSystemFolder AddFolder(string name) + { + string[] path = name.Split(Parent.FileSystem.PathSeparators, StringSplitOptions.None); + FileSystemFolder parent = null; + for (int i = 0; i < path.Length; i++) + { + if (parent == null) + { + parent = this[path[i]] as FileSystemFolder; + if (parent == null) + { + FileSystemFolder f = new FileSystemFolder(); + f.Name = path[i]; + Add(f); + parent = f; + } + } + else + { + FileSystemFolder new_parent = parent.Items[path[i]] as FileSystemFolder; + if (new_parent == null) + { + new_parent = new FileSystemFolder(); + new_parent.Name = path[i]; + parent.Items.Add(new_parent); + } + parent = new_parent; + } + if (parent == null) throw new DirectoryNotFoundException(); + } + return parent; + } + public FileSystemFile AddFile(string name, FileSource? 
source = null) + { + if (name == null) name = String.Empty; + string[] path = name.Split(Parent.FileSystem.PathSeparators, StringSplitOptions.None); + FileSystemFolder parent = null; + for (int i = 0; i < path.Length - 1; i++) + { + if (parent == null) + { + if (Contains(path[i])) + { + parent = this[path[i]] as FileSystemFolder; + } + else + { + parent = AddFolder(path[i]); + } + } + else + { + if (parent.Items.Contains(path[i])) + { + parent = parent.Items[path[i]] as FileSystemFolder; + } + else + { + parent = parent.Items.AddFolder(path[i]); + } + } + + if (parent == null) + { + throw new System.IO.DirectoryNotFoundException(); + } + } + + FileSystemFile file = new FileSystemFile(); + file.Name = path[path.Length - 1]; + file.Source = source; + if (parent == null) + { + Add(file); + } + else + { + parent.Items.Add(file); + } + + return file; + } + + + public FileSystemFile[] GetAllFiles() + { + List list = new List(); + for (int i = 0; i < Count; i++) + { + if (this[i] is FileSystemFile Peter) + { + list.Add(Peter); + } + else if (this[i] is FileSystemFolder folder) + { + FileSystemFile[] files2 = folder.Items.GetAllFiles(); + list.AddRange(files2); + } + } + return list.ToArray(); + } + + public FileSystemFile[] GetFiles() + { + List list = new List(); + for (int i = 0; i < Count; i++) + { + if (this[i] is FileSystemFile Peter) + { + list.Add(Peter); + } + } + return list.ToArray(); + } + + public FileSystemFolder[] GetFolders() + { + List list = new List(); + for (int i = 0; i < Count; i++) + { + if (this[i] is FileSystemFolder folder) + { + list.Add(folder); + } + } + return list.ToArray(); + } +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemObjectModel.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemObjectModel.cs new file mode 100644 index 0000000..2cfcc06 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/FileSystemObjectModel.cs @@ -0,0 +1,20 @@ + + +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public class FileSystemObjectModel : ObjectModel, IFileSystemItemContainer +{ + public FileSystemObjectModel FileSystem { get { return this; } } + public FileSystemItemCollection Items { get; } + public FileSystemObjectModel() + { + Items = new FileSystemItemCollection(this); + } + + public FileSystemCustomDetailCollection CustomDetails { get; } = new FileSystemCustomDetailCollection(); + + public string[] PathSeparators { get; set; } = { "/", "\\" }; // System.IO.Path.DirectorySeparatorChar.ToString(), System.IO.Path.AltDirectorySeparatorChar.ToString() }; + +} diff --git a/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/IFileSystemItemContainer.cs b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/IFileSystemItemContainer.cs new file mode 100644 index 0000000..33c5ba9 --- /dev/null +++ b/editor-dotnet/src/lib/MBS.Editor.Core/ObjectModels/FileSystem/IFileSystemItemContainer.cs @@ -0,0 +1,7 @@ +namespace MBS.Editor.Core.ObjectModels.FileSystem; + +public interface IFileSystemItemContainer +{ + FileSystemObjectModel FileSystem { get; } + FileSystemItemCollection Items { get; } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/Internal/UTFTableInfo.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/Internal/UTFTableInfo.cs new file mode 100644 index 0000000..c1506f6 --- /dev/null +++ 
b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/Internal/UTFTableInfo.cs @@ -0,0 +1,17 @@ +namespace MBS.Editor.Plugins.CRI.DataFormats.Database.UTF.Internal; + +internal struct UTFTABLEINFO +{ + public long utfOffset; + + public int tableSize; + public int schemaOffset; + public int rowsOffset; + public int stringTableOffset; + public int dataOffset; + public uint tableNameStringOffset; + public short tableColumns; + public short rowWidth; + public int tableRows; + public int stringTableSize; +} \ No newline at end of file diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnDataType.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnDataType.cs new file mode 100644 index 0000000..066ac32 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnDataType.cs @@ -0,0 +1,77 @@ +// +// UTFColumnDataType.cs - CRI Middleware UTF table column data types +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.Database.UTF; + +/// +/// The data type for a column in a UTF table. +/// +public enum UTFColumnDataType : byte +{ + /// + /// Mask value for combining with . + /// + Mask = 0x0f, + /// + /// The column represents a variable-length array of data. + /// + Data = 0x0b, + /// + /// The column represents a variable-length . + /// + String = 0x0a, + /// + /// The column represents a value. + /// + Float = 0x08, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Long2 = 0x07, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Long = 0x06, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Int2 = 0x05, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Int = 0x04, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Short2 = 0x03, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Short = 0x02, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. + /// + Byte2 = 0x01, + /// + /// The column represents a value. There may or may not be a distinction between signed and unsigned types. 
+ /// + Byte = 0x00 +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnStorageType.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnStorageType.cs new file mode 100644 index 0000000..bdf30e5 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFColumnStorageType.cs @@ -0,0 +1,45 @@ +// +// UTFColumnStorageType.cs - CRI Middleware UTF table column storage types +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.Database.UTF; + +/// +/// The storage type for a column in a UTF table. +/// +public enum UTFColumnStorageType : byte +{ + /// + /// Mask value for combining with . + /// + Mask = 0xf0, + /// + /// Data in this column is stored per row, with a single value written for each ROW in the table. + /// + PerRow = 0x50, + /// + /// Data in this column is constant regardless of row, with a single value written for each COLUMN in the table. + /// + Constant = 0x30, + /// + /// Data in this column is declared NULL for all rows in the table. No data is written for this column. + /// + Zero = 0x10 +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFDataFormat.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFDataFormat.cs new file mode 100644 index 0000000..90619de --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/Database/UTF/UTFDataFormat.cs @@ -0,0 +1,644 @@ +// +// UTFDataFormat.cs - COMPLETED - Implementation of CRI Middleware UTF table (used in CPK) +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
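Editorial note, not part of the patch: the two enums above are combined into a single schema byte per column, with the storage type in the high nibble and the data type in the low nibble. A minimal decoding sketch using the Mask members (the 0x5A input value is purely illustrative):

byte schema = 0x5A; // hypothetical: 0x50 (PerRow) | 0x0A (String)
UTFColumnStorageType storage = (UTFColumnStorageType)(schema & (byte)UTFColumnStorageType.Mask); // -> PerRow
UTFColumnDataType dataType = (UTFColumnDataType)(schema & (byte)UTFColumnDataType.Mask);         // -> String

This mirrors how UTFDataFormat.LoadInternal splits the schema byte when reading column definitions.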
+ +namespace MBS.Editor.Plugins.CRI.DataFormats.Database.UTF; + +using System; +using System.Collections.Generic; + +using MBS.Core; +using MBS.Core.Collections; + +using MBS.Editor.Core; +using MBS.Editor.Core.IO; +using MBS.Editor.Core.ObjectModels.Database; +using MBS.Editor.Plugins.CRI.DataFormats.Database.UTF.Internal; + +public class UTFDataFormat : DataFormat +{ + private static DataFormatMetadata _dfr; + public static DataFormatMetadata Metadata + { + get + { + if (_dfr == null) + { + _dfr = new DataFormatMetadata(); + } + return _dfr; + } + } + + private UTFTABLEINFO ReadUTFTableInfo(Reader br) + { + UTFTABLEINFO info = new UTFTABLEINFO(); + info.utfOffset = br.BaseStream.Position; + info.tableSize = br.ReadInt32(); + info.schemaOffset = 0x20; + info.rowsOffset = br.ReadInt32(); + info.stringTableOffset = br.ReadInt32(); + info.dataOffset = br.ReadInt32(); + + // CPK Header & UTF Header are ignored, so add 8 to each offset + + info.tableNameStringOffset = br.ReadUInt32(); // 00000007 + info.tableColumns = br.ReadInt16(); // 0023 + info.rowWidth = br.ReadInt16(); // 007e + info.tableRows = br.ReadInt32(); // 00000001 + info.stringTableSize = info.dataOffset - info.stringTableOffset; + return info; + } + + protected override void LoadInternal(ObjectModel objectModel, Stream stream) + { + DatabaseObjectModel? utf = objectModel as DatabaseObjectModel; + if (utf == null) + throw new ObjectModelNotSupportedException(); + + Reader br = new Reader(stream); + string utf_signature = br.ReadFixedLengthString(4); + + if (utf_signature != "@UTF") + throw new InvalidDataFormatException(); // we are assuming passed in decrypted UTF from the CPK + + DatabaseTable dt = new DatabaseTable(); + + br.Endianness = Endianness.BigEndian; + + UTFTABLEINFO info = ReadUTFTableInfo(br); + + int[] columnNameOffsets = new int[info.tableColumns]; + long[] constantOffsets = new long[info.tableColumns]; + UTFColumnStorageType[] storageTypes = new UTFColumnStorageType[info.tableColumns]; + UTFColumnDataType[] dataTypes = new UTFColumnDataType[info.tableColumns]; + + // Read string table - remember, this is relative to UTF data WITH the "@UTF" signature + br.BaseStream.SavePosition(); + br.BaseStream.Seek(info.utfOffset + info.stringTableOffset + 4, SeekOrigin.Begin); + /* + while (br.PeekByte() == 0) + { + br.ReadByte(); + } + */ + byte[] stringTableData = br.ReadBytes(info.stringTableSize); + + MemoryStream maStringTable = new MemoryStream(stringTableData); + Reader stringTableReader = new Reader(maStringTable); + + stringTableReader.BaseStream.Seek(info.tableNameStringOffset, SeekOrigin.Begin); + dt.Name = stringTableReader.ReadNullTerminatedString(); + br.BaseStream.LoadPosition(); + + for (int i = 0; i < info.tableColumns; i++) + { + byte schema = br.ReadByte(); + columnNameOffsets[i] = br.ReadInt32(); + storageTypes[i] = (UTFColumnStorageType)(schema & (byte)UTFColumnStorageType.Mask); + dataTypes[i] = (UTFColumnDataType)(schema & (byte)UTFColumnDataType.Mask); + + object constantValue = null; + if (storageTypes[i] == UTFColumnStorageType.Constant) + { + constantOffsets[i] = br.BaseStream.Position; + switch (dataTypes[i]) + { + case UTFColumnDataType.Long: + case UTFColumnDataType.Long2: + case UTFColumnDataType.Data: + { + constantValue = br.ReadInt64(); + break; + } + case UTFColumnDataType.Float: + { + constantValue = br.ReadSingle(); + break; + } + case UTFColumnDataType.String: + { + int valueOffset = br.ReadInt32(); + stringTableReader.BaseStream.Seek(valueOffset, SeekOrigin.Begin); + constantValue = 
stringTableReader.ReadNullTerminatedString(); + break; + } + case UTFColumnDataType.Int: + case UTFColumnDataType.Int2: + { + constantValue = br.ReadInt32(); + break; + } + case UTFColumnDataType.Short: + case UTFColumnDataType.Short2: + { + constantValue = br.ReadInt16(); + break; + } + case UTFColumnDataType.Byte: + case UTFColumnDataType.Byte2: + { + constantValue = br.ReadByte(); + break; + } + default: + { + Console.WriteLine("cpk: ReadUTFTable: unknown data type for column " + i.ToString()); + break; + } + } + } + + dt.Fields.Add("Field" + i.ToString(), constantValue, SystemDataTypeForUTFDataType(dataTypes[i])); + } + + for (int i = 0; i < info.tableColumns; i++) + { + stringTableReader.BaseStream.Seek(columnNameOffsets[i], SeekOrigin.Begin); + dt.Fields[i].Name = stringTableReader.ReadNullTerminatedString(); + } + + for (int i = 0; i < info.tableRows; i++) + { + uint rowOffset = (uint)(info.utfOffset + 4 + info.rowsOffset + (i * info.rowWidth)); + uint rowStartOffset = rowOffset; + br.BaseStream.Seek(rowOffset, SeekOrigin.Begin); + + DatabaseRecord record = new DatabaseRecord(); + + for (int j = 0; j < info.tableColumns; j++) + { + UTFColumnStorageType storageType = storageTypes[j]; + UTFColumnDataType dataType = dataTypes[j]; + long constantOffset = constantOffsets[j] - 11; + + switch (storageType) + { + case UTFColumnStorageType.PerRow: + { + switch (dataType) + { + case UTFColumnDataType.String: + { + string value = null; + if (storageType == UTFColumnStorageType.Constant) + { + value = (dt.Fields[j].Value as string); + } + else + { + uint stringOffset = br.ReadUInt32(); + if (stringOffset < stringTableData.Length) + { + stringTableReader.BaseStream.Seek(stringOffset, SeekOrigin.Begin); + value = stringTableReader.ReadNullTerminatedString(); + } + } + record.Fields.Add(dt.Fields[j].Name, value); + break; + } + case UTFColumnDataType.Data: + { + uint varDataOffset = br.ReadUInt32(); + uint varDataSize = br.ReadUInt32(); + + byte[] value = null; + if (varDataOffset == 0 && varDataSize == 0) + { + value = null; + } + else + { + long realOffset = info.dataOffset + 8 + varDataOffset; + br.BaseStream.SavePosition(); + br.BaseStream.Seek(realOffset, SeekOrigin.Begin); + byte[] tableData = br.ReadBytes(varDataSize); + br.BaseStream.LoadPosition(); + value = tableData; + } + record.Fields.Add(dt.Fields[j].Name, value); + break; + } + case UTFColumnDataType.Long: + case UTFColumnDataType.Long2: + { + ulong value = br.ReadUInt64(); + record.Fields.Add(dt.Fields[j].Name, value); + + break; + } + case UTFColumnDataType.Int: + case UTFColumnDataType.Int2: + { + uint value = br.ReadUInt32(); + record.Fields.Add(dt.Fields[j].Name, value); + + break; + } + case UTFColumnDataType.Short: + case UTFColumnDataType.Short2: + { + ushort value = br.ReadUInt16(); + record.Fields.Add(dt.Fields[j].Name, value); + break; + } + case UTFColumnDataType.Float: + { + float value = br.ReadSingle(); + record.Fields.Add(dt.Fields[j].Name, value); + break; + } + case UTFColumnDataType.Byte: + case UTFColumnDataType.Byte2: + { + byte value = br.ReadByte(); + record.Fields.Add(dt.Fields[j].Name, value); + break; + } + } + break; + } + case UTFColumnStorageType.Constant: + { + record.Fields.Add(dt.Fields[j].Name, dt.Fields[j].Value); + continue; + } + case UTFColumnStorageType.Zero: + { + record.Fields.Add(dt.Fields[j].Name, null); + continue; + } + } + } + + dt.Records.Add(record); + } + utf.Tables.Add(dt); + } + + public static Type SystemDataTypeForUTFDataType(UTFColumnDataType dataType) + { + switch (dataType) + { 
+ case UTFColumnDataType.Byte: + case UTFColumnDataType.Byte2: + { + return typeof(byte); + } + case UTFColumnDataType.Data: + { + return typeof(byte[]); + } + case UTFColumnDataType.Float: + { + return typeof(float); + } + case UTFColumnDataType.Int: + { + return typeof(uint); + } + case UTFColumnDataType.Int2: + { + return typeof(int); + } + case UTFColumnDataType.Long: + case UTFColumnDataType.Long2: + { + return typeof(long); + } + case UTFColumnDataType.Short: + case UTFColumnDataType.Short2: + { + return typeof(short); + } + case UTFColumnDataType.String: + { + return typeof(string); + } + } + return null; + } + public static UTFColumnDataType UTFDataTypeForSystemDataType(Type dataType) + { + if (dataType == typeof(byte)) return UTFColumnDataType.Byte; + else if (dataType == typeof(sbyte)) return UTFColumnDataType.Byte; + else if (dataType == typeof(byte[])) return UTFColumnDataType.Data; + else if (dataType == typeof(float)) return UTFColumnDataType.Float; + else if (dataType == typeof(int)) return UTFColumnDataType.Int2; + else if (dataType == typeof(uint)) return UTFColumnDataType.Int; + else if (dataType == typeof(long)) return UTFColumnDataType.Long; + else if (dataType == typeof(ulong)) return UTFColumnDataType.Long; + else if (dataType == typeof(short)) return UTFColumnDataType.Short; + else if (dataType == typeof(ushort)) return UTFColumnDataType.Short; + else if (dataType == typeof(string)) return UTFColumnDataType.String; + return UTFColumnDataType.Mask; + } + + protected override void SaveInternal(ObjectModel objectModel, Stream stream) + { + DatabaseObjectModel? utf = objectModel as DatabaseObjectModel; + if (utf == null) + throw new ObjectModelNotSupportedException(); + + Writer bw = new Writer(stream); + bw.WriteFixedLengthString("@UTF"); + + DatabaseTable dt = utf.Tables[0]; + + bw.Endianness = Endianness.BigEndian; + + // do the hard work here to determine if a field should be recorded as zero or not + UTFColumnStorageType[] columnStorageTypes = new UTFColumnStorageType[dt.Fields.Count]; + UTFColumnDataType[] columnDataTypes = new UTFColumnDataType[dt.Fields.Count]; + for (int i = 0; i < dt.Fields.Count; i++) + { + columnStorageTypes[i] = UTFColumnStorageType.Zero; + columnDataTypes[i] = UTFDataTypeForSystemDataType(dt.Fields[i].DataType); + + if (dt.Fields[i].Value != null) + { + columnStorageTypes[i] = UTFColumnStorageType.Constant; + continue; + } + for (int j = 0; j < dt.Records.Count; j++) + { + if (dt.Records[j].Fields[i].Value != null) + { + columnStorageTypes[i] = UTFColumnStorageType.PerRow; + break; + } + } + } + + int tableSize = 24; // size of entire file = 32 - "@UTF".Length(4) - (size of table size field:4) = 32 - 8 = 24 + tableSize += (5 * dt.Fields.Count); // 5 * 36 = 204 5 * 35 = 195 + tableSize += (dt.Name.Length + 1); // 204 + "CpkHeader".Length + 1 = 214 + tableSize += 7; // "\0".Length // 214 + 7 = 221 + + int rowsOffset = 24 + (5 * dt.Fields.Count); + int stringTableOffset = rowsOffset; + short rowWidth = 0; + for (int i = 0; i < dt.Fields.Count; i++) + { + tableSize += (dt.Fields[i].Name.Length + 1); + if (columnStorageTypes[i] == UTFColumnStorageType.Constant) + { + int l = GetLengthForDataType(columnDataTypes[i]); + tableSize += l; + stringTableOffset += l; + rowsOffset += l; + + if (columnDataTypes[i] == UTFColumnDataType.String) + { + tableSize += ((string)dt.Fields[i].Value).Length + 1; + } + } + else if (columnStorageTypes[i] == UTFColumnStorageType.PerRow) + { + rowWidth += GetLengthForDataType(columnDataTypes[i]); + } + } + + for (int i 
= 0; i < dt.Records.Count; i++) + { + for (int j = 0; j < dt.Records[i].Fields.Count; j++) + { + if (columnStorageTypes[j] == UTFColumnStorageType.PerRow) + { + tableSize += GetLengthForDataType(columnDataTypes[j]); + stringTableOffset += GetLengthForDataType(columnDataTypes[j]); + if (columnDataTypes[j] == UTFColumnDataType.String) + { + tableSize += ((string)dt.Records[i].Fields[j].Value).Length + 1; + } + } + } + } + + // this is off... always at the same offset too (CpkTocInfo - 0x818 in cpk files) + // tableSize += 8; // this is correct, but, CpkFileBuilder chokes, unless it is omitted + tableSize = tableSize.Align(8); + + bw.WriteInt32(tableSize); + bw.WriteInt32(rowsOffset); + bw.WriteInt32(stringTableOffset); + bw.WriteInt32(tableSize); // data offset - same as table size? + bw.WriteUInt32(7); // "\0".Length + bw.WriteInt16((short)dt.Fields.Count); // 0023 + bw.WriteInt16(rowWidth); // 007e + bw.WriteInt32(dt.Records.Count); // 00000001 + + int columnNameOffset = (int)8 + (int)dt.Name.Length; // add space for "\0" string and dt.Name + 1 + + List stringTable = new List(); + stringTable.Add(""); + stringTable.Add(dt.Name); + for (int i = 0; i < dt.Fields.Count; i++) + { + byte schema = 0; + schema |= (byte)((byte)columnStorageTypes[i] | (byte)columnDataTypes[i]); + + bw.WriteByte(schema); + bw.WriteInt32(columnNameOffset); + + columnNameOffset += dt.Fields[i].Name.Length + 1; + stringTable.Add(dt.Fields[i].Name); + + if (columnStorageTypes[i] == UTFColumnStorageType.Constant) + { + WriteValue(bw, dt.Fields[i].Value, columnDataTypes[i], stringTable); + if (columnDataTypes[i] == UTFColumnDataType.String) + { + columnNameOffset += ((string)dt.Fields[i].Value).Length + 1; + } + } + } + + for (int i = 0; i < dt.Records.Count; i++) + { + for (int j = 0; j < dt.Fields.Count; j++) + { + if (columnStorageTypes[j] == UTFColumnStorageType.PerRow) + { + WriteValue(bw, dt.Records[i].Fields[j].Value, stringTable); + } + } + } + + for (int i = 0; i < stringTable.Count; i++) + { + bw.WriteNullTerminatedString(stringTable[i]); + } + + bw.Align(8); + } + + public static short GetLengthForDataType(UTFColumnDataType columnDataType) + { + switch (columnDataType) + { + case UTFColumnDataType.String: + { + return 4; + } + case UTFColumnDataType.Data: + case UTFColumnDataType.Long: + case UTFColumnDataType.Long2: + { + return 8; + } + case UTFColumnDataType.Byte: + case UTFColumnDataType.Byte2: + { + return 1; + } + case UTFColumnDataType.Float: + case UTFColumnDataType.Int: + case UTFColumnDataType.Int2: + { + return 4; + } + case UTFColumnDataType.Short: + case UTFColumnDataType.Short2: + { + return 2; + } + } + throw new NotImplementedException(); + } + + private void WriteValue(Writer bw, object value, List stringTable) + { + if (value is string) + { + WriteValue(bw, value, UTFColumnDataType.String, stringTable); + } + else if (value is byte[]) + { + WriteValue(bw, value, UTFColumnDataType.Data, stringTable); + } + else if (value is long || value is ulong) + { + WriteValue(bw, value, UTFColumnDataType.Long, stringTable); + } + else if (value is int || value is uint) + { + WriteValue(bw, value, UTFColumnDataType.Int, stringTable); + } + else if (value is short || value is ushort) + { + WriteValue(bw, value, UTFColumnDataType.Short, stringTable); + } + else if (value is byte || value is byte) + { + WriteValue(bw, value, UTFColumnDataType.Byte, stringTable); + } + else if (value is float) + { + WriteValue(bw, value, UTFColumnDataType.Float, stringTable); + } + } + + private void WriteValue(Writer bw, 
object value, UTFColumnDataType columnDataType, List stringTable) + { + switch (columnDataType) + { + case UTFColumnDataType.String: + { + string str = (string)value; + if (stringTable.Contains(str)) + { + bw.WriteUInt32((uint)stringTable.GetItemOffset(stringTable.IndexOf(str), 1)); + } + else + { + stringTable.Add(str); + bw.WriteUInt32((uint)stringTable.GetItemOffset(stringTable.Count - 1, 1)); + } + break; + } + case UTFColumnDataType.Data: + { + uint varDataOffset = 0; + uint varDataSize = 0; + bw.WriteUInt32(varDataOffset); + bw.WriteUInt32(varDataSize); + break; + } + case UTFColumnDataType.Long: + case UTFColumnDataType.Long2: + { + if (value is ulong) + { + bw.WriteUInt64((ulong)value); + } + else + { + bw.WriteInt64((long)value); + } + break; + } + case UTFColumnDataType.Int: + case UTFColumnDataType.Int2: + { + if (value is uint) + { + bw.WriteUInt32((uint)value); + } + else + { + bw.WriteInt32((int)value); + } + break; + } + case UTFColumnDataType.Short: + case UTFColumnDataType.Short2: + { + if (value is ushort) + { + bw.WriteUInt16((ushort)value); + } + else + { + bw.WriteInt16((short)value); + } + break; + } + case UTFColumnDataType.Float: + { + bw.WriteSingle((float)value); + break; + } + case UTFColumnDataType.Byte: + case UTFColumnDataType.Byte2: + { + if (value is byte) + { + bw.WriteByte((byte)value); + } + else + { + bw.WriteSByte((sbyte)value); + } + break; + } + } + } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSDataFormat.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSDataFormat.cs new file mode 100644 index 0000000..e95a3fa --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSDataFormat.cs @@ -0,0 +1,293 @@ +// +// AFSDataFormat.cs - COMPLETED - implementation of CRI Middleware AFS archive +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2024 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.AFS; + +using MBS.Core; + +using MBS.Editor.Core; +using MBS.Editor.Core.Hosting; +using MBS.Editor.Core.ObjectModels.FileSystem; + +using MBS.Editor.Core.IO; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +/// +/// A for loading and saving archives in CRI Middleware AFS/AWB/ACB format. +/// +public class AFSDataFormat : DataFormat +{ + /* + private static DataFormatReference _dfr; + /// + /// Creates a containing metadata about the . + /// + /// The which contains metadata about the . + protected override DataFormatReference MakeReferenceInternal() + { + if (_dfr == null) + { + _dfr = base.MakeReferenceInternal(); + _dfr.Capabilities.Add(typeof(FileSystemObjectModel), DataFormatCapabilities.All); + } + return _dfr; + } + */ + + /// + /// Gets or sets the version of AFS archive to read or write. Defaults to ('AFS\0'). 
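Editorial usage sketch, not part of the patch: an AFS archive can be loaded into a FileSystemObjectModel through Document.Load, following the same (ObjectModel, DataFormat, Stream) pattern this patch uses internally for UTF tables; the input file name below is hypothetical.

FileSystemObjectModel fsom = new FileSystemObjectModel();
AFSDataFormat afs = new AFSDataFormat();
using (FileStream fs = File.OpenRead("example.afs")) // hypothetical input file
{
    Document.Load(fsom, afs, fs); // parses the AFS header, TOC and per-file entries
}
foreach (FileSystemFile file in fsom.Items.GetFiles())
{
    Console.WriteLine("{0}: {1} bytes", file.Name, file.Size);
}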
+ /// + /// The version of AFS archive to read or write. + public AFSFormatVersion FormatVersion { get; set; } = AFSFormatVersion.AFS0; + + /// + /// When true, and the underlying is a , + /// if the name of the file ends in ".awb", will automatically set to + /// . + /// + /// + public bool AutoDetectFormatVersion { get; set; } = true; + + /// + /// Loads the data from the input . + /// + /// A into which to load archive content. + protected override void LoadInternal(ObjectModel objectModel, Stream stream) + { + FileSystemObjectModel fsom = (objectModel as FileSystemObjectModel); + if (fsom == null) + throw new ObjectModelNotSupportedException(); + + string? filename = null; + if (stream is FileStream) + { + filename = ((FileStream)stream).Name; + } + + Reader reader = new Reader(stream); + string afs = reader.ReadFixedLengthString(4); + + switch (afs) + { + case "AFS\0": + { + FormatVersion = AFSFormatVersion.AFS0; + + uint fileCount = reader.ReadUInt32(); + AFSFileInfo[] fileinfos = new AFSFileInfo[fileCount]; + + for (int i = 0; i < fileCount; i++) + { + fileinfos[i].offset = reader.ReadUInt32(); + fileinfos[i].length = reader.ReadUInt32(); + } + + uint tocOffset = reader.ReadUInt32(); + uint tocLength = reader.ReadUInt32(); + + if (tocOffset == 0) + { + (Application.Instance as IHostApplication)?.HostServices.Messages.Add(HostApplicationMessageSeverity.Warning, "table of contents not found", filename); + for (int j = 0; j < fileCount; j++) + { + fileinfos[j].name = String.Format("file_{0}", j); + FileSystemFile f = fsom.Items.AddFile(fileinfos[j].name); + f.Source = new EmbeddedFileSource(stream, fileinfos[j].offset, fileinfos[j].length); + } + } + else + { + reader.BaseStream.Seek(tocOffset, SeekOrigin.Begin); + for (int j = 0; j < fileCount; j++) + { + fileinfos[j].name = reader.ReadFixedLengthString(32).TrimNull(); + + ushort year = reader.ReadUInt16(); + ushort month = reader.ReadUInt16(); + ushort day = reader.ReadUInt16(); + ushort hour = reader.ReadUInt16(); + ushort minute = reader.ReadUInt16(); + ushort second = reader.ReadUInt16(); + fileinfos[j].datetime = new DateTime(year, month, day, hour, minute, second); + fileinfos[j].length2 = reader.ReadUInt32(); + + if (fileinfos[j].length2 != fileinfos[j].length) + { + (Application.Instance as IHostApplication)?.HostServices.Messages.Add(HostApplicationMessageSeverity.Warning, String.Format("length != length2 for file '{0}'", fileinfos[j].name), filename); + } + + FileSystemFile f = fsom.Items.AddFile(fileinfos[j].name); + f.Source = new EmbeddedFileSource(stream, fileinfos[j].offset, fileinfos[j].length); + f.ModificationTimestamp = fileinfos[j].datetime; + } + } + break; + } + case "AFS2": + { + FormatVersion = AFSFormatVersion.AFS2; + uint unknown1 = reader.ReadUInt32(); + + uint fileCount = reader.ReadUInt32(); + AFSFileInfo[] fileinfos = new AFSFileInfo[fileCount]; + + uint unknown2 = reader.ReadUInt32(); + for (uint i = 0; i < fileCount; i++) + { + ushort index = reader.ReadUInt16(); + } + for (uint i = 0; i < fileCount; i++) + { + fileinfos[i].offset = reader.ReadUInt32(); + fileinfos[i].offset = fileinfos[i].offset.Align(0x10); // does not affect 6 and 1 in v_etc_streamfiles.awb; idk why + if (i > 0) + { + fileinfos[i - 1].length = fileinfos[i].offset - fileinfos[i - 1].offset; + } + } + + uint totalArchiveSize = reader.ReadUInt32(); + fileinfos[fileinfos.Length - 1].length = totalArchiveSize - fileinfos[fileinfos.Length - 1].offset; + + ushort unknown4 = reader.ReadUInt16(); + + for (uint i = 0; i < fileinfos.Length; 
i++) + { + FileSystemFile f = fsom.Items.AddFile(i.ToString().PadLeft(8, '0')); + f.Source = new EmbeddedFileSource(stream, fileinfos[i].offset, fileinfos[i].length); + } + break; + } + default: + { + throw new InvalidDataFormatException("file does not begin with \"AFS\\0\""); + } + } + } + + /// + /// Writes the data to the output . + /// + /// A containing the archive content to write. + protected override void SaveInternal(ObjectModel objectModel, Stream stream) + { + FileSystemObjectModel? fsom = objectModel as FileSystemObjectModel; + if (fsom == null) + throw new ObjectModelNotSupportedException(); + + if (stream is FileStream fs && AutoDetectFormatVersion) + { + if (fs.Name.ToLower().EndsWith(".awb")) + { + FormatVersion = AFSFormatVersion.AFS2; + } + } + + Writer writer = new Writer(stream); + FileSystemFile[] files = fsom.Items.GetFiles(); + + if (FormatVersion == AFSFormatVersion.AFS0) + { + writer.WriteFixedLengthString("AFS\0"); + + uint filecount = (uint)files.LongLength; + writer.WriteUInt32(filecount); + + uint offset = 8; + offset += (8 * filecount); // offset + size + offset += 8; // tocoffset + unknown1 + + uint[] offsets = new uint[(filecount * 2) + 1]; + offsets[0] = filecount; + + for (int i = 0; i < filecount; i++) + { + offset = offset.Align(2048); // align to 2048 byte boundary + + offsets[(i * 2) + 1] = offset; + offsets[(i * 2) + 2] = (uint)files[i].Size; + + writer.WriteUInt32(offset); + writer.WriteUInt32((uint)files[i].Size); + + offset += (uint)files[i].Size; + } + + offset = offset.Align(2048); + uint tocOffset = offset; + uint tocLength = (uint)(48 * files.Length); + writer.WriteUInt32(tocOffset); + writer.WriteUInt32(tocLength); + + // now we should be at file data + for (int i = 0; i < filecount; i++) + { + writer.Align(2048); + writer.WriteBytes(files[i].Source?.GetData()); + } + + // now we should be at the TOC + writer.Align(2048); + for (int j = 0; j < filecount; j++) + { + writer.WriteFixedLengthString(files[j].Name, 32); + + DateTime ts = files[j].ModificationTimestamp.GetValueOrDefault(DateTime.Now); + writer.WriteUInt16((ushort)ts.Year); + writer.WriteUInt16((ushort)ts.Month); + writer.WriteUInt16((ushort)ts.Day); + writer.WriteUInt16((ushort)ts.Hour); + writer.WriteUInt16((ushort)ts.Minute); + writer.WriteUInt16((ushort)ts.Second); + writer.WriteUInt32((uint)offsets[j]); + } + + writer.Align(2048); + } + else if (FormatVersion == AFSFormatVersion.AFS2) + { + writer.WriteFixedLengthString("AFS2"); + + writer.WriteUInt32(0); //unknown1 + writer.WriteUInt32((uint)files.Length); + writer.WriteUInt32(32); // unknown2 + for (uint i = 0; i < files.Length; i++) + { + writer.WriteUInt16((ushort)i); + } + + uint offset = (uint)(20 + (files.Length * 6)); + for (uint i = 0; i < files.Length; i++) + { + writer.WriteUInt32(offset); + offset += (uint) files[i].Size; + } + + writer.WriteUInt32(offset); // total archive size + + for (uint i = 0; i < files.Length; i++) + { + writer.Align(16); + writer.WriteBytes(files[i].Source?.GetData()); + } + } + } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFileInfo.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFileInfo.cs new file mode 100644 index 0000000..d61bdfe --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFileInfo.cs @@ -0,0 +1,41 @@ +// +// AFSFileInfo.cs - internal structure representing metadata for files in an AFS archive +// +// Author: +// Michael Becker +// +// Copyright (c) 
2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.AFS; + +using System; + +/// +/// Internal structure representing metadata for files in an AFS archive. +/// +internal struct AFSFileInfo +{ + public string name; + public uint offset; + public DateTime datetime; + public uint length; + public uint length2; + + public override string ToString() + { + return String.Format("{0} : {1} [{2}]", name, offset, length); + } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFormatVersion.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFormatVersion.cs new file mode 100644 index 0000000..4b0a8c1 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/AFS/AFSFormatVersion.cs @@ -0,0 +1,37 @@ +// +// AFSFormatVersion.cs - the version of AFS archive being handled by an AFSDataFormat instance +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.AFS; + +/// +/// The version of AFS archive being handled by an instance. +/// +public enum AFSFormatVersion +{ + /// + /// Older version of AFS, which stores file data and TOC information in the same AFS file. + /// + AFS0, + /// + /// Newer version of AFS, which stores file data in an AWB file and writes the TOC to a separate ACB file. 
+ /// + AFS2 +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKCompressionModule.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKCompressionModule.cs new file mode 100644 index 0000000..59b3c30 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKCompressionModule.cs @@ -0,0 +1,193 @@ +using MBS.Editor.Core.Compression; +using MBS.Editor.Core.IO; + +namespace MBS.Editor.Plugins.CRI; + +public class CPKCompressionModule : CompressionModule +{ + private ushort get_next_bits(byte[] input, ref int offset_p, ref byte bit_pool_p, ref int bits_left_p, int bit_count) + { + ushort out_bits = 0; + int num_bits_produced = 0; + int bits_this_round; + + while (num_bits_produced < bit_count) + { + if (bits_left_p == 0) + { + bit_pool_p = input[offset_p]; + bits_left_p = 8; + offset_p--; + } + + if (bits_left_p > (bit_count - num_bits_produced)) + bits_this_round = bit_count - num_bits_produced; + else + bits_this_round = bits_left_p; + + out_bits <<= bits_this_round; + + out_bits |= (ushort)((ushort)(bit_pool_p >> (bits_left_p - bits_this_round)) & ((1 << bits_this_round) - 1)); + + bits_left_p -= bits_this_round; + num_bits_produced += bits_this_round; + } + + return out_bits; + } + + protected override void CompressInternal(Stream inputStream, Stream outputStream) + { + } + protected override void DecompressInternal(Stream inputStream, Stream outputStream) + { + Reader r = new Reader(inputStream); + byte[] input = r.ReadToEnd(); + byte[] output = DecompressCRILAYLA(input); + outputStream.Write(output, 0, output.Length); + } + + public byte[] DecompressCRILAYLA(byte[] input) + { + byte[] result; + + MemoryStream ms = new MemoryStream(input); + Reader br = new Reader(ms); + br.Endianness = Endianness.LittleEndian; + + br.BaseStream.Seek(8, SeekOrigin.Begin); // Skip CRILAYLA + int uncompressed_size = br.ReadInt32(); + int uncompressed_header_offset = br.ReadInt32(); + + result = new byte[uncompressed_size + 0x100]; + + // do some error checks here......... 
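// Layout note (editorial, not part of the patch): a CRILAYLA blob begins with the 8-byte
// "CRILAYLA" signature, a little-endian 32-bit uncompressed size, and a 32-bit offset
// (relative to the end of the 16-byte header, hence the + 0x10 below) of an uncompressed
// 0x100-byte block. That block is copied verbatim to the front of the output, while the
// compressed payload is decoded backwards: bits are consumed from the end of the input
// and bytes are written from output_end down toward the 0x100 boundary.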
+ + // copy uncompressed 0x100 header to start of file + Console.WriteLine("copy from input: {0}, 0, 0x100", uncompressed_header_offset + 0x10); + Array.Copy(input, uncompressed_header_offset + 0x10, result, 0, 0x100); + + int input_end = input.Length - 0x100 - 1; + int input_offset = input_end; + int output_end = 0x100 + uncompressed_size - 1; + byte bit_pool = 0; + int bits_left = 0, bytes_output = 0; + int[] vle_lens = new int[4] { 2, 3, 5, 8 }; + + while (bytes_output < uncompressed_size) + { + if (get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 1) > 0) + { + int backreference_offset = output_end - bytes_output + get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 13) + 3; + int backreference_length = 3; + int vle_level; + + for (vle_level = 0; vle_level < vle_lens.Length; vle_level++) + { + int this_level = get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, vle_lens[vle_level]); + backreference_length += this_level; + if (this_level != ((1 << vle_lens[vle_level]) - 1)) break; + } + + if (vle_level == vle_lens.Length) + { + int this_level; + do + { + this_level = get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 8); + backreference_length += this_level; + } while (this_level == 255); + } + + for (int i = 0; i < backreference_length; i++) + { + result[output_end - bytes_output] = result[backreference_offset--]; + bytes_output++; + } + } + else + { + // verbatim byte + result[output_end - bytes_output] = (byte)get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 8); + bytes_output++; + } + } + + br.Close(); + ms.Close(); + + return result; + } + + public byte[] DecompressLegacyCRI(byte[] input, int USize) + { + byte[] result;// = new byte[USize]; + + MemoryStream ms = new MemoryStream(input); + Reader br = new Reader(ms); + br.Endianness = Endianness.BigEndian; + + br.BaseStream.Seek(8, SeekOrigin.Begin); // Skip CRILAYLA + int uncompressed_size = br.ReadInt32(); + int uncompressed_header_offset = br.ReadInt32(); + + result = new byte[uncompressed_size + 0x100]; + + // do some error checks here......... 
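// Worked example for get_next_bits (editorial, not part of the patch): with bit_pool_p == 0xB4
// (binary 1011 0100) and bits_left_p == 8, a request for 3 bits yields (0xB4 >> 5) & 0x07 == 5
// (binary 101) and leaves bits_left_p == 5; a following request for 4 bits yields
// (0xB4 >> 1) & 0x0F == 10 (binary 1010) and leaves bits_left_p == 1. When the pool is empty,
// the next byte is loaded and offset_p is decremented, so the compressed stream is read from
// back to front.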
+ + // copy uncompressed 0x100 header to start of file + Array.Copy(input, uncompressed_header_offset + 0x10, result, 0, 0x100); + + int input_end = input.Length - 0x100 - 1; + int input_offset = input_end; + int output_end = 0x100 + uncompressed_size - 1; + byte bit_pool = 0; + int bits_left = 0, bytes_output = 0; + int[] vle_lens = new int[4] { 2, 3, 5, 8 }; + + while (bytes_output < uncompressed_size) + { + if (get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 1) > 0) + { + int backreference_offset = output_end - bytes_output + get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 13) + 3; + int backreference_length = 3; + int vle_level; + + for (vle_level = 0; vle_level < vle_lens.Length; vle_level++) + { + int this_level = get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, vle_lens[vle_level]); + backreference_length += this_level; + if (this_level != ((1 << vle_lens[vle_level]) - 1)) break; + } + + if (vle_level == vle_lens.Length) + { + int this_level; + do + { + this_level = get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 8); + backreference_length += this_level; + } while (this_level == 255); + } + + for (int i = 0; i < backreference_length; i++) + { + result[output_end - bytes_output] = result[backreference_offset--]; + bytes_output++; + } + } + else + { + // verbatim byte + result[output_end - bytes_output] = (byte)get_next_bits(input, ref input_offset, ref bit_pool, ref bits_left, 8); + bytes_output++; + } + } + + br.Close(); + ms.Close(); + + return result; + } + +} \ No newline at end of file diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKDataFormat.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKDataFormat.cs new file mode 100644 index 0000000..98d68ca --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKDataFormat.cs @@ -0,0 +1,897 @@ +// +// CPKDataFormat.cs - implementation of CRI Middleware CPK archive +// +// Author: +// Michael Becker +// +// Copyright (c) 2019-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.CPK; + +using System; +using System.Collections.Generic; +using MBS.Core; +using MBS.Core.Settings; + +using MBS.Editor.Core; +using MBS.Editor.Core.IO; +using MBS.Editor.Core.ObjectModels.Database; +using MBS.Editor.Core.ObjectModels.FileSystem; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; +using MBS.Editor.Plugins.CRI.DataFormats.Database.UTF; + +/// +/// A for loading and saving archives in CRI Middleware CPK format. 
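// Structural overview (editorial, not part of the patch): a CPK archive as handled by this class
// is a sequence of chunks, each framed as a 4-byte signature ("CPK ", "TOC ", "ITOC", "GTOC" or
// "ETOC"), a 32-bit field observed as 255, a 64-bit size of the embedded @UTF table, and the
// @UTF table itself (possibly scrambled). The "CPK " header table points at the optional chunks
// via TocOffset/ItocOffset/GtocOffset/EtocOffset; TOC rows carry directory names, file names,
// sizes and offsets, ITOC carries ID-keyed entries in DataL/DataH sub-tables, and ETOC carries
// per-file UpdateDateTime timestamps.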
+/// +public class CPKDataFormat : DataFormat +{ + private static DataFormatMetadata _dfr; + public static DataFormatMetadata Metadata + { + get + { + if (_dfr == null) + { + _dfr = new DataFormatMetadata(); + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new ChoiceSetting(nameof(Mode), "File access _method", CPKFileMode.IDFilename, new ChoiceSetting.ChoiceSettingValue[] + { + new ChoiceSetting.ChoiceSettingValue("IDOnly", "ID only", CPKFileMode.IDOnly), + new ChoiceSetting.ChoiceSettingValue("FilenameOnly", "Filename only", CPKFileMode.FilenameOnly), + new ChoiceSetting.ChoiceSettingValue("IDFilename", "ID + Filename", CPKFileMode.IDFilename), + new ChoiceSetting.ChoiceSettingValue("FilenameGroup", "Filename + Group (Attribute)", CPKFileMode.FilenameGroup), + new ChoiceSetting.ChoiceSettingValue("IDGroup", "ID + Group (Attribute)", CPKFileMode.IDGroup), + new ChoiceSetting.ChoiceSettingValue("FilenameIDGroup", "Filename + ID + Group (Attribute)", CPKFileMode.FilenameIDGroup) + }) + { Description = "Choose the method by which files should be accessed in the resulting archive." }); + + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new TextSetting(nameof(VersionString), "_Version string", "CPKMC2.14.00, DLL2.74.00") + { Description = "Override the version string written by the creator program." }); + + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new RangeSetting(nameof(SectorAlignment), "Sector _alignment", 2048, 0, 2048) + { Description = "Choose the alignment for each file in the resulting archive (between 1 and 2048 in powers of 2)." }); + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new BooleanSetting(nameof(ScrambleDirectoryInformation), "_Scramble directory information") + { Description = "Encrypt the directory information in the resulting archive (contents are NOT encrypted)." }); + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new BooleanSetting(nameof(ForceCompression), "_Force compression") + { Description = "Attempt to compress all files in the archive regardless of individual file compression setting." }); + _dfr.ExportSettings.SettingsGroups[0].Settings.Add(new BooleanSetting(nameof(ForceCompression), "Include _CRC information") + { Description = "Generate CRC checksum information (about 4 additional bytes per file) for each file." }); + + _dfr.ExportSettings.SettingsGroups.Add(new SettingsGroup("CPK File Setting", new Setting[] + { + new BooleanSetting("TopTocInformation", "Write TOC at beginning of file", false) + { Description = "Media with slow seek time may be able to load the information fast, but may take up more space." }, + new BooleanSetting("RandomDataPadding", "_Random data padding", false) + { Description = "Pad file contents with random data instead of zero byte." }, + new BooleanSetting("RemoveLocalInfo", "Do not write _local filename information", false) + { Description = "When enabled, new files cannot be added." }, + new BooleanSetting("RemoveTimestampInfo", "Do not write _timestamp information", false) + { Description = "When enabled, data cannot be added." } + })); + } + return _dfr; + } + } + + public CPKFileMode Mode { get; set; } = CPKFileMode.FilenameOnly; + + /// + /// Gets or sets the version string which contains information about the library which created the archive. The default value is "CPKMC2.14.00, DLL2.74.00" which is the version string that official CRI Middleware CPK tools use. + /// + /// The version string. 
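Editorial configuration sketch, not part of the patch: the export-related properties declared around here can be set directly on the data format before saving; the values shown are only examples.

CPKDataFormat cpk = new CPKDataFormat();
cpk.Mode = CPKFileMode.IDFilename;          // address files by both ID and file name
cpk.SectorAlignment = 2048;                 // default sector alignment
cpk.ScrambleDirectoryInformation = false;   // leave the directory UTF tables unencrypted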
+ public string VersionString { get; set; } = "CPKMC2.14.00, DLL2.74.00"; // "CPKFBSTD1.49.34, DLL3.24.00" + + public bool ScrambleDirectoryInformation { get; set; } = false; + public bool ForceCompression { get; set; } = false; + + /// + /// Gets or sets the sector alignment, in bytes, of the CPK archive. The default value is 2048. + /// + /// The file alignment. + public int SectorAlignment { get; set; } = 2048; + + // these are mainly for the benefit of the CRI Extensions for FileSystemEditor + + private byte[] _HeaderData = null; + /// + /// Returns the raw data from the initial "CPK " chunk of this file, or if this chunk does not exist. + /// + /// The raw data from the "CPK " chunk. + public byte[] HeaderData { get { return _HeaderData; } } + /// + /// Returns the UTF table from the initial "CPK " chunk of this file, or if this chunk does not exist. + /// + /// The header table. + public DatabaseTable HeaderTable { get; private set; } = null; + + private byte[] _TocData = null; + /// + /// Returns the raw data from the "TOC " chunk of this file, or NULL if this chunk does not exist. + /// + /// The raw data from the "TOC " chunk. + public byte[] TocData { get { return _TocData; } } + + private byte[] _ITocData = null; + /// + /// Returns the raw data from the "ITOC" chunk of this file, or NULL if this chunk does not exist. + /// + /// The raw data from the "ITOC" chunk. + public byte[] ITocData { get { return _ITocData; } } + + private byte[] _GTocData = null; + /// + /// Returns the raw data from the "GTOC" chunk of this file, or NULL if this chunk does not exist. + /// + /// The raw data from the "GTOC" chunk. + public byte[] GTocData { get { return _GTocData; } } + + private byte[] _ETocData = null; + /// + /// Returns the raw data from the final "ETOC" chunk of this file, or NULL if this chunk does not exist. + /// + /// The raw data from the "ETOC" chunk. + public byte[] ETocData { get { return _ETocData; } } + + /// + /// Loads the data from the input . + /// + /// A into which to load archive content. + protected override void LoadInternal(ObjectModel objectModel, Stream stream) + { + FileSystemObjectModel? fsom = objectModel as FileSystemObjectModel; + if (fsom == null) + throw new ObjectModelNotSupportedException(); + + fsom.CustomDetails.Add("CRI.CPK.FileID", "ID"); + fsom.CustomDetails.Add("CRI.CPK.CRC", "CRC"); + + Reader br = new Reader(stream); + + // Rebuilt based on cpk_unpack + // Rebuilt AGAIN based on github.com/esperknight/CriPakTools + DatabaseObjectModel utf_om = ReadUTF("CPK ", br, out _HeaderData); + int utf_checksum = br.ReadInt32(); // maybe checksum? + + DatabaseTable dtUTF = utf_om.Tables[0]; + HeaderTable = dtUTF; + if (objectModel is DatabaseObjectModel) + { + (objectModel as DatabaseObjectModel).Tables.Add(dtUTF); + } + + DatabaseTable dtUTFTOC = null, dtUTFITOC = null, dtUTFITOC_L = null, dtUTFITOC_H = null, dtUTFETOC = null; + + if (dtUTF.Records[0].Fields["CpkMode"]?.Value != null) + { + Mode = (CPKFileMode)(uint)(dtUTF.Records[0].Fields["CpkMode"].Value); + } + if (dtUTF.Records[0].Fields["Tvers"].Value != null) + { + VersionString = dtUTF.Records[0].Fields["Tvers"].Value.ToString(); + } + if (dtUTF.Records[0].Fields["CrcTable"]?.Value != null) + { + + } + + // UTF table parsing works now, so no need to hardcode toc offset - WOOHOO!!! 
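// Editorial note, not part of the patch: other scalar header fields can be pulled out the same
// way as CpkMode and Tvers above; for example, reading the stored sector alignment (which this
// loader does not currently consume) could look like:
//   object align = dtUTF.Records[0].Fields["Align"]?.Value;
//   if (align != null) SectorAlignment = Convert.ToInt32(align);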
+ if (dtUTF.Records[0].Fields["TocOffset"].Value != null) + { + ulong tocOffset = (ulong)dtUTF.Records[0].Fields["TocOffset"].Value; + br.BaseStream.Seek((long)tocOffset, SeekOrigin.Begin); + + utf_om = ReadUTF("TOC ", br, out _TocData); + + dtUTFTOC = utf_om.Tables[0]; + } + if (dtUTF.Records[0].Fields["ItocOffset"].Value != null) + { + // Index TOC + ulong itocOffset = (ulong)dtUTF.Records[0].Fields["ItocOffset"].Value; + br.BaseStream.Seek((long)itocOffset, SeekOrigin.Begin); + + utf_om = ReadUTF("ITOC", br, out _ITocData); + + dtUTFITOC = utf_om.Tables[0]; + + byte[] dtUTFITOC_L_data = (dtUTFITOC.Records[0].Fields["DataL"]?.Value as byte[]); + byte[] dtUTFITOC_H_data = (dtUTFITOC.Records[0].Fields["DataH"]?.Value as byte[]); + + if (dtUTFITOC_L_data != null) + { + DatabaseObjectModel _lutfom = new DatabaseObjectModel(); + UTFDataFormat _lutfdf = new UTFDataFormat(); + Document.Load(_lutfom, _lutfdf, new MemoryStream(dtUTFITOC_L_data)); + dtUTFITOC_L = _lutfom.Tables[0]; + } + if (dtUTFITOC_H_data != null) + { + DatabaseObjectModel _lutfom = new DatabaseObjectModel(); + UTFDataFormat _lutfdf = new UTFDataFormat(); + Document.Load(_lutfom, _lutfdf, new MemoryStream(dtUTFITOC_H_data)); + dtUTFITOC_H = _lutfom.Tables[0]; + } + } + if (dtUTF.Records[0].Fields["GtocOffset"].Value != null) + { + // Groups TOC + ulong gtocOffset = (ulong)dtUTF.Records[0].Fields["GtocOffset"].Value; + br.BaseStream.Seek((long)gtocOffset, SeekOrigin.Begin); + + utf_om = ReadUTF("GTOC", br, out _GTocData); + + DatabaseTable dtUTFGTOC = utf_om.Tables[0]; + + DatabaseTable dtCpkGtocAttr = utf_om.Tables["CpkGtocAttr"]; + List listAttribs = new List(); + if (dtCpkGtocAttr != null) + { + for (int i = 0; i < dtCpkGtocAttr.Records.Count; i++) + { + string attribName = dtCpkGtocAttr.Records[i].Fields["Aname"]?.Value?.ToString(); + listAttribs.Add(attribName); + } + } + } + + if (dtUTFTOC != null) + { + if (objectModel is DatabaseObjectModel) + { + (objectModel as DatabaseObjectModel).Tables.Add(dtUTFTOC); + } + else if (objectModel is FileSystemObjectModel) + { + for (int i = 0; i < dtUTFTOC.Records.Count; i++) + { + string dirName = (string)dtUTFTOC.Records[i].Fields["DirName"].Value; + string fileTitle = (string)dtUTFTOC.Records[i].Fields["FileName"].Value; + string fileName = fileTitle; + if (!String.IsNullOrEmpty(dirName)) + { + fileName = dirName + '/' + fileTitle; + } + + uint compressedLength = (uint)dtUTFTOC.Records[i].Fields["FileSize"].Value; + uint decompressedLength = (uint)dtUTFTOC.Records[i].Fields["ExtractSize"].Value; + ulong offset = (ulong)dtUTFTOC.Records[i].Fields["FileOffset"].Value; + + ulong lTocOffset = (ulong)dtUTF.Records[0].Fields["TocOffset"].Value; + ulong lContentOffset = (ulong)dtUTF.Records[0].Fields["ContentOffset"].Value; + + // HACK: according to kamikat cpk tools, the real content offset is whichever is smaller TocOffset vs ContentOffset + // https://github.com/kamikat/cpktools/blob/master/cpkunpack.py + // this feels EXTREMELY hacky, but it works... 
for now + ulong lRealContentOffset = Math.Min(lTocOffset, lContentOffset); + + offset += lContentOffset; + + FileSystemFile f = fsom.Items.AddFile(fileName); + + if (dtUTFTOC.Records[i].Fields["ID"] != null) + { + uint id = (uint)dtUTFTOC.Records[i].Fields["ID"].Value; + f.CustomDetails["CRI.CPK.FileID"] = id; + } + if (dtUTFTOC.Records[i].Fields["CRC"] != null) + { + f.CustomDetails["CRI.CPK.CRC"] = ((uint)dtUTFTOC.Records[i].Fields["CRC"].Value).ToString("x"); + } + f.Source = new CompressedEmbeddedFileSource(stream, (long)lContentOffset, compressedLength, decompressedLength); + if (compressedLength != decompressedLength) + { + ((CompressedEmbeddedFileSource)f.Source).CompressionModule = new CPKCompressionModule(); + } + } + } + } + else if (dtUTFITOC_L != null || dtUTFITOC_H != null) + { + ulong lContentOffset = (ulong)dtUTF.Records[0].Fields["ContentOffset"].Value; + ulong offset = lContentOffset; + + List list = new List(); + if (dtUTFITOC_L != null) + { + for (int i = 0; i < dtUTFITOC_L.Records.Count; i++) + { + ushort decompressedLength = (ushort)dtUTFITOC_L.Records[i].Fields["FileSize"].Value; + ushort compressedLength = (ushort)dtUTFITOC_L.Records[i].Fields["ExtractSize"].Value; + + FileSystemFile f = new FileSystemFile(); + + ushort id = (ushort)dtUTFITOC_L.Records[i].Fields["ID"].Value; + f.Name = id.ToString(); + f.CustomDetails["CRI.CPK.FileID"] = id; + + f.Source = new CompressedEmbeddedFileSource(stream, (long)offset, compressedLength, decompressedLength); + list.Add(f); + } + } + if (dtUTFITOC_H != null) + { + for (int i = 0; i < dtUTFITOC_H.Records.Count; i++) + { + uint decompressedLength = (uint)dtUTFITOC_H.Records[i].Fields["FileSize"].Value; + uint compressedLength = (uint)dtUTFITOC_H.Records[i].Fields["ExtractSize"].Value; + + FileSystemFile f = new FileSystemFile(); + + ushort id = (ushort)dtUTFITOC_H.Records[i].Fields["ID"].Value; + f.Name = id.ToString(); + f.CustomDetails["CRI.CPK.FileID"] = id; + + f.Source = new CompressedEmbeddedFileSource(stream, 0, compressedLength, decompressedLength); + list.Add(f); + } + } + + // sort them by ID - this is important because the data is stored contiguously + list.Sort(new Comparison((x, y) => ((ushort)x.CustomDetails["CRI.CPK.FileID"]).CompareTo((ushort)y.CustomDetails["CRI.CPK.FileID"]))); + + for (int i = 0; i < list.Count; i++) + { + CompressedEmbeddedFileSource? source = list[i].Source as CompressedEmbeddedFileSource; + if (source != null) + { + source.Offset = (long)offset; + } + + offset += (uint)source.CompressedLength; + offset = offset.Align((ulong)SectorAlignment); + + fsom.Items.Add(list[i]); + } + } + + if (dtUTF.Records[0].Fields["EtocOffset"].Value != null) + { + ulong etocOffset = (ulong)dtUTF.Records[0].Fields["EtocOffset"].Value; + br.BaseStream.Seek((long)etocOffset, SeekOrigin.Begin); + + utf_om = ReadUTF("ETOC", br, out _ETocData); + dtUTFETOC = utf_om.Tables[0]; + } + + if (dtUTFETOC != null) + { + if (objectModel is DatabaseObjectModel) + { + (objectModel as DatabaseObjectModel).Tables.Add(dtUTFETOC); + } + else if (objectModel is FileSystemObjectModel) + { + for (int i = 0; i < dtUTFETOC.Records.Count; i++) + { + ulong updateDateTime = (ulong)dtUTFETOC.Records[i].Fields["UpdateDateTime"].Value; + string localDir = (string)dtUTFETOC.Records[i].Fields["LocalDir"].Value; + + if (i >= fsom.Items.Count) + continue; + + FileSystemFile? 
f = fsom.Items[i] as FileSystemFile; + if (f != null) + { + byte[] updateDateTimeBytes = BitConverter.GetBytes(updateDateTime); + // remember, the CPK is big-endian, but the UTF is little-endian + ushort updateDateTimeYear = BitConverter.ToUInt16(new byte[] { updateDateTimeBytes[6], updateDateTimeBytes[7] }, 0); + byte updateDateTimeMonth = updateDateTimeBytes[5]; + byte updateDateTimeDay = updateDateTimeBytes[4]; + byte updateDateTimeHour = updateDateTimeBytes[3]; + byte updateDateTimeMinute = updateDateTimeBytes[2]; + byte updateDateTimeSecond = updateDateTimeBytes[1]; + byte updateDateTimeMs = updateDateTimeBytes[0]; + + f.ModificationTimestamp = new DateTime(updateDateTimeYear, updateDateTimeMonth, updateDateTimeDay, updateDateTimeHour, updateDateTimeMinute, updateDateTimeSecond, updateDateTimeMs); + } + } + } + } + } + + private DatabaseObjectModel ReadUTF(string expectedSignature, Reader br, out byte[] data) + { + string tocSignature = br.ReadFixedLengthString(4); + if (tocSignature != expectedSignature) + throw new InvalidDataFormatException(); + + int unknown1 = br.ReadInt32(); // always 255? + + // UTF table for TOC + long utf_size = br.ReadInt64(); // size of UTF including "@UTF" + + byte[] utf_data = br.ReadBytes(utf_size); + + MemoryStream ma = new MemoryStream(utf_data); + Reader r = new Reader(ma); + string utf_signature = r.ReadFixedLengthString(4); + if (utf_signature != "@UTF") + { + ScrambleDirectoryInformation = true; + // encrypted? + utf_data = DecryptUTF(utf_data); + ma = new MemoryStream(utf_data); + r = new Reader(ma); + } + else + { + ma.Seek(-4, SeekOrigin.Current); + } + + UTFDataFormat utf_df = new UTFDataFormat(); + DatabaseObjectModel utf_om = new DatabaseObjectModel(); + Document.Load(utf_om, utf_df, ma); + + data = utf_data; + return utf_om; + } + /* + void f_DataRequest(object sender, DataRequestEventArgs e) + { + File f = (sender as File); + uint decompressedLength = (uint)f.Properties["DecompressedLength"]; + uint compressedLength = (uint)f.Properties["CompressedLength"]; + ulong offset = (ulong)f.Properties["Offset"]; + Reader br = (Reader)f.Properties["Reader"]; + + br.Accessor.Position = (long)offset; + + byte[] decompressedData = null; + if (compressedLength == 0) + { + decompressedData = br.ReadBytes(decompressedLength); + } + else + { + byte[] compressedData = br.ReadBytes(compressedLength); + decompressedData = //compress() // compressedData; + } + + e.Data = decompressedData; + } + */ + + private DatabaseObjectModel BuildHeaderUTF(FileSystemObjectModel fsom, int tocsize, ulong contentOffset, ulong contentSize, ulong etocOffset, ulong etocLength, ulong? itocOffset, ulong? 
itocLength) + { + FileSystemFile[] files = fsom.Items.GetFiles(); + + DatabaseTable dt = new DatabaseTable(); + dt.Name = "CpkHeader"; + dt.Fields.Add("UpdateDateTime", null, typeof(Int64)); + dt.Fields.Add("FileSize", null, typeof(Int64)); + dt.Fields.Add("ContentOffset", null, typeof(Int64)); + dt.Fields.Add("ContentSize", null, typeof(Int64)); + dt.Fields.Add("TocOffset", null, typeof(Int64)); + dt.Fields.Add("TocSize", null, typeof(Int64)); + dt.Fields.Add("TocCrc", null, typeof(uint)); + + // added in newer version CpkFileBuilder + // dt.Fields.Add("HtocOffset", null, typeof(Int64)); + // dt.Fields.Add("HtocSize", null, typeof(Int64)); + + dt.Fields.Add("EtocOffset", null, typeof(Int64)); + dt.Fields.Add("EtocSize", null, typeof(Int64)); + dt.Fields.Add("ItocOffset", null, typeof(Int64)); + dt.Fields.Add("ItocSize", null, typeof(Int64)); + dt.Fields.Add("ItocCrc", null, typeof(uint)); + dt.Fields.Add("GtocOffset", null, typeof(Int64)); + dt.Fields.Add("GtocSize", null, typeof(Int64)); + dt.Fields.Add("GtocCrc", null, typeof(uint)); + + // added in newer version CpkFileBuilder + // dt.Fields.Add("HgtocOffset", null, typeof(Int64)); + // dt.Fields.Add("HgtocSize", null, typeof(Int64)); + + dt.Fields.Add("EnabledPackedSize", null, typeof(Int64)); + dt.Fields.Add("EnabledDataSize", null, typeof(Int64)); + dt.Fields.Add("TotalDataSize", null, typeof(Int64)); + dt.Fields.Add("Tocs", null, typeof(uint)); + dt.Fields.Add("Files", null, typeof(uint)); + dt.Fields.Add("Groups", null, typeof(uint)); + dt.Fields.Add("Attrs", null, typeof(uint)); + dt.Fields.Add("TotalFiles", null, typeof(uint)); + dt.Fields.Add("Directories", null, typeof(uint)); + dt.Fields.Add("Updates", null, typeof(uint)); + dt.Fields.Add("Version", null, typeof(Int16)); + dt.Fields.Add("Revision", null, typeof(Int16)); + dt.Fields.Add("Align", null, typeof(Int16)); + dt.Fields.Add("Sorted", null, typeof(Int16)); + + // added in newer version CpkFileBuilder + // dt.Fields.Add("EnableFileName", null, typeof(Int16)); + + dt.Fields.Add("EID", null, typeof(Int16)); + dt.Fields.Add("CpkMode", null, typeof(uint)); + dt.Fields.Add("Tvers", null, typeof(string)); + dt.Fields.Add("Comment", null, typeof(string)); + dt.Fields.Add("Codec", null, typeof(uint)); + dt.Fields.Add("DpkItoc", null, typeof(uint)); + + //added in newer version CpkFileBuilder + // dt.Fields.Add("EnableTocCrc", null, typeof(Int16)); + // dt.Fields.Add("EnableFileCrc", null, typeof(Int16)); + // dt.Fields.Add("CrcMode", null, typeof(uint)); + // dt.Fields.Add("CrcTable", null, typeof(byte[])); + + // cri, go home, you're drunk + ulong enabledPackedSize = 0; + for (uint i = 0; i < files.Length; i++) + { + enabledPackedSize += (ulong) files[i].Size; + } + enabledPackedSize *= 2; + + ulong enabledDataSize = enabledPackedSize; + + dt.Records.Add(new DatabaseRecord(new DatabaseField[] + { + new DatabaseField("UpdateDateTime", (ulong)1), + new DatabaseField("FileSize", null), + new DatabaseField("ContentOffset", contentOffset), // 18432 , should be 20480 + new DatabaseField("ContentSize", contentSize), // 8217472, should be 8564736 (347264 difference!) 
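+				// TocOffset is written as a single sector from the start of the file: SaveInternal below
+				// aligns the CpkHeader chunk to SectorAlignment before emitting the "TOC " chunk, so this
+				// assumes the header UTF always fits inside the first sector.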
+ new DatabaseField("TocOffset", (ulong)SectorAlignment), + new DatabaseField("TocSize", (ulong)tocsize), + new DatabaseField("TocCrc", null), + + // added in newer version CpkFileBuilder + // new DatabaseField("HtocOffset", 0), + // new DatabaseField("HtocSize", 0), + + new DatabaseField("EtocOffset", etocOffset), + new DatabaseField("EtocSize", etocLength), + new DatabaseField("ItocOffset", itocOffset), + new DatabaseField("ItocSize", itocLength), + new DatabaseField("ItocCrc", null), + new DatabaseField("GtocOffset", null), + new DatabaseField("GtocSize", null), + new DatabaseField("GtocCrc", null), + + // added in newer version CpkFileBuilder + // new DatabaseField("HgtocOffset", null), + // new DatabaseField("HgtocSize", null), + + new DatabaseField("EnabledPackedSize", enabledPackedSize), //16434944 in diva2script.cpk + new DatabaseField("EnabledDataSize", enabledDataSize), + new DatabaseField("TotalDataSize", null), + new DatabaseField("Tocs", null), + new DatabaseField("Files", (uint)files.Length), + new DatabaseField("Groups", (uint)0), + new DatabaseField("Attrs", (uint)0), + new DatabaseField("TotalFiles", null), + new DatabaseField("Directories", null), + new DatabaseField("Updates", null), + new DatabaseField("Version", (ushort)7), + new DatabaseField("Revision", (ushort)0), + new DatabaseField("Align", (ushort)SectorAlignment), + new DatabaseField("Sorted", (ushort)1), + + // added in newer version CpkFileBuilder + // new DatabaseField("EnableFileName", (ushort)0), + + new DatabaseField("EID", (ushort)1), + new DatabaseField("CpkMode", (uint)Mode), + new DatabaseField("Tvers", VersionString), + new DatabaseField("Comment", null), + new DatabaseField("Codec", (uint)0), + new DatabaseField("DpkItoc", (uint)0), + + // added in newer version CpkFileBuilder + // new DatabaseField("EnableTocCrc", (short)0), + // new DatabaseField("EnableFileCrc", (short)0), + // new DatabaseField("CrcMode", (uint)0), + // new DatabaseField("CrcTable", null) + })); + + DatabaseObjectModel db = new DatabaseObjectModel(); + db.Tables.Add(dt); + return db; + } + + private ulong GetUtfTableSize(DatabaseTable dt) + { + ulong size = (ulong)(32 + (dt.Fields.Count * 5)); + for (int i = 0; i < dt.Fields.Count; i++) + { + if (dt.Fields[i].Value == null) + { + // perrow + for (int j = 0; j < dt.Records.Count; j++) + { + size += (ulong)UTFDataFormat.GetLengthForDataType(UTFDataFormat.UTFDataTypeForSystemDataType(dt.Fields[i].DataType)); + } + } + } + return size; + } + + private DatabaseObjectModel BuildTocUTF(FileSystemFile[] files, ulong initialFileOffset, ref IDOFFSET[] sortedOffsets) + { + DatabaseTable dt = new DatabaseTable(); + dt.Name = "CpkTocInfo"; + dt.Fields.Add("DirName", String.Empty, typeof(string)); + dt.Fields.Add("FileName", null, typeof(string)); + dt.Fields.Add("FileSize", null, typeof(uint)); + dt.Fields.Add("ExtractSize", null, typeof(uint)); + dt.Fields.Add("FileOffset", null, typeof(ulong)); + dt.Fields.Add("ID", null, typeof(uint)); + dt.Fields.Add("UserString", "", typeof(string)); + + ulong offset = initialFileOffset; + offset -= (ulong) SectorAlignment; // idk? 
+ + List offsets = new List(files.Length); + for (int i = 0; i < files.Length; i++) + { + offsets.Add(new IDOFFSET(i, (uint)files[i].CustomDetails.GetValueOrDefault("ID", 0U), 0, (ulong) files[i].Size)); + } + offsets.Sort((x, y) => x.ID.CompareTo(y.ID)); + + sortedOffsets = offsets.ToArray(); + + for (int i = 0; i < files.Length; i++) + { + offsets[i] = new IDOFFSET(offsets[i].INDEX, offsets[i].ID, offset, offsets[i].SIZE); + offset += offsets[i].SIZE; + offset = offset.Align((ulong)SectorAlignment); + } + offsets.Sort((x, y) => x.INDEX.CompareTo(y.INDEX)); + + for (int i = 0; i < files.Length; i++) + { + dt.Records.Add(new DatabaseRecord(new DatabaseField[] + { + new DatabaseField("DirName", null), + new DatabaseField("FileName", files[i].Name), + new DatabaseField("FileSize", (uint)files[i].Size), + new DatabaseField("ExtractSize", (uint)files[i].Size), + new DatabaseField("FileOffset", offsets[i].OFFSET), + new DatabaseField("ID", (uint)files[i].CustomDetails.GetValueOrDefault("ID", (uint)i)), + new DatabaseField("UserString", "") + })); + } + + DatabaseObjectModel db = new DatabaseObjectModel(); + db.Tables.Add(dt); + return db; + } + private DatabaseObjectModel BuildEtocUTF(FileSystemFile[] files) + { + DatabaseTable dt = new DatabaseTable(); + dt.Name = "CpkEtocInfo"; + dt.Fields.Add("UpdateDateTime", null, typeof(ulong)); + dt.Fields.Add("LocalDir", String.Empty, typeof(string)); + + for (int i = 0; i < files.Length; i++) + { + DateTime updateDateTime = files[i].ModificationTimestamp.GetValueOrDefault(DateTime.Now); + + // yeaaaaahhh + byte[] updateDateTimeBytes = new byte[8]; + updateDateTimeBytes[0] = (byte)updateDateTime.Millisecond; + updateDateTimeBytes[1] = (byte)updateDateTime.Second; + updateDateTimeBytes[2] = (byte)updateDateTime.Minute; + updateDateTimeBytes[3] = (byte)updateDateTime.Hour; + updateDateTimeBytes[4] = (byte)updateDateTime.Day; + updateDateTimeBytes[5] = (byte)updateDateTime.Month; + + byte[] updateDateTimeYear = BitConverter.GetBytes((ushort)updateDateTime.Year); + updateDateTimeBytes[6] = updateDateTimeYear[0]; + updateDateTimeBytes[7] = updateDateTimeYear[1]; + + dt.Records.Add(new DatabaseRecord(new DatabaseField[] + { + new DatabaseField("UpdateDateTime", BitConverter.ToUInt64(updateDateTimeBytes, 0)), + new DatabaseField("LocalDir", null) + })); + } + + dt.Records.Add(new DatabaseRecord(new DatabaseField[] + { + new DatabaseField("UpdateDateTime", (ulong)0), + new DatabaseField("LocalDir", null) + })); + + DatabaseObjectModel db = new DatabaseObjectModel(); + db.Tables.Add(dt); + return db; + } + private DatabaseObjectModel BuildItocUTF(IDOFFSET[] entries) + { + DatabaseTable dt = new DatabaseTable(); + dt.Name = "CpkExtendId"; + dt.Fields.Add("ID", null, typeof(int)); + dt.Fields.Add("TocIndex", null, typeof(int)); + + for (int i = 0; i < entries.Length; i++) + { + dt.Records.Add(new DatabaseRecord(new DatabaseField[] + { + new DatabaseField("ID", entries[i].ID), + new DatabaseField("TocIndex", entries[i].INDEX) + })); + } + + DatabaseObjectModel db = new DatabaseObjectModel(); + db.Tables.Add(dt); + return db; + } + + /// + /// Applies a simple cipher to decrypt an encrypted UTF sector. + /// + /// The decrypted data. + /// The data to decrypt. 
+ private byte[] DecryptUTF(byte[] input) + { + byte[] result = new byte[input.Length]; + + int m = 0x0000655f, t = 0x00004115; + for (int i = 0; i < input.Length; i++) + { + byte d = input[i]; + d = (byte)(d ^ (byte)(m & 0xff)); + result[i] = d; + m *= t; + } + + return result; + } + + /// + /// Writes the data to the output . + /// + /// A containing the archive content to write. + protected override void SaveInternal(ObjectModel objectModel, Stream stream) + { + FileSystemObjectModel? fsom = objectModel as FileSystemObjectModel; + if (fsom == null) + throw new ObjectModelNotSupportedException(); + + FileSystemFile[] files = fsom.Items.GetAllFiles(); + + Writer bw = new Writer(stream); + bw.WriteFixedLengthString("CPK "); + + bw.WriteInt32(255); // unknown1 + + UTFDataFormat dfUTF = new UTFDataFormat(); + + ulong contentOffset = 16; + ulong etocLength = 0; + ulong itocLength = 0; + ulong headerLength = 0; + ulong tocLength = 0; + + IDOFFSET[] sortedOffsets = null; + { + // TODO: replace all these calls to build methods with a simple calculation function e.g. UTFDataFormat.GetTableSize() ... + // there is no reason to go through the entire file list just to calculate how big a table should be - mind those variable-length strings though + DatabaseObjectModel _tmp_om = BuildHeaderUTF(fsom, 0, 0, 0, 0, 0, 0, 0); // 704 + MemoryStream _tmp_ma = new MemoryStream(); + Document.Save(_tmp_om, dfUTF, _tmp_ma); + contentOffset += (ulong)_tmp_ma.Length; + contentOffset = contentOffset.Align((ulong)SectorAlignment); + + headerLength = (ulong) _tmp_ma.Length; + + _tmp_om = BuildTocUTF(files, 0, ref sortedOffsets); // 117880 + _tmp_ma = new MemoryStream(); + Document.Save(_tmp_om, dfUTF, _tmp_ma); + + contentOffset += 16; + contentOffset += (ulong)_tmp_ma.Length; + contentOffset = contentOffset.Align((ulong)SectorAlignment); + tocLength = (ulong) _tmp_ma.Length; + + _tmp_om = BuildItocUTF(sortedOffsets); + _tmp_ma = new MemoryStream(); + Document.Save(_tmp_om, dfUTF, _tmp_ma); + + contentOffset = contentOffset.RoundToPower((ulong)2); // this is done before the ITOC, apparently. + + contentOffset += 16; + contentOffset += (ulong)_tmp_ma.Length; + contentOffset = contentOffset.Align((ulong)SectorAlignment); + itocLength = (ulong) _tmp_ma.Length; // 21728 + + _tmp_om = BuildEtocUTF(files); + _tmp_ma = new MemoryStream(); + Document.Save(_tmp_om, dfUTF, _tmp_ma); + // contentOffset += (ulong)_tmp_ma.Length; // lol wtf contentoffset isn't affected by ETOC... 
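+			// the ETOC chunk is emitted after the packed file data (etocOffset = contentOffset + contentSize
+			// further down), so it lies past the content region and never moves ContentOffset forward.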
+ etocLength = (ulong) _tmp_ma.Length; // 21752 + } + + DatabaseObjectModel utfTOC = BuildTocUTF(files, contentOffset, ref sortedOffsets); + + MemoryStream maUTFTOC = new MemoryStream(); + Document.Save(utfTOC, dfUTF, maUTFTOC); + + byte[] utfTOC_data = maUTFTOC.ToArray(); + + + ulong contentSize = 0; + for (uint i = 0; i < files.Length; i++) + { + contentSize += (ulong)files[i].Size; + contentSize = contentSize.Align((ulong)SectorAlignment); + } + + ulong itocOffset = 16 + headerLength; + itocOffset = itocOffset.Align((ulong)SectorAlignment); + itocOffset += (16 + tocLength); + itocOffset = itocOffset.Align((ulong)SectorAlignment); + + itocOffset = itocOffset.RoundToPower(2); + + ulong etocOffset = 0; + etocOffset = contentOffset + contentSize; + + DatabaseObjectModel utfHeader = BuildHeaderUTF(fsom, utfTOC_data.Length + 16 /*includes 16-byte 'TOC ' header from CPK*/, contentOffset, contentSize, etocOffset, etocLength + 16, itocOffset, itocLength + 16); + MemoryStream maUTFHeader = new MemoryStream(); + Document.Save(utfHeader, dfUTF, maUTFHeader); + + byte[] utfHeader_data = maUTFHeader.ToArray(); + bw.WriteInt64(utfHeader_data.Length); + bw.WriteBytes(utfHeader_data); + + int __unknown_checksum = -1677552896; // 9634460; + bw.WriteInt32(__unknown_checksum); + + bw.Align(SectorAlignment); + bw.BaseStream.Seek(-6, SeekOrigin.Current); + bw.WriteFixedLengthString("(c)CRI"); + + WriteChunk(bw, "TOC ", utfTOC_data); + bw.Align(SectorAlignment); + + // here comes the ITOC (indexes TOC) UTF table chunk. + DatabaseObjectModel utfITOC = BuildItocUTF(sortedOffsets); + MemoryStream maUTFITOC = new MemoryStream(); + Document.Save(utfITOC, dfUTF, maUTFITOC); + + byte[] utfITOC_data = maUTFITOC.ToArray(); + bw.BaseStream.Seek(bw.BaseStream.Position.RoundToPower(2), SeekOrigin.Begin); + + WriteChunk(bw, "ITOC", utfITOC_data); + bw.Align(SectorAlignment); + + // here comes the file data. each file is aligned to FileAlignment bytes, apparently. + for (uint i = 0; i < sortedOffsets.Length; i++) + { + byte[]? data = files[sortedOffsets[i].INDEX].Source?.GetData(); + if (data != null) + { + bw.WriteBytes(data); + } + bw.Align(SectorAlignment); + } + + DatabaseObjectModel utfETOC = BuildEtocUTF(files); + MemoryStream maUTFETOC = new MemoryStream(); + Document.Save(utfETOC, dfUTF, maUTFETOC); + + byte[] utfETOC_data = maUTFETOC.ToArray(); + bw.Align(SectorAlignment); + WriteChunk(bw, "ETOC", utfETOC_data); + } + + private void WriteChunk(Writer writer, string chunkID, byte[] chunkData) + { + writer.WriteFixedLengthString(chunkID); + writer.WriteInt32(255); + writer.WriteInt64(chunkData.Length); + writer.WriteBytes(chunkData); + } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKFileMode.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKFileMode.cs new file mode 100644 index 0000000..015f9f7 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/CPKFileMode.cs @@ -0,0 +1,32 @@ +// +// CPKFileMode.cs +// +// Author: +// Michael Becker +// +// Copyright (c) 2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.CPK; + +public enum CPKFileMode +{ + IDOnly = 0, + FilenameOnly = 1, + IDFilename = 2, + FilenameGroup = 3, + IDGroup = 4, + FilenameIDGroup = 5 +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/Internal/IDOFFSET.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/Internal/IDOFFSET.cs new file mode 100644 index 0000000..4a205b2 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/DataFormats/FileSystem/CPK/Internal/IDOFFSET.cs @@ -0,0 +1,41 @@ +// +// IDOFFSET.cs - internal structure for representing an ID, offset, and size for a file in a CPK archive +// +// Author: +// Michael Becker +// +// Copyright (c) 2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +namespace MBS.Editor.Plugins.CRI.DataFormats.FileSystem.CPK; + +/// +/// Internal structure for representing an ID, offset, and size for a file in a CPK archive. 
+/// +internal struct IDOFFSET +{ + public int INDEX; + public uint ID; + public ulong OFFSET; + public ulong SIZE; + + public IDOFFSET(int index, uint id, ulong offset, ulong size) + { + INDEX = index; + ID = id; + OFFSET = offset; + SIZE = size; + } +} diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/MBS.Editor.Plugins.CRI.csproj b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/MBS.Editor.Plugins.CRI.csproj new file mode 100644 index 0000000..3add91d --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.CRI/MBS.Editor.Plugins.CRI.csproj @@ -0,0 +1,11 @@ + + + + + + + net8.0 + enable + enable + + \ No newline at end of file diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/AssemblyInfo.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/AssemblyInfo.cs new file mode 100644 index 0000000..4164eff --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Reflection; + +[assembly: AssemblyCompany("Mike Becker's Software")] \ No newline at end of file diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/MBS.Editor.Plugins.Multimedia.csproj b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/MBS.Editor.Plugins.Multimedia.csproj new file mode 100644 index 0000000..c76dfda --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/MBS.Editor.Plugins.Multimedia.csproj @@ -0,0 +1,12 @@ + + + + + + + net8.0 + enable + enable + false + + \ No newline at end of file diff --git a/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/PositionVector2.cs b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/PositionVector2.cs new file mode 100644 index 0000000..ece9b23 --- /dev/null +++ b/editor-dotnet/src/plugins/MBS.Editor.Plugins.Multimedia/PositionVector2.cs @@ -0,0 +1,123 @@ +// +// PositionVector2.cs - provides a tuple indicating X and Y position +// +// Author: +// Michael Becker +// +// Copyright (c) 2011-2020 Mike Becker's Software +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +using System; + +namespace UniversalEditor +{ + /// + /// Provides a tuple indicating X and Y position. + /// + public struct PositionVector2 : ICloneable + { + public bool IsEmpty { get; } + + private PositionVector2(bool empty) + { + X = 0; + Y = 0; + IsEmpty = empty; + } + + /// + /// Represents the empty . This field is read-only. 
+ /// + public static readonly PositionVector2 Empty = new PositionVector2(true); + + public double X { get; set; } + public double Y { get; set; } + + public PositionVector2(float x, float y) + { + X = x; + Y = y; + IsEmpty = false; + } + public PositionVector2(double x, double y) + { + X = x; + Y = y; + IsEmpty = false; + } + + public double[] ToDoubleArray() + { + return new double[] { X, Y }; + } + public float[] ToFloatArray() + { + return new float[] { (float)X, (float)Y }; + } + + public static PositionVector2 operator +(PositionVector2 left, PositionVector2 right) + { + return new PositionVector2(left.X + right.X, left.Y + right.Y); + } + public static PositionVector2 operator -(PositionVector2 left, PositionVector2 right) + { + return new PositionVector2(left.X - right.X, left.Y - right.Y); + } + public static PositionVector2 operator *(PositionVector2 left, PositionVector2 right) + { + return new PositionVector2(left.X * right.X, left.Y * right.Y); + } + public static PositionVector2 operator /(PositionVector2 left, PositionVector2 right) + { + return new PositionVector2(left.X / right.X, left.Y / right.Y); + } + + public override string ToString() + { + return ToString(", ", "(", ")"); + } + public string ToString(string separator, string encloseStart, string encloseEnd) + { + System.Text.StringBuilder sb = new System.Text.StringBuilder(); + if (encloseStart != null) + { + sb.Append(encloseStart); + } + sb.Append(String.Format("{0:0.0#####################}", X)); + sb.Append(separator); + sb.Append(String.Format("{0:0.0#####################}", Y)); + if (encloseEnd != null) + { + sb.Append(encloseEnd); + } + return sb.ToString(); + } + + public object Clone() + { + PositionVector2 clone = new PositionVector2(); + clone.X = X; + clone.Y = Y; + return clone; + } + + public double GetLargestComponentValue() + { + if (X > Y) return X; + if (Y > X) return Y; + return 0.0; + } + } +} diff --git a/editor-dotnet/src/tests/MBS.Editor.Core.Tests/Compression/CompressionTests.cs b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/Compression/CompressionTests.cs new file mode 100644 index 0000000..85495d0 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/Compression/CompressionTests.cs @@ -0,0 +1,141 @@ +using System.Diagnostics; +using MBS.Core.Collections; +using NUnit.Framework.Internal; + +namespace MBS.Editor.Core.Tests.Compression; + +using MBS.Editor.Core.Compression.Modules.Deflate; +using MBS.Editor.Core.Compression.Modules.GZip; +using MBS.Editor.Core.Compression.Modules.LZW; + +[TestFixture] +public class CompressionTests +{ + [SetUp] + public void Setup() + { + } + + public readonly byte[] TEST_DATA = new byte[] { 0xCA, 0xFE, 0xBA, 0xBE, 0xDE, 0xAD, 0xBE, 0xEF }; + + [Test] + public void GZipCompressionModuleTest() + { + Console.WriteLine("GZipCompressionModuleTest"); + + GZipCompressionModule module = new GZipCompressionModule(); + byte[] compressed = module.Compress(TEST_DATA); + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + module = new GZipCompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + Assert.That(decompressed.Length, Is.EqualTo(TEST_DATA.Length)); + for (int i = 0; i < TEST_DATA.Length; i++) + { + Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } + + [Test] + public void DeflateCompressionModuleTest() + { + Console.WriteLine("DeflateCompressionModuleTest"); + + DeflateCompressionModule module = new 
DeflateCompressionModule(); + byte[] compressed = module.Compress(TEST_DATA); + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + module = new DeflateCompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + Assert.That(decompressed.Length, Is.EqualTo(TEST_DATA.Length)); + for (int i = 0; i < TEST_DATA.Length; i++) + { + Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } + + [Test] + public void ZlibBuiltinCompressionModuleTest() + { + Console.WriteLine("ZlibBuiltinCompressionModuleTest"); + + ZlibBuiltinCompressionModule module = new ZlibBuiltinCompressionModule(); + byte[] compressed = module.Compress(TEST_DATA); + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + module = new ZlibBuiltinCompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + Assert.That(decompressed.Length, Is.EqualTo(TEST_DATA.Length)); + for (int i = 0; i < TEST_DATA.Length; i++) + { + Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } + + [Test] + public void LZWCompressionModuleTest() + { + Assert.Ignore("we know we don't yet support compression, only decompression for now"); + + Console.WriteLine("LZWCompressionModuleTest"); + + LZWCompressionModule module = new LZWCompressionModule(); + byte[] compressed = module.Compress(TEST_DATA); + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + module = new LZWCompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + Assert.That(decompressed.Length, Is.EqualTo(TEST_DATA.Length)); + for (int i = 0; i < TEST_DATA.Length; i++) + { + Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } + + [Test] + public void LZWDecompressionOnlyTest() + { + Console.WriteLine("LZWCompressionOnlyTest"); + + byte[] compressed = { 0x1F, 0x9D, 0x90, 0xCA, 0xFC, 0xE9, 0xF2, 0x05, 0xA0, 0xA0, 0xC1, 0x82, 0x23, 0x20, 0x8D, 0x88, 0x42, 0x06, 0x52, 0x18, 0x02, 0x51, 0x24, 0x11, 0x08, 0x73, 0x29, 0xCA, 0x01, 0x30, 0x83, 0x8A, 0x50, 0x1A, 0x03, 0x24, 0x4C, 0xA5, 0x20, 0x90, 0x8C, 0x5C, 0xB9, 0x10, 0x80, 0x46, 0x9A, 0x09, 0x06, 0x32, 0x40, 0x91, 0x50, 0x23, 0x01, 0xCB, 0x00, 0x29, 0xC2, 0x0C, 0x01, 0x33, 0xC1, 0x48, 0x00, 0x23, 0x19, 0x68, 0x80, 0xA1, 0x90, 0x86, 0x42, 0x82, 0x30, 0x1D, 0x3B, 0x0E, 0xC8, 0x52, 0x47, 0xD2, 0x81, 0x12, 0x97, 0x40, 0x9C, 0x04, 0x32, 0x06, 0x4C, 0x24, 0x1A, 0x71, 0x68, 0x14, 0x08, 0x63, 0xE0, 0xE9, 0x81, 0x2B, 0x50, 0x12, 0x55, 0x48, 0x80, 0xC1, 0x8A, 0xCB, 0x41, 0x57, 0x02, 0x4C, 0x42, 0x12, 0xE6, 0x46, 0x02, 0x2C, 0x71, 0xCE, 0x5E, 0x80, 0x82, 0xE9, 0x42, 0x11, 0x48, 0x18, 0xA0, 0x64, 0xE0, 0x9A, 0x40, 0x51, 0x82, 0x43, 0x60, 0xEC, 0x28, 0xC0, 0x04, 0x07, 0xD3, 0xA9, 0x04, 0x80, 0x30, 0x35, 0xD0, 0x04, 0x28, 0x5B, 0xA3, 0x6C, 0x80, 0xB4, 0x5D, 0x3A, 0x63, 0x00, 0x0E, 0x18, 0x40, 0x9F, 0x3E, 0xFD, 0x9B, 0x4C, 0x99, 0xB2, 0xB7, 0x56, 0xBE, 0xDE, 0x1D, 0xDC, 0x0C }; + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + LZWCompressionModule module = new LZWCompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + // Assert.That(decompressed.Length, Is.EqualTo(compressed.Length)); + for (int i = 0; i < 4; i++) + { + 
Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } + + [Test] + public void BZip2CompressionModuleTest() + { + Console.WriteLine("BZip2CompressionModuleTest"); + + BZip2CompressionModule module = new BZip2CompressionModule(); + byte[] compressed = module.Compress(TEST_DATA); + Console.WriteLine("compressed bytes: " + compressed.ToString(" ", "x")); + + module = new BZip2CompressionModule(); + byte[] decompressed = module.Decompress(compressed); + Console.WriteLine("decompressed bytes: " + decompressed.ToString(" ", "x")); + + Assert.That(decompressed.Length, Is.EqualTo(TEST_DATA.Length)); + for (int i = 0; i < TEST_DATA.Length; i++) + { + Assert.That(decompressed[i], Is.EqualTo(TEST_DATA[i])); + } + } +} diff --git a/editor-dotnet/src/tests/MBS.Editor.Core.Tests/GlobalUsings.cs b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/GlobalUsings.cs new file mode 100644 index 0000000..cefced4 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/GlobalUsings.cs @@ -0,0 +1 @@ +global using NUnit.Framework; \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Core.Tests/MBS.Editor.Core.Tests.csproj b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/MBS.Editor.Core.Tests.csproj new file mode 100644 index 0000000..050e4ca --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/MBS.Editor.Core.Tests.csproj @@ -0,0 +1,19 @@ + + + net8.0 + enable + enable + false + true + + + + + + + + + + + + \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemItemCollectionTests.cs b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemItemCollectionTests.cs new file mode 100644 index 0000000..de922a8 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemItemCollectionTests.cs @@ -0,0 +1,45 @@ + +using MBS.Editor.Core.ObjectModels.FileSystem; + +namespace MBS.Editor.Core.Tests.ObjectModels.FileSystem; + +public class FileSystemItemCollectionTests +{ + [ Test()] + public static void AddFolderWithEmptyNameTest() + { + // the reason this test exists + // is because I screwed something up + // and caused a stack overflow when adding a Folder with empty name =(^.^)= + FileSystemObjectModel fsom = new FileSystemObjectModel(); + + FileSystemFolder folder = fsom.Items.AddFolder(""); + Assert.That(folder, Is.Not.Null); + Assert.That(folder.Name, Is.EqualTo(String.Empty)); + } + [Test()] + public static void AddFolderWithSingleNameTest() + { + // the reason this test exists + // is... 
just to make sure this works + FileSystemObjectModel fsom = new FileSystemObjectModel(); + + FileSystemFolder folder = fsom.Items.AddFolder("System32"); + Assert.That(folder, Is.Not.Null); + Assert.That(folder.Name, Is.EqualTo("System32")); + } + [Test()] + public static void AddFolderWithPathTest() + { + // the reason THIS test exists + // is because I screwed SOMETHING ELSE up + // and ended up messing up the entire AddFolder implementation (x .x) + FileSystemObjectModel fsom = new FileSystemObjectModel(); + + FileSystemFolder folder = fsom.Items.AddFolder("C/Windows/System32/Drivers"); + Assert.That(folder, Is.Not.Null); + Assert.That(folder.Name, Is.EqualTo("Drivers")); + Assert.That(folder.Parent, Is.Not.Null); + Assert.That(((FileSystemFolder)folder.Parent).Name, Is.EqualTo("System32")); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemTests.cs b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemTests.cs new file mode 100644 index 0000000..aa57360 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Core.Tests/ObjectModels/FileSystem/FileSystemTests.cs @@ -0,0 +1,36 @@ +using MBS.Editor.Core.ObjectModels.FileSystem; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; + +namespace MBS.Editor.Core.Tests.ObjectModels.FileSystem; + +[TestFixture] +public class FileSystemTests +{ + [SetUp] + public void Setup() + { + } + + public readonly byte[] TEST_DATA = new byte[] { 0xCA, 0xFE, 0xBA, 0xBE, 0xDE, 0xAD, 0xBE, 0xEF }; + + [Test] + public void ByteArrayFileSourceTest() + { + ByteArrayFileSource source = new ByteArrayFileSource(TEST_DATA); + byte[] data = source.GetData(4, 2); + + Assert.That(data[0], Is.EqualTo(TEST_DATA[4])); + Assert.That(data[1], Is.EqualTo(TEST_DATA[5])); + } + + [Test] + public void MemoryStreamFileSourceTest() + { + MemoryStream ms = new MemoryStream(TEST_DATA); + StreamFileSource source = new StreamFileSource(ms); + byte[] data = source.GetData(4, 2); + + Assert.That(data[0], Is.EqualTo(TEST_DATA[4])); + Assert.That(data[1], Is.EqualTo(TEST_DATA[5])); + } +} diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/AFS/AFSDataFormatTests.cs b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/AFS/AFSDataFormatTests.cs new file mode 100644 index 0000000..7b58708 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/AFS/AFSDataFormatTests.cs @@ -0,0 +1,35 @@ +using MBS.Editor.Core; +using MBS.Editor.Core.ObjectModels.FileSystem; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; +using MBS.Editor.Plugins.CRI.DataFormats.FileSystem.AFS; + +namespace MBS.Editor.Plugins.CRI.Tests.DataFormats.AFS; + +public class AFSDataFormatTests +{ + [SetUp] + public void Setup() + { + } + + [Test] + public void Test1() + { + FileSystemObjectModel fsom = new FileSystemObjectModel(); + fsom.Items.AddFile("System.dat", new ByteArrayFileSource(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE })); + + System.IO.MemoryStream ms = new System.IO.MemoryStream(); + + AFSDataFormat afs = new AFSDataFormat(); + Document.Save(fsom, afs, ms); + + ms.Seek(0, System.IO.SeekOrigin.Begin); + + fsom = new FileSystemObjectModel(); + Document.Load(fsom, afs, ms); + + Assert.That(fsom.Items.GetFiles().Length, Is.EqualTo(1)); + Assert.That((fsom.Items["System.dat"] as FileSystemFile)?.Source?.GetData(2, 1)[0], Is.EqualTo(0xBA)); + + } +} \ No newline at end of file diff --git 
a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/CPK/CPKDataFormatTests.cs b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/CPK/CPKDataFormatTests.cs new file mode 100644 index 0000000..a2678c9 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/DataFormats/CPK/CPKDataFormatTests.cs @@ -0,0 +1,170 @@ +using System; +using MBS.Core; +using MBS.Core.Collections; +using MBS.Editor.Core; +using MBS.Editor.Core.ObjectModels.FileSystem; +using MBS.Editor.Core.ObjectModels.FileSystem.FileSources; +using MBS.Editor.Plugins.CRI.DataFormats.FileSystem.CPK; +using NUnit.Framework.Internal; + +namespace MBS.Editor.Plugins.CRI.Tests.DataFormats.CPK; + +public class CPKDataFormatTests +{ + private System.Collections.Generic.Dictionary testDataStreams = new System.Collections.Generic.Dictionary(); + + [SetUp] + public void Setup() + { + MBS.Core.Reflection.ManifestResourceStream[] strms = MBS.Core.Reflection.ManifestResourceStream.GetManifestResourceStreamsForAssembly(System.Reflection.Assembly.GetExecutingAssembly()); + for (int i = 0; i < strms.Length; i++) + { + testDataStreams[strms[i].Name] = strms[i].Stream; + } + } + + [Test(Description = "Ensures the CPKDataFormat can store exactly one file, uncompressed, with no filename masking.")] + public void EditorReadWriteSingleFileUncompressedUnmaskedTest() + { + byte[] TEST_DATA = new byte[] { 0xCA, 0xFE, 0xBA, 0xBE }; + FileSystemObjectModel fsom = new FileSystemObjectModel(); + fsom.Items.AddFile("System.dat", new ByteArrayFileSource(TEST_DATA)); + + System.IO.MemoryStream ms = new System.IO.MemoryStream(); + + CPKDataFormat afs = new CPKDataFormat(); + Document.Save(fsom, afs, ms); + + byte[] stdata = ms.ToArray(); + System.IO.File.WriteAllBytes("/tmp/tst.cpk", stdata); + + ms.Seek(0, System.IO.SeekOrigin.Begin); + + fsom = new FileSystemObjectModel(); + Document.Load(fsom, afs, ms); + + Assert.That(fsom.Items.GetFiles().Length, Is.EqualTo(1), "File system must have exactly one file"); + + FileSystemFile? 
System_dat = fsom.Items["System.dat"] as FileSystemFile; + Assert.That(System_dat, Is.Not.Null, "The file 'System.dat' must exist and be a File"); + Assert.That(System_dat.Source, Is.Not.Null, "The file 'System.dat' must have an associated FileSource"); + Assert.That(System_dat.Source.Length, Is.EqualTo(TEST_DATA.Length)); + + Console.WriteLine(String.Format("System.dat source offset: {0} length: {1}", ((CompressedEmbeddedFileSource)System_dat.Source).Offset, System_dat.Source.Length)); + + byte[] data = System_dat.Source.GetData(); + Console.WriteLine("System.dat bytes: " + data.ToString(" ", "x")); + + Assert.That(System_dat.Source.GetData(2, 1)[0], Is.EqualTo(0xBA)); + } + + private void SampleStreamTest(string streamName) + { + if (testDataStreams.ContainsKey(streamName)) + { + System.IO.Stream TEST_STREAM = testDataStreams[streamName]; + SampleStreamTest(TEST_STREAM); + } + else + { + Console.Error.WriteLine("test data stream not found: '" + streamName + "'"); + Assert.Ignore(); + } + } + private void SampleStreamTest(System.IO.Stream stream) + { + stream.Seek(0, System.IO.SeekOrigin.Begin); + + CPKDataFormat afs = new CPKDataFormat(); + FileSystemObjectModel fsom = new FileSystemObjectModel(); + Document.Load(fsom, afs, stream); + + Assert.That(fsom.Items.Count, Is.EqualTo(2)); + Assert.That(fsom.Items[0].Name, Is.EqualTo("folder1")); + Assert.That(fsom.Items[1].Name, Is.EqualTo("folder2")); + + Assert.That(fsom.Items[0] is FileSystemFolder); + Assert.That(fsom.Items[1] is FileSystemFolder); + + Assert.That(((FileSystemFolder)fsom.Items[0]).Items.Count, Is.EqualTo(2)); + Assert.That(((FileSystemFolder)fsom.Items[0]).Items[0].Name, Is.EqualTo("data1.bmp")); + Assert.That(((FileSystemFile)((FileSystemFolder)fsom.Items[0]).Items[0]).Size, Is.EqualTo(50228)); + + byte[] BM4 = ((FileSystemFile)((FileSystemFolder)fsom.Items[0]).Items[0]).Source.GetData(); + Assert.That(BM4[0], Is.EqualTo((byte)'B')); + Assert.That(BM4[1], Is.EqualTo((byte)'M')); + + Assert.That(((FileSystemFolder)fsom.Items[0]).Items[1].Name, Is.EqualTo("data2.bmp")); + Assert.That(((FileSystemFile)((FileSystemFolder)fsom.Items[0]).Items[1]).Size, Is.EqualTo(50228)); + + Assert.That(((FileSystemFolder)fsom.Items[1]).Items.Count, Is.EqualTo(3)); + Assert.That(((FileSystemFolder)fsom.Items[1]).Items[0].Name, Is.EqualTo("data3.bmp")); + Assert.That(((FileSystemFile)((FileSystemFolder)fsom.Items[1]).Items[0]).Size, Is.EqualTo(50228)); + + Assert.That(((FileSystemFolder)fsom.Items[1]).Items[1].Name, Is.EqualTo("data4.bmp")); + Assert.That(((FileSystemFile)((FileSystemFolder)fsom.Items[1]).Items[1]).Size, Is.EqualTo(50228)); + + Assert.That(((FileSystemFolder)fsom.Items[1]).Items[2].Name, Is.EqualTo("voice1.ahx")); + Assert.That(((FileSystemFile)((FileSystemFolder)fsom.Items[1]).Items[2]).Size, Is.EqualTo(2120)); + } + + [Test(Description = "Ensures that CPK files generated by CPKMG are understood by MBS Editor")] + public void CPKFileBuilderUncompressedUnmaskedTest() + { + SampleStreamTest("MBS.Editor.Plugins.CRI.Tests.Resources.TestData.sample_data_uncompressed_unmasked.cpk"); + } + + [Test(Description = "Ensures that CPK files generated by CPKMG, including with IDs and filenames, are understood by MBS Editor")] + public void CPKFileBuilderUncompressedUnmaskedWithIDsAndFileNamesTest() + { + SampleStreamTest("MBS.Editor.Plugins.CRI.Tests.Resources.TestData.sample_data_uncompressed_unmasked_idfn.cpk"); + } + + [Test(Description = "Ensures that CPK files generated by CPKMG, using default settings, are understood by MBS Editor")] 
+ public void CPKFileBuilderDefaultTest() + { + // this includes CRILAYLA compression, yay! + SampleStreamTest("MBS.Editor.Plugins.CRI.Tests.Resources.TestData.sample_data.cpk"); + } + + [Test(Description = "Ensures that CPK files generated by CPKMG, with filename information masked, are understood by MBS Editor")] + public void CPKFileBuilderUncompressedMaskedTest() + { + SampleStreamTest("MBS.Editor.Plugins.CRI.Tests.Resources.TestData.sample_data_uncompressed_masked.cpk"); + } + + [Test(Description = "Ensures the CPKDataFormat can store exactly one file, CRILAYLA compressed, with no filename masking.")] + public void SingleFileLaylaCompressedUnmaskedTest() + { + byte[] TEST_DATA = new byte[] { 0xCA, 0xFE, 0xBA, 0xBE }; + FileSystemObjectModel fsom = new FileSystemObjectModel(); + fsom.Items.AddFile("System.dat", new ByteArrayFileSource(TEST_DATA)); + + System.IO.MemoryStream ms = new System.IO.MemoryStream(); + + CPKDataFormat afs = new CPKDataFormat(); + Document.Save(fsom, afs, ms); + + byte[] stdata = ms.ToArray(); + System.IO.File.WriteAllBytes("/tmp/tst.cpk", stdata); + + ms.Seek(0, System.IO.SeekOrigin.Begin); + + fsom = new FileSystemObjectModel(); + Document.Load(fsom, afs, ms); + + Assert.That(fsom.Items.GetFiles().Length, Is.EqualTo(1), "File system must have exactly one file"); + + FileSystemFile? System_dat = fsom.Items["System.dat"] as FileSystemFile; + Assert.That(System_dat, Is.Not.Null, "The file 'System.dat' must exist and be a File"); + Assert.That(System_dat.Source, Is.Not.Null, "The file 'System.dat' must have an associated FileSource"); + Assert.That(System_dat.Source.Length, Is.EqualTo(TEST_DATA.Length)); + + Console.WriteLine(String.Format("System.dat source offset: {0} length: {1}", ((CompressedEmbeddedFileSource)System_dat.Source).Offset, System_dat.Source.Length)); + + byte[] data = System_dat.Source.GetData(); + Console.WriteLine("System.dat bytes: " + data.ToString(" ", "x")); + + Assert.That(System_dat.Source.GetData(2, 1)[0], Is.EqualTo(0xBA)); + } +} \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/GlobalUsings.cs b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/GlobalUsings.cs new file mode 100644 index 0000000..cefced4 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/GlobalUsings.cs @@ -0,0 +1 @@ +global using NUnit.Framework; \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/MBS.Editor.Plugins.CRI.Tests.csproj b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/MBS.Editor.Plugins.CRI.Tests.csproj new file mode 100644 index 0000000..cd028c0 --- /dev/null +++ b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/MBS.Editor.Plugins.CRI.Tests.csproj @@ -0,0 +1,26 @@ + + + net8.0 + disable + enable + false + true + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data.cpk b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data.cpk new file mode 100644 index 0000000..a73390d Binary files /dev/null and b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data.cpk differ diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_masked.cpk b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_masked.cpk new file mode 100644 index 0000000..8ddeb89 Binary files /dev/null and 
b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_masked.cpk differ diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked.cpk b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked.cpk new file mode 100644 index 0000000..af259a1 Binary files /dev/null and b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked.cpk differ diff --git a/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked_idfn.cpk b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked_idfn.cpk new file mode 100644 index 0000000..43082c3 Binary files /dev/null and b/editor-dotnet/src/tests/MBS.Editor.Plugins.CRI.Tests/Resources/TestData/sample_data_uncompressed_unmasked_idfn.cpk differ