Endfield??
This commit is contained in:
95
BeyondTools.SparkBuffer/BeanType.cs
Normal file
95
BeyondTools.SparkBuffer/BeanType.cs
Normal file
@@ -0,0 +1,95 @@
|
||||
using BeyondTools.SparkBuffer.Extensions;
|
||||
|
||||
namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// Definition of a serialized "bean" (record) type, parsed from the
    /// type-definition section of a SparkBuffer table file.
    /// </summary>
    public struct BeanType
    {
        public int typeHash;
        public string name;
        public Field[] fields;

        /// <summary>
        /// Reads one bean type definition at the reader's current position.
        /// Layout: int32 hash, NUL-terminated name, 4-byte alignment,
        /// int32 field count, then one field record per field.
        /// </summary>
        public BeanType(BinaryReader reader)
        {
            typeHash = reader.ReadInt32();
            name = reader.ReadSparkBufferString();
            reader.Align4Bytes();
            var fieldCount = reader.ReadInt32();
            fields = new Field[fieldCount];

            for (var i = 0; i < fields.Length; i++)
            {
                ref var field = ref fields[i];
                field.name = reader.ReadSparkBufferString();
                field.type = reader.ReadSparkType();
                switch (field.type)
                {
                    case SparkType.Bool:
                    case SparkType.Byte:
                    case SparkType.Int:
                    case SparkType.Long:
                    case SparkType.Float:
                    case SparkType.Double:
                    case SparkType.String:
                        // Primitive field: no extra metadata follows.
                        break;
                    case SparkType.Enum:
                    case SparkType.Bean:
                        // Enum/bean fields carry a 4-byte-aligned hash of the referenced type.
                        reader.Align4Bytes();
                        field.typeHash = reader.ReadInt32();
                        break;
                    case SparkType.Array:
                        field.type2 = reader.ReadSparkType();

                        if (field.type2.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash = reader.ReadInt32();
                        }
                        break;
                    case SparkType.Map:
                        field.type2 = reader.ReadSparkType();
                        field.type3 = reader.ReadSparkType();

                        if (field.type2.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash = reader.ReadInt32();
                        }
                        if (field.type3.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash2 = reader.ReadInt32();
                        }
                        break;
                    default:
                        throw new Exception(string.Format("Unsupported bean field type {0} at position {1}", field.type, reader.BaseStream.Position));
                }
            }
        }

        /// <summary>One field of a bean type definition.</summary>
        public struct Field
        {
            public string name;
            public SparkType type;

            /// <summary>
            /// <see cref="SparkType.Array"/> element type, or <see cref="SparkType.Map"/> key type.
            /// </summary>
            public SparkType? type2;

            /// <summary>
            /// <see cref="SparkType.Map"/> value type.
            /// </summary>
            public SparkType? type3;

            /// <summary>
            /// Type hash for <see cref="SparkType.Bean"/>, <see cref="SparkType.Enum"/>,
            /// <see cref="SparkType.Array"/> elements, or the <see cref="SparkType.Map"/> key type.
            /// </summary>
            public int? typeHash;

            /// <summary>
            /// Type hash for the <see cref="SparkType.Map"/> value type.
            /// </summary>
            public int? typeHash2;
        }
    }
}
|
||||
16
BeyondTools.SparkBuffer/BeyondTools.SparkBuffer.csproj
Normal file
16
BeyondTools.SparkBuffer/BeyondTools.SparkBuffer.csproj
Normal file
@@ -0,0 +1,16 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net9.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="ConsoleAppFramework" Version="5.3.3">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
33
BeyondTools.SparkBuffer/EnumType.cs
Normal file
33
BeyondTools.SparkBuffer/EnumType.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
using BeyondTools.SparkBuffer.Extensions;
|
||||
|
||||
namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// Definition of a serialized enum type, parsed from the
    /// type-definition section of a SparkBuffer table file.
    /// </summary>
    public struct EnumType
    {
        public int typeHash;
        public string name;
        public EnumItem[] enums;

        /// <summary>
        /// Reads one enum type definition at the reader's current position.
        /// Layout: int32 hash, NUL-terminated name, 4-byte alignment,
        /// int32 item count, then per item a name, alignment, and int32 value.
        /// </summary>
        public EnumType(BinaryReader reader)
        {
            typeHash = reader.ReadInt32();
            name = reader.ReadSparkBufferString();
            reader.Align4Bytes();
            var itemCount = reader.ReadInt32();
            enums = new EnumItem[itemCount];

            for (var i = 0; i < enums.Length; i++)
            {
                enums[i].name = reader.ReadSparkBufferString();
                reader.Align4Bytes();
                enums[i].value = reader.ReadInt32();
            }
        }

        /// <summary>A single named enum member and its numeric value.</summary>
        public struct EnumItem
        {
            public string name;
            public int value;
        }
    }
}
|
||||
46
BeyondTools.SparkBuffer/Extensions/BinaryReaderExtensions.cs
Normal file
46
BeyondTools.SparkBuffer/Extensions/BinaryReaderExtensions.cs
Normal file
@@ -0,0 +1,46 @@
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace BeyondTools.SparkBuffer.Extensions
{
    /// <summary><see cref="BinaryReader"/> helpers for the SparkBuffer binary format.</summary>
    public static class BinaryReaderExtensions
    {
        /// <summary>Reads a single byte and interprets it as a <see cref="SparkType"/> tag.</summary>
        public static SparkType ReadSparkType(this BinaryReader reader)
            => (SparkType)reader.ReadByte();

        /// <summary>Seeks the underlying stream and returns the new position.</summary>
        public static long Seek(this BinaryReader reader, long pos, SeekOrigin seekOrigin = SeekOrigin.Begin)
            => reader.BaseStream.Seek(pos, seekOrigin);

        /// <summary>
        /// Reads a NUL-terminated UTF-8 string starting at the current position.
        /// The terminating zero byte is consumed but not included in the result.
        /// </summary>
        public static string ReadSparkBufferString(this BinaryReader reader)
        {
            using MemoryStream buffer = new();
            while (true)
            {
                byte b = reader.ReadByte();
                if (b == 0)
                    break;
                buffer.WriteByte(b);
            }

            return Encoding.UTF8.GetString(buffer.ToArray());
        }

        /// <summary>
        /// Reads an int32 absolute offset, then reads the NUL-terminated string stored
        /// at that offset and restores the stream position afterwards.
        /// An offset of -1 denotes the empty string.
        /// </summary>
        public static string ReadSparkBufferStringOffset(this BinaryReader reader)
        {
            var stringOffset = reader.ReadInt32();
            if (stringOffset == -1)
                return string.Empty;

            var oldPosition = reader.BaseStream.Position;
            try
            {
                reader.Seek(stringOffset);
                return reader.ReadSparkBufferString();
            }
            finally
            {
                // Restore the caller's position even if the far read throws
                // (e.g. a truncated file), so the reader is never left dangling.
                reader.BaseStream.Position = oldPosition;
            }
        }

        /// <summary>
        /// Advances the stream to the next 4-byte boundary; no-op when already aligned.
        /// </summary>
        public static void Align4Bytes(this BinaryReader reader)
        {
            // Fixed: the previous (Position - 1)-based formula skipped a position of 0
            // forward to offset 4 even though 0 is already aligned. For every
            // position >= 1 this standard round-up is identical to the old behavior.
            var pos = reader.BaseStream.Position;
            var aligned = (pos + 3) & ~3L;
            if (aligned != pos)
                reader.Seek(aligned);
        }
    }
}
|
||||
@@ -0,0 +1,8 @@
|
||||
namespace BeyondTools.SparkBuffer.Extensions
{
    /// <summary>Classification helpers for <see cref="SparkType"/> values.</summary>
    public static class SparkTypeExtensions
    {
        /// <summary>
        /// True when the value refers to a named type definition
        /// (<see cref="SparkType.Enum"/> or <see cref="SparkType.Bean"/>),
        /// i.e. when a type hash follows it in the stream.
        /// </summary>
        public static bool IsEnumOrBeanType(this SparkType type)
            => type == SparkType.Enum || type == SparkType.Bean;
    }
}
|
||||
8
BeyondTools.SparkBuffer/Properties/launchSettings.json
Normal file
8
BeyondTools.SparkBuffer/Properties/launchSettings.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"profiles": {
|
||||
"BeyondTools.SparkBuffer": {
|
||||
"commandName": "Project",
|
||||
"commandLineArgs": "C:\\EndfieldOut\\Assets\\TextAsset\\TableCfg C:\\EndfieldOut\\TableCfgOutput"
|
||||
}
|
||||
}
|
||||
}
|
||||
243
BeyondTools.SparkBuffer/SparkBufferDump.cs
Normal file
243
BeyondTools.SparkBuffer/SparkBufferDump.cs
Normal file
@@ -0,0 +1,243 @@
|
||||
using BeyondTools.SparkBuffer.Extensions;
|
||||
using ConsoleAppFramework;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// Command-line tool that dumps SparkBuffer table files to JSON.
    /// Usage: <c>SparkBufferDump &lt;tableCfgDir&gt; &lt;outputDir&gt;</c>.
    /// </summary>
    internal class SparkBufferDump
    {
        static void Main(string[] args)
        {
            ConsoleApp.Run(args, (
                [Argument] string tableCfgDir,
                [Argument] string outputDir) =>
            {
                if (!Directory.Exists(tableCfgDir))
                    throw new FileNotFoundException($"{tableCfgDir} isn't a valid directory");
                if (!Directory.Exists(outputDir))
                    Directory.CreateDirectory(outputDir);

                var success = 0;
                var processingCount = 0;
                foreach (var tablePath in Directory.EnumerateFiles(tableCfgDir))
                {
                    var fileName = Path.GetFileName(tablePath);

                    Console.WriteLine("Reading {0}...", fileName);
                    processingCount++;

                    using var file = File.OpenRead(tablePath);
                    using var binaryReader = new BinaryReader(file);

                    try
                    {
                        // File header: three absolute offsets into the file.
                        var typeDefOffset = binaryReader.ReadInt32();
                        var rootDefOffset = binaryReader.ReadInt32();
                        var dataOffset = binaryReader.ReadInt32();

                        binaryReader.Seek(typeDefOffset);
                        SparkManager.ReadTypeDefinitions(binaryReader);

                        // Root definition: type byte + name, then type-specific metadata,
                        // mirroring the per-field layout parsed by BeanType.
                        binaryReader.Seek(rootDefOffset);
                        var rootDef = new BeanType.Field
                        {
                            type = binaryReader.ReadSparkType(),
                            name = binaryReader.ReadSparkBufferString()
                        };

                        if (rootDef.type.IsEnumOrBeanType())
                        {
                            binaryReader.Align4Bytes();
                            rootDef.typeHash = binaryReader.ReadInt32();
                        }
                        if (rootDef.type == SparkType.Map)
                        {
                            rootDef.type2 = binaryReader.ReadSparkType();
                            rootDef.type3 = binaryReader.ReadSparkType();

                            if (rootDef.type2.Value.IsEnumOrBeanType())
                            {
                                binaryReader.Align4Bytes();
                                rootDef.typeHash = binaryReader.ReadInt32();
                            }
                            if (rootDef.type3.Value.IsEnumOrBeanType())
                            {
                                binaryReader.Align4Bytes();
                                rootDef.typeHash2 = binaryReader.ReadInt32();
                            }
                        }

                        binaryReader.Seek(dataOffset);
                        var resultFilePath = Path.Combine(outputDir, $"{rootDef.name}.json");
                        switch (rootDef.type)
                        {
                            case SparkType.Bean:
                                var rootBeanType = SparkManager.BeanTypeFromHash((int)rootDef.typeHash!);
                                var beanDump = ReadBeanAsJObject(binaryReader, rootBeanType);
                                File.WriteAllText(resultFilePath, beanDump!.ToString());
                                break;
                            case SparkType.Map:
                                var mapDump = ReadMapAsJObject(binaryReader, rootDef);
                                File.WriteAllText(resultFilePath, JsonSerializer.Serialize(mapDump, SparkManager.jsonSerializerOptions));
                                break;
                            default:
                                throw new NotSupportedException(string.Format("Unsupported root type {0}", rootDef.type));
                        }

                        Console.WriteLine("Dumped {0} successfully", rootDef.name);
                        success++;
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine("Error in reading {0}, Error: {1}", fileName, ex.ToString());
                    }
                }

                Console.WriteLine("Dumped {0}/{1}", success, processingCount);
            });
        }

        /// <summary>
        /// Reads a serialized map at the current position as a JSON object.
        /// Layout: int32 pair count, then all keys in order, then all values in the
        /// same order. Only string/int keys and bean/string/int values are supported.
        /// </summary>
        static JsonObject? ReadMapAsJObject(BinaryReader binaryReader, BeanType.Field typeDef)
        {
            var mapDump = new JsonObject();
            var kvCount = binaryReader.ReadInt32();

            // First pass: keys. Remember them in order so the value pass can pair
            // each value with its key.
            var keys = new List<string>(kvCount);
            for (int i = 0; i < kvCount; i++)
            {
                var key = typeDef.type2 switch
                {
                    SparkType.String => binaryReader.ReadSparkBufferStringOffset(),
                    SparkType.Int => binaryReader.ReadInt32().ToString(),
                    _ => throw new NotSupportedException(string.Format("Unsupported map key type {0}", typeDef.type2)),
                };
                keys.Add(key);
                mapDump[key] = null; // placeholder keeps output keys in file order
            }

            // Second pass: values.
            // Fixed: this previously wrote mapDump[i] with the integer loop variable;
            // JsonObject only supports string indexing (the int indexer throws
            // InvalidOperationException), and values were never paired with their keys.
            for (int i = 0; i < kvCount; i++)
            {
                mapDump[keys[i]] = typeDef.type3 switch
                {
                    SparkType.Bean => ReadBeanAsJObject(binaryReader, SparkManager.BeanTypeFromHash((int)typeDef.typeHash2!), true),
                    SparkType.String => binaryReader.ReadSparkBufferStringOffset(),
                    SparkType.Int => binaryReader.ReadInt32(),
                    _ => throw new NotSupportedException(string.Format("Unsupported map value type {0}", typeDef.type3)),
                };
            }

            return mapDump;
        }

        /// <summary>
        /// Reads a bean at the current position as a JSON object.
        /// When <paramref name="pointer"/> is true the bean is stored indirectly: an
        /// int32 absolute offset is read first (-1 meaning null) and the stream
        /// position is restored after the bean body has been read.
        /// </summary>
        static JsonObject? ReadBeanAsJObject(BinaryReader binaryReader, BeanType beanType, bool pointer = false)
        {
            long? pointerOrigin = null;
            if (pointer)
            {
                var beanOffset = binaryReader.ReadInt32();
                if (beanOffset == -1)
                    return null;

                pointerOrigin = binaryReader.BaseStream.Position;
                binaryReader.Seek(beanOffset);
            }

            var dumpObj = new JsonObject();

            foreach (var (fieldIndex, beanField) in beanType.fields.Index())
            {
                long? origin = null;
                if (beanField.type == SparkType.Array)
                {
                    // Arrays are stored out-of-line; -1 means a null array.
                    var fieldOffset = binaryReader.ReadInt32();
                    if (fieldOffset == -1)
                    {
                        dumpObj[beanField.name] = null;
                        continue;
                    }

                    origin = binaryReader.BaseStream.Position;
                    binaryReader.Seek(fieldOffset);
                }

                switch (beanField.type)
                {
                    case SparkType.Array:
                        var jArray = new JsonArray();

                        var itemCount = binaryReader.ReadInt32();
                        while (itemCount-- > 0)
                        {
                            switch (beanField.type2)
                            {
                                case SparkType.String:
                                    jArray.Add(binaryReader.ReadSparkBufferStringOffset());
                                    break;
                                case SparkType.Bean:
                                    jArray.Add(ReadBeanAsJObject(binaryReader, SparkManager.BeanTypeFromHash((int)beanField.typeHash!), true));
                                    break;
                                case SparkType.Float:
                                    jArray.Add(binaryReader.ReadSingle());
                                    break;
                                case SparkType.Long:
                                    jArray.Add(binaryReader.ReadInt64());
                                    break;
                                case SparkType.Int:
                                case SparkType.Enum:
                                    jArray.Add(binaryReader.ReadInt32());
                                    break;
                                case SparkType.Bool:
                                    jArray.Add(binaryReader.ReadBoolean());
                                    break;
                                default:
                                    throw new NotSupportedException(string.Format("Unsupported array type {0} on bean field, position: {1}", beanField.type2, binaryReader.BaseStream.Position));
                            }
                        }

                        dumpObj[beanField.name] = jArray;
                        break;
                    case SparkType.Int:
                    case SparkType.Enum:
                        dumpObj[beanField.name] = binaryReader.ReadInt32();
                        break;
                    case SparkType.Long:
                        dumpObj[beanField.name] = binaryReader.ReadInt64();
                        break;
                    case SparkType.Float:
                        dumpObj[beanField.name] = binaryReader.ReadSingle();
                        break;
                    case SparkType.Double:
                        dumpObj[beanField.name] = binaryReader.ReadDouble();
                        break;
                    case SparkType.String:
                        dumpObj[beanField.name] = binaryReader.ReadSparkBufferStringOffset();
                        break;
                    case SparkType.Bean:
                        dumpObj[beanField.name] = ReadBeanAsJObject(binaryReader, SparkManager.BeanTypeFromHash((int)beanField.typeHash!), true);
                        break;
                    case SparkType.Bool:
                        dumpObj[beanField.name] = binaryReader.ReadBoolean();
                        // Bools are packed one byte each; re-align only when the next
                        // field is not another bool in the same packed run.
                        if (beanType.fields.Length > fieldIndex + 1 && beanType.fields[fieldIndex + 1].type != SparkType.Bool)
                            binaryReader.Align4Bytes();
                        break;
                    case SparkType.Map:
                        var mapOffset = binaryReader.ReadInt32();
                        var mapOrigin = binaryReader.BaseStream.Position;
                        if (mapOffset == -1)
                        {
                            // Robustness: treat -1 as a null map, consistent with the
                            // array and string offset conventions above.
                            dumpObj[beanField.name] = null;
                            break;
                        }
                        binaryReader.Seek(mapOffset);
                        dumpObj[beanField.name] = ReadMapAsJObject(binaryReader, beanField);
                        binaryReader.Seek(mapOrigin);
                        break;
                    case SparkType.Byte:
                        throw new Exception(string.Format("Dumping bean field type {0} isn't supported, position: {1}", beanField.type, binaryReader.BaseStream.Position));
                }

                if (origin is not null)
                    binaryReader.BaseStream.Position = (long)origin;
            }

            if (pointerOrigin is not null)
                binaryReader.BaseStream.Position = (long)pointerOrigin;

            return dumpObj;
        }
    }
}
|
||||
38
BeyondTools.SparkBuffer/SparkManager.cs
Normal file
38
BeyondTools.SparkBuffer/SparkManager.cs
Normal file
@@ -0,0 +1,38 @@
|
||||
using BeyondTools.SparkBuffer.Extensions;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// Process-wide registry of bean and enum type definitions parsed from a
    /// table file's type-definition section.
    /// </summary>
    public static class SparkManager
    {
        public static readonly JsonSerializerOptions jsonSerializerOptions = new() { IncludeFields = true, WriteIndented = true, NumberHandling = JsonNumberHandling.AllowNamedFloatingPointLiterals };

        private static readonly Dictionary<int, BeanType> beanTypeMap = [];
        private static readonly Dictionary<int, EnumType> enumTypeMap = [];

        /// <summary>Looks up a previously registered bean type by its hash.</summary>
        public static BeanType BeanTypeFromHash(int hash)
            => beanTypeMap[hash];

        /// <summary>
        /// Reads the type-definition section at the current position: an int32 count,
        /// then per entry a type byte, 4-byte alignment, and the enum or bean body.
        /// Entries of any other type contribute nothing further and are skipped.
        /// Duplicate hashes keep the first definition seen.
        /// </summary>
        public static void ReadTypeDefinitions(BinaryReader reader)
        {
            var remaining = reader.ReadInt32();
            for (; remaining > 0; remaining--)
            {
                var sparkType = reader.ReadSparkType();
                reader.Align4Bytes();

                switch (sparkType)
                {
                    case SparkType.Enum:
                        var enumType = new EnumType(reader);
                        enumTypeMap.TryAdd(enumType.typeHash, enumType);
                        break;
                    case SparkType.Bean:
                        var beanType = new BeanType(reader);
                        beanTypeMap.TryAdd(beanType.typeHash, beanType);
                        break;
                }
            }
        }
    }
}
|
||||
17
BeyondTools.SparkBuffer/SparkType.cs
Normal file
17
BeyondTools.SparkBuffer/SparkType.cs
Normal file
@@ -0,0 +1,17 @@
|
||||
namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// Wire type tags of the SparkBuffer format. Each tag is stored as a single
    /// byte, so the implicit numeric values (Bool = 0 ... Map = 10) must match
    /// the serializer exactly — do not reorder or insert members.
    /// </summary>
    public enum SparkType : byte
    {
        Bool,
        Byte,
        Int,
        Long,
        Float,
        Double,
        Enum,   // int32 payload; definition referenced by type hash
        String, // NUL-terminated UTF-8, usually addressed via an int32 offset
        Bean,   // nested record; definition referenced by type hash
        Array,  // out-of-line: int32 offset to (count, items)
        Map     // out-of-line: int32 offset to (count, keys, values)
    }
}
|
||||
Reference in New Issue
Block a user