using System.IO;

using BeyondTools.SparkBuffer.Extensions;

namespace BeyondTools.SparkBuffer
{
    /// <summary>
    /// A bean (record) type definition read from a SparkBuffer schema stream:
    /// its type hash, name, and the layout of each of its fields.
    /// </summary>
    public struct BeanType
    {
        public int typeHash;
        public string name;
        public Field[] fields;

        /// <summary>
        /// Deserializes one bean type definition from <paramref name="reader"/>.
        /// Wire layout (as read here): int32 type hash, SparkBuffer string name,
        /// 4-byte alignment, int32 field count, then one field record per field.
        /// </summary>
        /// <param name="reader">Reader positioned at the start of a bean type record.</param>
        /// <exception cref="InvalidDataException">
        /// A field declares a <see cref="SparkType"/> this parser does not support.
        /// (Derives from <see cref="System.Exception"/>, so existing catch handlers
        /// for the previous bare <c>Exception</c> remain compatible.)
        /// </exception>
        public BeanType(BinaryReader reader)
        {
            typeHash = reader.ReadInt32();
            name = reader.ReadSparkBufferString();
            reader.Align4Bytes();
            var fieldCount = reader.ReadInt32();
            fields = new Field[fieldCount];
            // ref iteration mutates the array elements in place (Field is a struct).
            foreach (ref var field in fields.AsSpan())
            {
                field.name = reader.ReadSparkBufferString();
                field.type = reader.ReadSparkType();
                switch (field.type)
                {
                    case SparkType.Bool:
                    case SparkType.Byte:
                    case SparkType.Int:
                    case SparkType.Long:
                    case SparkType.Float:
                    case SparkType.Double:
                    case SparkType.String:
                        // Primitive types carry no extra metadata.
                        break;

                    case SparkType.Enum:
                    case SparkType.Bean:
                        // Enum/bean fields reference their declaring type by hash.
                        reader.Align4Bytes();
                        field.typeHash = reader.ReadInt32();
                        break;

                    case SparkType.Array:
                        field.type2 = reader.ReadSparkType(); // element type
                        if (field.type2.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash = reader.ReadInt32();
                        }
                        break;

                    case SparkType.Map:
                        field.type2 = reader.ReadSparkType(); // key type
                        field.type3 = reader.ReadSparkType(); // value type
                        if (field.type2.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash = reader.ReadInt32();
                        }
                        if (field.type3.Value.IsEnumOrBeanType())
                        {
                            reader.Align4Bytes();
                            field.typeHash2 = reader.ReadInt32();
                        }
                        break;

                    default:
                        throw new InvalidDataException(
                            $"Unsupported bean field type {field.type} at position {reader.BaseStream.Position}");
                }
            }
        }

        /// <summary>One field of a bean type.</summary>
        public struct Field
        {
            public string name;
            public SparkType type;

            /// <summary>
            /// Element type (arrays) or key type (maps); null for other kinds.
            /// </summary>
            public SparkType? type2;

            /// <summary>
            /// Value type (maps only); null for other kinds.
            /// </summary>
            public SparkType? type3;

            /// <summary>
            /// Type hash of the field's enum/bean type, array element type, or map
            /// key type, when that type is an enum or bean; otherwise null.
            /// </summary>
            public int? typeHash;

            /// <summary>
            /// Type hash of a map's value type, when that type is an enum or bean;
            /// otherwise null.
            /// </summary>
            public int? typeHash2;
        }
    }
}