mirror of https://github.com/esiur/esiur-dotnet.git synced 2025-12-13 16:30:24 +00:00

Embedding types

2025-11-03 05:59:04 +03:00
parent 9eb57e98a2
commit a764b452e2
5 changed files with 254 additions and 141 deletions


@@ -7,6 +7,7 @@ using Esiur.Resource;
using Esiur.Resource.Template;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
@@ -505,8 +506,8 @@ public static class DataDeserializer
if (cs > 0)
{
offset += (uint)cs;
length -= (uint)cs;
offset += (uint)current.TotalLength;
length -= (uint)current.TotalLength;
previous = current;
}
else
@@ -886,42 +887,138 @@ public static class DataDeserializer
}
public static object TypedMapParser(ParsedTDU tdu, Warehouse warehouse)
public static Array TypedArrayParser(ParsedTDU tdu, TRU tru, Warehouse warehouse)
{
// get key type
var (keyCs, keyRepType) = TRU.Parse(tdu.Metadata, 0);
var (valueCs, valueRepType) = TRU.Parse(tdu.Metadata, keyCs);
var map = (IMap)Activator.CreateInstance(typeof(Map<,>).MakeGenericType(keyRepType.GetRuntimeType(warehouse), valueRepType.GetRuntimeType(warehouse)));
switch (tru.Identifier)
{
case TRUIdentifier.Int32:
return GroupInt32Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.Int64:
return GroupInt64Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.Int16:
return GroupInt16Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt32:
return GroupUInt32Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt64:
return GroupUInt64Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt16:
return GroupUInt16Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
default:
var results = new List<object>();
var list = new List<object>();
ParsedTDU current;
ParsedTDU? previous = null;
var offset = tdu.Offset;
var length = tdu.ContentLength;
var ends = offset + (uint)length;
while (length > 0)
{
var (cs, reply) = Codec.ParseSync(tdu.Data, offset, warehouse);
current = ParsedTDU.Parse(tdu.Data, offset, ends);
if (current.Class == TDUClass.Invalid)
throw new Exception("Unknown type.");
results.Add(reply);
if (current.Identifier == TDUIdentifier.TypeContinuation)
{
current.Class = previous.Value.Class;
current.Identifier = previous.Value.Identifier;
current.Metadata = previous.Value.Metadata;
}
else if (current.Identifier == TDUIdentifier.TypeOfTarget)
{
var (idf, mt) = tru.GetMetadata();
current.Class = TDUClass.Typed;
current.Identifier = idf;
current.Metadata = mt;
current.Index = (int)idf & 0x7;
}
var (cs, reply) = Codec.ParseSync(current, warehouse);
list.Add(reply);
if (cs > 0)
{
offset += (uint)cs;
length -= (uint)cs;
offset += (uint)current.TotalLength;
length -= (uint)current.TotalLength;
previous = current;
}
else
throw new Exception("Error while parsing structured data");
}
for (var i = 0; i < results.Count; i += 2)
map.Add(results[i], results[i + 1]);
var runtimeType = tru.GetRuntimeType(warehouse);
var rt = Array.CreateInstance(runtimeType, list.Count);
Array.Copy(list.ToArray(), rt, rt.Length);
return rt;
}
}
public static object TypedMapParser(ParsedTDU tdu, Warehouse warehouse)
{
// get key type
var (keyCs, keysTru) = TRU.Parse(tdu.Metadata, 0);
var (valueCs, valuesTru) = TRU.Parse(tdu.Metadata, keyCs);
var map = (IMap)Activator.CreateInstance(typeof(Map<,>).MakeGenericType(keysTru.GetRuntimeType(warehouse), valuesTru.GetRuntimeType(warehouse)));
var keysTdu = ParsedTDU.Parse(tdu.Data, tdu.Offset,
(uint)(tdu.Offset + tdu.ContentLength));
var valuesTdu = ParsedTDU.Parse(tdu.Data,
(uint)(keysTdu.Offset+keysTdu.ContentLength),
tdu.Ends);
var keys = TypedArrayParser(keysTdu, keysTru, warehouse);
var values = TypedArrayParser(valuesTdu, valuesTru, warehouse);
for (var i = 0; i < keys.Length; i++)
map.Add(keys.GetValue(i), values.GetValue(i));
return map;
//var results = new List<object>();
//var offset = tdu.Offset;
//var length = tdu.ContentLength;
//while (length > 0)
//{
// var (cs, reply) = Codec.ParseSync(tdu.Data, offset, warehouse);
// results.Add(reply);
// if (cs > 0)
// {
// offset += (uint)cs;
// length -= (uint)cs;
// }
// else
// throw new Exception("Error while parsing structured data");
//}
//for (var i = 0; i < results.Count; i += 2)
// map.Add(results[i], results[i + 1]);
//return map;
}
public static AsyncReply TupleParserAsync(ParsedTDU tdu, DistributedConnection connection, uint[] requestSequence)
@@ -1071,6 +1168,11 @@ public static class DataDeserializer
}
public static AsyncReply TypedArrayParserAsync(byte[] data, uint offset, TRU tru, DistributedConnection connection, uint[] requestSequence)
{
throw new NotImplementedException();
}
public static AsyncReply TypedListParserAsync(ParsedTDU tdu, DistributedConnection connection, uint[] requestSequence)
{
// get the type
@@ -1149,85 +1251,9 @@ public static class DataDeserializer
public static object TypedListParser(ParsedTDU tdu, Warehouse warehouse)
{
// get the type
var (hdrCs, rep) = TRU.Parse(tdu.Metadata, 0);
var (hdrCs, tru) = TRU.Parse(tdu.Metadata, 0);
switch (rep.Identifier)
{
case TRUIdentifier.Int32:
return GroupInt32Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.Int64:
return GroupInt64Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.Int16:
return GroupInt16Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt32:
return GroupUInt32Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt64:
return GroupUInt64Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
case TRUIdentifier.UInt16:
return GroupUInt16Codec.Decode(tdu.Data.AsSpan(
(int)tdu.Offset, (int)tdu.ContentLength));
default:
var list = new List<object>();
ParsedTDU current;
ParsedTDU? previous = null;
var offset = tdu.Offset;
var length = tdu.ContentLength;
var ends = offset + (uint)length;
while (length > 0)
{
current = ParsedTDU.Parse(tdu.Data, offset, ends);
if (current.Class == TDUClass.Invalid)
throw new Exception("Unknown type.");
if (current.Identifier == TDUIdentifier.TypeContinuation)
{
current.Class = previous.Value.Class;
current.Identifier = previous.Value.Identifier;
current.Metadata = previous.Value.Metadata;
}
else if (current.Identifier == TDUIdentifier.TypeOfTarget)
{
var (idf, mt) = rep.GetMetadata();
current.Class = TDUClass.Typed;
current.Identifier = idf;
current.Metadata = mt;
current.Index = (int)idf & 0x7;
}
var (cs, reply) = Codec.ParseSync(current, warehouse);
list.Add(reply);
if (cs > 0)
{
offset += (uint)cs;
length -= (uint)cs;
previous = current;
}
else
throw new Exception("Error while parsing structured data");
}
var runtimeType = rep.GetRuntimeType(warehouse);
var rt = Array.CreateInstance(runtimeType, list.Count);
Array.Copy(list.ToArray(), rt, rt.Length);
return rt;
}
return TypedArrayParser(tdu, tru, warehouse);
}
public static AsyncBag<PropertyValue> PropertyValueArrayParserAsync(byte[] data, uint offset, uint length, DistributedConnection connection, uint[] requestSequence)//, bool ageIncluded = true)
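
For orientation, a minimal usage sketch of the reworked deserializer path above (signatures taken from the hunks; the buffer contents, the namespaces, and the int[] cast are assumptions): TypedListParser now reads the element TRU from the metadata and delegates everything to the new TypedArrayParser, which short-circuits the primitive identifiers into the Group*Codec decoders and only falls back to the per-element ParsedTDU loop, with its TypeContinuation / TypeOfTarget handling, for other element types.

// Hedged sketch: decoding a TypedList TDU whose element TRU is Int32.
// Assumes `data` holds one complete TypedList TDU starting at offset 0 and that
// GroupInt32Codec.Decode yields an int[].
static int[] DecodeInt32List(byte[] data, Warehouse warehouse)
{
    var tdu = ParsedTDU.Parse(data, 0, (uint)data.Length);
    // TypedListParser reads the element TRU from tdu.Metadata and hands the TDU to
    // TypedArrayParser, which hits the GroupInt32Codec.Decode fast path for Int32.
    return (int[])DataDeserializer.TypedListParser(tdu, warehouse);
}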


@@ -406,7 +406,7 @@ public static class DataSerializer
if (ct == null)
return new TDU(TDUIdentifier.Null, null, 0);
return Codec.ComposeInternal(intVal, warehouse, connection);
//return Codec.ComposeInternal(intVal, warehouse, connection);
return new TDU(TDUIdentifier.TypedEnum,
new byte[] { ct.Index }, 1, template.ClassId.Data);
@@ -525,37 +525,35 @@ public static class DataSerializer
//return new TDU(TDUIdentifier.List, rt.ToArray(), (uint)rt.Count);
}
public static TDU TypedListComposer(IEnumerable value, Type type, Warehouse warehouse, DistributedConnection connection)
public static byte[] TypedArrayComposer(IEnumerable value, TRU tru, Warehouse warehouse, DistributedConnection connection)
{
byte[] composed;
if (value == null)
return new TDU(TDUIdentifier.Null, new byte[0], 0);
return null;
var tru = TRU.FromType(type);
if (type == typeof(int))
if (tru.Identifier == TRUIdentifier.Int32)
{
composed = GroupInt32Codec.Encode((IList<int>)value);
}
else if (type == typeof(long))
else if (tru.Identifier == TRUIdentifier.Int64)
{
composed = GroupInt64Codec.Encode((IList<long>)value);
}
else if (type == typeof(short))
else if (tru.Identifier == TRUIdentifier.Int16)
{
composed = GroupInt16Codec.Encode((IList<short>)value);
}
else if (type == typeof(uint))
else if (tru.Identifier == TRUIdentifier.UInt32)
{
composed = GroupUInt32Codec.Encode((IList<uint>)value);
}
else if (type == typeof(ulong))
else if (tru.Identifier == TRUIdentifier.UInt64)
{
composed = GroupUInt64Codec.Encode((IList<ulong>)value);
}
else if (type == typeof(ushort))
else if (tru.Identifier == TRUIdentifier.UInt16)
{
composed = GroupUInt16Codec.Encode((IList<ushort>)value);
}
@@ -564,6 +562,7 @@ public static class DataSerializer
var rt = new List<byte>();
TDU? previous = null;
var isTyped = tru.IsTyped();
foreach (var i in value)
{
@@ -571,7 +570,7 @@ public static class DataSerializer
var currentTru = TRU.FromType(i.GetType());
if (tru.Match(currentTru))
if (isTyped && tru.Match(currentTru))
{
var d = tdu.Composed.Clip(tdu.ContentOffset,
(uint)tdu.Composed.Length - tdu.ContentOffset);
@@ -601,6 +600,16 @@ public static class DataSerializer
}
return composed;
}
public static TDU TypedListComposer(IEnumerable value, Type type, Warehouse warehouse, DistributedConnection connection)
{
var tru = TRU.FromType(type);
byte[] composed = TypedArrayComposer(value, tru, warehouse, connection);
if (composed == null)
return new TDU(TDUIdentifier.Null, new byte[0], 0);
@@ -644,48 +653,100 @@ public static class DataSerializer
if (value == null)
return new TDU(TDUIdentifier.Null, new byte[0], 0);
var kt = TRU.FromType(keyType).Compose();
var vt = TRU.FromType(valueType).Compose();
var kt = TRU.FromType(keyType);
var vt = TRU.FromType(valueType);
var rt = new List<byte>();
//var rt = new List<byte>();
var map = (IMap)value;
foreach (var el in map.Serialize())
rt.AddRange(Codec.Compose(el, warehouse, connection));
var keys = map.GetKeys();
var values = map.GetValues();
var compsedKeys = TypedArrayComposer(keys, kt, warehouse, connection);
var compsedValues = TypedArrayComposer(values, vt, warehouse, connection);
var ktb = kt.Compose();
var vtb = vt.Compose();
var metadata = DC.Combine(ktb, 0, (uint)ktb.Length, vtb, 0, (uint)vtb.Length);
//foreach (var el in map.Serialize())
// rt.AddRange(Codec.Compose(el, warehouse, connection));
var keysTdu = new TDU(TDUIdentifier.TypeOfTarget, compsedKeys, (uint)compsedKeys.Length).Composed;
var valuesTdu = new TDU(TDUIdentifier.TypeOfTarget, compsedValues, (uint)compsedValues.Length).Composed;
var all = DC.Combine(keysTdu, 0, (uint)keysTdu.Length, valuesTdu, 0, (uint)valuesTdu.Length);
return new TDU(TDUIdentifier.TypedMap, all, (uint)all.Length, metadata);
return new TDU(TDUIdentifier.TypedMap, rt.ToArray(), (uint)rt.Count,
DC.Combine(kt, 0, (uint)kt.Length, vt, 0, (uint)vt.Length));
//return new TDU(TDUIdentifier.TypedMap, rt.ToArray(), (uint)rt.Count,
// );
}
public static TDU TypedDictionaryComposer(object value, Type keyType, Type valueType, Warehouse warehouse, DistributedConnection connection)
{
if (value == null)
return new TDU(TDUIdentifier.Null, null, 0);
return new TDU(TDUIdentifier.Null, new byte[0], 0);
var kt = TRU.FromType(keyType).Compose();
var vt = TRU.FromType(valueType).Compose();
var kt = TRU.FromType(keyType);
var vt = TRU.FromType(valueType);
var rt = new List<byte>();
//var rt = new List<byte>();
//rt.AddRange(kt);
//rt.AddRange(vt);
var map = (IDictionary)value;
var dic = (IDictionary)value;
var keys = map.Keys;
var values = map.Values;
var ar = new List<object>();
foreach (var k in dic.Keys)
{
ar.Add(k);
ar.Add(dic[k]);
}
var compsedKeys = TypedArrayComposer(keys, kt, warehouse, connection);
var compsedValues = TypedArrayComposer(values, vt, warehouse, connection);
foreach (var el in ar)
rt.AddRange(Codec.Compose(el, warehouse, connection));
var ktb = kt.Compose();
var vtb = vt.Compose();
var metadata = DC.Combine(ktb, 0, (uint)ktb.Length, vtb, 0, (uint)vtb.Length);
//foreach (var el in map.Serialize())
// rt.AddRange(Codec.Compose(el, warehouse, connection));
var keysTdu = new TDU(TDUIdentifier.TypeOfTarget, compsedKeys, (uint)compsedKeys.Length).Composed;
var valuesTdu = new TDU(TDUIdentifier.TypeOfTarget, compsedValues, (uint)compsedValues.Length).Composed;
var all = DC.Combine(keysTdu, 0, (uint)keysTdu.Length, valuesTdu, 0, (uint)valuesTdu.Length);
return new TDU(TDUIdentifier.TypedMap, all, (uint)all.Length, metadata);
return new TDU(TDUIdentifier.TypedMap, rt.ToArray(), (uint)rt.Count,
DC.Combine(kt, 0, (uint)kt.Length, vt, 0, (uint)vt.Length));
//if (value == null)
// return new TDU(TDUIdentifier.Null, null, 0);
//var kt = TRU.FromType(keyType).Compose();
//var vt = TRU.FromType(valueType).Compose();
//var rt = new List<byte>();
////rt.AddRange(kt);
////rt.AddRange(vt);
//var dic = (IDictionary)value;
//var ar = new List<object>();
//foreach (var k in dic.Keys)
//{
// ar.Add(k);
// ar.Add(dic[k]);
//}
//foreach (var el in ar)
// rt.AddRange(Codec.Compose(el, warehouse, connection));
//return new TDU(TDUIdentifier.TypedMap, rt.ToArray(), (uint)rt.Count,
// DC.Combine(kt, 0, (uint)kt.Length, vt, 0, (uint)vt.Length));
}
public static byte[] DynamicArrayComposer(IEnumerable value, Warehouse warehouse, DistributedConnection connection)
@@ -839,7 +900,8 @@ public static class DataSerializer
var tdu = Codec.ComposeInternal(propValue, warehouse, connection);
if (pt.ValueType.Identifier == TRUIdentifier.TypedRecord && pt.ValueType.Match(tru))
if (pt.ValueType.IsTyped() && // pt.ValueType.Identifier == TRUIdentifier.TypedRecord &&
pt.ValueType.Match(tru))
{
// strip metadata
var len = (uint)tdu.Composed.Length - tdu.ContentOffset;
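
The map composers above now embed each side of the map as its own typed array instead of interleaving key/value pairs: the key and value TRUs go into the TDU metadata, and the content is a keys TDU followed by a values TDU, each wrapped as TypeOfTarget. A hedged round-trip sketch under that reading (Warehouse and DistributedConnection instances are assumed to come from elsewhere; only signatures visible in this diff are used):

// Hedged sketch: round-tripping a Dictionary<string, int> through the new layout.
static object RoundTripAges(Warehouse warehouse, DistributedConnection connection)
{
    var ages = new Dictionary<string, int> { ["alice"] = 30, ["bob"] = 41 };

    // Metadata = TRU(string) + TRU(int); content = [keys TDU][values TDU].
    var tdu = DataSerializer.TypedDictionaryComposer(
        ages, typeof(string), typeof(int), warehouse, connection);

    // The parser slices the content back into the two embedded typed arrays and
    // re-zips them into a Map<string, int> (see TypedMapParser above).
    var parsed = ParsedTDU.Parse(tdu.Composed, 0, (uint)tdu.Composed.Length);
    return DataDeserializer.TypedMapParser(parsed, warehouse);
}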


@@ -59,6 +59,9 @@ public interface IMap
//public void Clear();
//public bool ContainsKey(object key);
public object[] Serialize();
public IEnumerable GetKeys();
public IEnumerable GetValues();
}
public class Map<KT, VT> : Dictionary<KT, VT>, IMap // IEnumerable<KeyValuePair<KT, VT>>
@@ -238,6 +241,9 @@ public class Map<KT, VT> : Dictionary<KT, VT>, IMap // IEnumerable<KeyValuePair<
return rt.ToArray();
}
public IEnumerable GetKeys() => Keys.ToArray();
public IEnumerable GetValues() => Values.ToArray();
//public VT this[KT index]
//{
// get

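The two new IMap members exist so the map composer can hand each side of the map to TypedArrayComposer as a plain sequence. A minimal sketch (assuming Map<KT, VT> keeps Dictionary's parameterless constructor; IEnumerable is System.Collections.IEnumerable):

// Hedged sketch: GetKeys()/GetValues() return materialized snapshots (Keys.ToArray(),
// Values.ToArray()), which is what the reworked map composer feeds to TypedArrayComposer.
IMap m = new Map<string, int>();
m.Add("x", 1);                      // IMap.Add(object, object), also used by TypedMapParser
m.Add("y", 2);
IEnumerable keys = m.GetKeys();     // snapshot of Keys:   "x", "y"
IEnumerable values = m.GetValues(); // snapshot of Values: 1, 2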

@@ -15,9 +15,11 @@ namespace Esiur.Data
public byte Exponent;
public ulong TotalLength;
public byte[] Metadata;
public uint Ends;
public static ParsedTDU Parse(byte[] data, uint offset, uint ends)
{
var h = data[offset++];
var cls = (TDUClass)(h >> 6);
@@ -37,6 +39,7 @@ namespace Esiur.Data
Index = (byte)h & 0x7,
ContentLength = 0,
TotalLength = 1,
Ends = ends
};
ulong cl = (ulong)(1 << (exp - 1));
@@ -60,6 +63,7 @@ namespace Esiur.Data
TotalLength = 1 + cl,
Exponent = (byte)exp,
Index = (byte)h & 0x7,
Ends = ends
};
}
else if (cls == TDUClass.Typed)
@@ -99,6 +103,7 @@ namespace Esiur.Data
TotalLength = 1 + cl + cll,
Index = (byte)h & 0x7,
Metadata = metaData,
Ends = ends
};
}
else
@@ -134,7 +139,8 @@ namespace Esiur.Data
Class = cls,
ContentLength = cl,
TotalLength = 1 + cl + cll,
Index = (byte)h & 0x7
Index = (byte)h & 0x7,
Ends = ends
};
}
}
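
The new Ends field just records the exclusive upper bound that was passed into Parse, so nested parsers can keep slicing inside the same buffer without recomputing the enclosing container's limit. A hedged sketch mirroring how TypedMapParser above uses it (buffer is a hypothetical byte[] holding one TypedMap TDU at offset 0):

// Hedged sketch: the outer map TDU's bound is reused when slicing out the embedded
// keys/values TDUs; the casts mirror the deserializer hunk above.
var outer = ParsedTDU.Parse(buffer, 0, (uint)buffer.Length);    // outer.Ends == buffer.Length
var keysTdu = ParsedTDU.Parse(buffer, outer.Offset,
    (uint)(outer.Offset + outer.ContentLength));
var valuesTdu = ParsedTDU.Parse(buffer,
    (uint)(keysTdu.Offset + keysTdu.ContentLength),
    outer.Ends);                                                // same bound, no recomputation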


@@ -189,8 +189,19 @@ namespace Esiur.Data
return new TRU(Identifier, true, UUID, SubTypes);
}
public bool IsTyped()
{
if (Identifier == TRUIdentifier.TypedList && SubTypes[0].Identifier == TRUIdentifier.UInt8)
return false;
return (UUID != null) || (SubTypes != null && SubTypes.Length > 0);
}
public bool Match(TRU other)
{
//if (UUID == null && (SubTypes == null || SubTypes.Length == 0))
// return false;
if (other.Identifier != Identifier)
return false;
if (other.UUID != UUID)
@@ -221,6 +232,8 @@ namespace Esiur.Data
case TRUIdentifier.TypedMap:
return (TDUIdentifier.TypedMap,
SubTypes[0].Compose().Concat(SubTypes[1].Compose()).ToArray());
case TRUIdentifier.Enum:
return (TDUIdentifier.TypedEnum, UUID?.Data);
default:
throw new NotImplementedException();
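
IsTyped() above deliberately reports false for a TypedList of UInt8 (raw byte sequences), so the metadata-stripping path in the list composer only kicks in when elements actually carry type metadata, and Match() no longer insists on a UUID or subtypes before comparing. A hedged sketch of the intended behaviour (MyRecord is a hypothetical record type; the byte[]-to-TypedList/UInt8 mapping of TRU.FromType is an assumption):

// Hedged sketch of the new TRU.IsTyped() behaviour.
var bytesTru = TRU.FromType(typeof(byte[]));       // assumed: TypedList with a UInt8 subtype
var recordsTru = TRU.FromType(typeof(MyRecord[])); // MyRecord is hypothetical

bool rawBytes = bytesTru.IsTyped();    // false: byte lists stay raw, nothing to strip per element
bool typed = recordsTru.IsTyped();     // true: UUID/subtype metadata present, so the composer
                                       // may strip the repeated per-element metadata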